gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.directory.server.core.factory;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.lang.reflect.Constructor;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.exception.LdapUnwillingToPerformException;
import org.apache.directory.api.ldap.model.ldif.LdifEntry;
import org.apache.directory.api.ldap.model.ldif.LdifReader;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.server.core.annotations.AnnotationUtils;
import org.apache.directory.server.core.annotations.ApplyLdifFiles;
import org.apache.directory.server.core.annotations.ApplyLdifs;
import org.apache.directory.server.core.annotations.ContextEntry;
import org.apache.directory.server.core.annotations.CreateAuthenticator;
import org.apache.directory.server.core.annotations.CreateDS;
import org.apache.directory.server.core.annotations.CreateIndex;
import org.apache.directory.server.core.annotations.CreatePartition;
import org.apache.directory.server.core.annotations.LoadSchema;
import org.apache.directory.server.core.api.DirectoryService;
import org.apache.directory.server.core.api.DnFactory;
import org.apache.directory.server.core.api.interceptor.Interceptor;
import org.apache.directory.server.core.api.partition.Partition;
import org.apache.directory.server.core.authn.AuthenticationInterceptor;
import org.apache.directory.server.core.authn.Authenticator;
import org.apache.directory.server.core.authn.DelegatingAuthenticator;
import org.apache.directory.server.core.partition.impl.btree.AbstractBTreePartition;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex;
import org.apache.directory.server.i18n.I18n;
import org.junit.runner.Description;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A Helper class used to create a DS from the annotations
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class DSAnnotationProcessor
{
/** A logger for this class */
private static final Logger LOG = LoggerFactory.getLogger( DSAnnotationProcessor.class );
/**
* Create the DirectoryService
*/
public static DirectoryService createDS( CreateDS dsBuilder )
throws Exception
{
LOG.debug( "Starting DS {}...", dsBuilder.name() );
Class<?> factory = dsBuilder.factory();
DirectoryServiceFactory dsf = ( DirectoryServiceFactory ) factory
.newInstance();
DirectoryService service = dsf.getDirectoryService();
service.setAccessControlEnabled( dsBuilder.enableAccessControl() );
service.setAllowAnonymousAccess( dsBuilder.allowAnonAccess() );
service.getChangeLog().setEnabled( dsBuilder.enableChangeLog() );
dsf.init( dsBuilder.name() );
for ( Class<?> interceptorClass : dsBuilder.additionalInterceptors() )
{
service.addLast( ( Interceptor ) interceptorClass.newInstance() );
}
List<Interceptor> interceptorList = service.getInterceptors();
if ( dsBuilder.authenticators().length != 0 )
{
AuthenticationInterceptor authenticationInterceptor = null;
for ( Interceptor interceptor : interceptorList )
{
if ( interceptor instanceof AuthenticationInterceptor )
{
authenticationInterceptor = ( AuthenticationInterceptor ) interceptor;
break;
}
}
if ( authenticationInterceptor == null )
{
throw new IllegalStateException(
"authentication interceptor not found" );
}
Set<Authenticator> authenticators = new HashSet<Authenticator>();
for ( CreateAuthenticator createAuthenticator : dsBuilder
.authenticators() )
{
Authenticator auth = createAuthenticator.type().newInstance();
if ( auth instanceof DelegatingAuthenticator )
{
DelegatingAuthenticator dauth = ( DelegatingAuthenticator ) auth;
dauth.setDelegateHost( createAuthenticator.delegateHost() );
dauth.setDelegatePort( createAuthenticator.delegatePort() );
dauth.setDelegateSsl( createAuthenticator.delegateSsl() );
dauth.setDelegateTls( createAuthenticator.delegateTls() );
dauth.setDelegateBaseDn( createAuthenticator.delegateBaseDn() );
dauth.setDelegateSslTrustManagerFQCN( createAuthenticator.delegateSslTrustManagerFQCN() );
dauth.setDelegateTlsTrustManagerFQCN( createAuthenticator.delegateTlsTrustManagerFQCN() );
}
authenticators.add( auth );
}
authenticationInterceptor.setAuthenticators( authenticators );
authenticationInterceptor.init( service );
}
service.setInterceptors( interceptorList );
SchemaManager schemaManager = service.getSchemaManager();
// process the schemas
for ( LoadSchema loadedSchema : dsBuilder.loadedSchemas() )
{
String schemaName = loadedSchema.name();
Boolean enabled = loadedSchema.enabled();
// Check if the schema is loaded or not
boolean isLoaded = schemaManager.isSchemaLoaded( schemaName );
if ( !isLoaded )
{
// We have to load the schema, if it exists
try
{
isLoaded = schemaManager.load( schemaName );
}
catch ( LdapUnwillingToPerformException lutpe )
{
// Cannot load the schema, it does not exists
LOG.error( lutpe.getMessage() );
continue;
}
}
if ( isLoaded )
{
if ( enabled )
{
schemaManager.enable( schemaName );
if ( schemaManager.isDisabled( schemaName ) )
{
LOG.error( "Cannot enable " + schemaName );
}
}
else
{
schemaManager.disable( schemaName );
if ( schemaManager.isEnabled( schemaName ) )
{
LOG.error( "Cannot disable " + schemaName );
}
}
}
LOG.debug( "Loading schema {}, enabled= {}", schemaName, enabled );
}
// Process the Partition, if any.
for ( CreatePartition createPartition : dsBuilder.partitions() )
{
Partition partition;
// Determine the partition type
if ( createPartition.type() == Partition.class )
{
// The annotation does not specify a specific partition type.
// We use the partition factory to create partition and index
// instances.
PartitionFactory partitionFactory = dsf.getPartitionFactory();
partition = partitionFactory.createPartition(
schemaManager,
service.getDnFactory(),
createPartition.name(),
createPartition.suffix(),
createPartition.cacheSize(),
new File( service.getInstanceLayout().getPartitionsDirectory(), createPartition.name() ) );
partition.setCacheService( service.getCacheService() );
CreateIndex[] indexes = createPartition.indexes();
for ( CreateIndex createIndex : indexes )
{
partitionFactory.addIndex( partition,
createIndex.attribute(), createIndex.cacheSize() );
}
partition.initialize();
}
else
{
// The annotation contains a specific partition type, we use
// that type.
Class<?> partypes[] = new Class[]
{ SchemaManager.class, DnFactory.class };
Constructor<?> constructor = createPartition.type().getConstructor( partypes );
partition = ( Partition ) constructor.newInstance( new Object[]
{ schemaManager, service.getDnFactory() } );
partition.setId( createPartition.name() );
partition.setSuffixDn( new Dn( schemaManager, createPartition.suffix() ) );
if ( partition instanceof AbstractBTreePartition )
{
AbstractBTreePartition btreePartition = ( AbstractBTreePartition ) partition;
btreePartition.setCacheService( service.getCacheService() );
btreePartition.setCacheSize( createPartition.cacheSize() );
btreePartition.setPartitionPath( new File( service
.getInstanceLayout().getPartitionsDirectory(),
createPartition.name() ).toURI() );
// Process the indexes if any
CreateIndex[] indexes = createPartition.indexes();
for ( CreateIndex createIndex : indexes )
{
// The annotation does not specify a specific index
// type.
// We use the generic index implementation.
JdbmIndex index = new JdbmIndex( createIndex.attribute(), false );
btreePartition.addIndexedAttributes( index );
}
}
}
partition.setSchemaManager( schemaManager );
// Inject the partition into the DirectoryService
service.addPartition( partition );
// Last, process the context entry
ContextEntry contextEntry = createPartition.contextEntry();
if ( contextEntry != null )
{
injectEntries( service, contextEntry.entryLdif() );
}
}
return service;
}
/**
* Create a DirectoryService from a Unit test annotation
*
* @param description The annotations containing the info from which we will create
* the DS
* @return A valid DS
*/
public static DirectoryService getDirectoryService( Description description )
throws Exception
{
CreateDS dsBuilder = description.getAnnotation( CreateDS.class );
if ( dsBuilder != null )
{
return createDS( dsBuilder );
}
else
{
LOG.debug( "No {} DS.", description.getDisplayName() );
return null;
}
}
/**
* Create a DirectoryService from an annotation. The @CreateDS annotation
* must be associated with either the method or the encapsulating class. We
* will first try to get the annotation from the method, and if there is
* none, then we try at the class level.
*
* @return A valid DS
*/
public static DirectoryService getDirectoryService() throws Exception
{
Object instance = AnnotationUtils.getInstance( CreateDS.class );
CreateDS dsBuilder = null;
if ( instance != null )
{
dsBuilder = ( CreateDS ) instance;
// Ok, we have found a CreateDS annotation. Process it now.
return createDS( dsBuilder );
}
throw new LdapException( I18n.err( I18n.ERR_114 ) );
}
/**
* injects an LDIF entry in the given DirectoryService
*
* @param entry
* the LdifEntry to be injected
* @param service
* the DirectoryService
* @throws Exception
*/
private static void injectEntry( LdifEntry entry, DirectoryService service )
throws LdapException
{
if ( entry.isChangeAdd() || entry.isLdifContent() )
{
service.getAdminSession().add(
new DefaultEntry( service.getSchemaManager(), entry
.getEntry() ) );
}
else if ( entry.isChangeModify() )
{
service.getAdminSession().modify( entry.getDn(),
entry.getModifications() );
}
else
{
String message = I18n.err( I18n.ERR_117, entry.getChangeType() );
throw new LdapException( message );
}
}
/**
* injects the LDIF entries present in a LDIF file
*
* @param service
* the DirectoryService
* @param ldifFiles
* the array of LDIF file names (only )
* @throws Exception
*/
public static void injectLdifFiles( Class<?> clazz,
DirectoryService service, String[] ldifFiles ) throws Exception
{
if ( ( ldifFiles != null ) && ( ldifFiles.length > 0 ) )
{
for ( String ldifFile : ldifFiles )
{
InputStream is = clazz.getClassLoader().getResourceAsStream(
ldifFile );
if ( is == null )
{
throw new FileNotFoundException( "LDIF file '" + ldifFile
+ "' not found." );
}
else
{
LdifReader ldifReader = new LdifReader( is );
for ( LdifEntry entry : ldifReader )
{
injectEntry( entry, service );
}
ldifReader.close();
}
}
}
}
/**
* Inject an ldif String into the server. Dn must be relative to the root.
*
* @param service
* the directory service to use
* @param ldif
* the ldif containing entries to add to the server.
* @throws Exception
* if there is a problem adding the entries from the LDIF
*/
public static void injectEntries( DirectoryService service, String ldif )
throws Exception
{
LdifReader reader = new LdifReader();
List<LdifEntry> entries = reader.parseLdif( ldif );
for ( LdifEntry entry : entries )
{
injectEntry( entry, service );
}
// And close the reader
reader.close();
}
/**
* Load the schemas, and enable/disable them.
*/
public static void loadSchemas( Description desc, DirectoryService service )
{
if ( desc == null )
{
return;
}
/*for ( Class<?> loadSchema : dsBuilder.additionalInterceptors() )
{
service.addLast( ( Interceptor ) interceptorClass.newInstance() );
}*/
LoadSchema loadSchema = desc
.getAnnotation( LoadSchema.class );
if ( loadSchema != null )
{
System.out.println( loadSchema );
}
}
/**
* Apply the LDIF entries to the given service
*/
public static void applyLdifs( Description desc, DirectoryService service )
throws Exception
{
if ( desc == null )
{
return;
}
ApplyLdifFiles applyLdifFiles = desc
.getAnnotation( ApplyLdifFiles.class );
if ( applyLdifFiles != null )
{
LOG.debug( "Applying {} to {}", applyLdifFiles.value(),
desc.getDisplayName() );
injectLdifFiles( desc.getClass(), service, applyLdifFiles.value() );
}
ApplyLdifs applyLdifs = desc.getAnnotation( ApplyLdifs.class );
if ( ( applyLdifs != null ) && ( applyLdifs.value() != null ) )
{
String[] ldifs = applyLdifs.value();
String DN_START = "dn:";
StringBuilder sb = new StringBuilder();
for ( int i = 0; i < ldifs.length; )
{
String s = ldifs[i++].trim();
if ( s.startsWith( DN_START ) )
{
sb.append( s ).append( '\n' );
// read the rest of lines till we encounter Dn again
while ( i < ldifs.length )
{
s = ldifs[i++];
if ( !s.startsWith( DN_START ) )
{
sb.append( s ).append( '\n' );
}
else
{
break;
}
}
LOG.debug( "Applying {} to {}", sb, desc.getDisplayName() );
injectEntries( service, sb.toString() );
sb.setLength( 0 );
i--; // step up a line
}
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.config;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.URL;
import java.net.UnknownHostException;
import java.util.*;
import com.google.common.primitives.Longs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.auth.*;
import org.apache.cassandra.cache.IRowCacheProvider;
import org.apache.cassandra.config.Config.RequestSchedulerId;
import org.apache.cassandra.config.EncryptionOptions.ClientEncryptionOptions;
import org.apache.cassandra.config.EncryptionOptions.ServerEncryptionOptions;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DefsTable;
import org.apache.cassandra.db.SystemTable;
import org.apache.cassandra.dht.IPartitioner;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.FSWriteError;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.locator.DynamicEndpointSnitch;
import org.apache.cassandra.locator.EndpointSnitchInfo;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.SeedProvider;
import org.apache.cassandra.net.MessagingService;
import org.apache.cassandra.scheduler.IRequestScheduler;
import org.apache.cassandra.scheduler.NoScheduler;
import org.apache.cassandra.service.CacheService;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.utils.FBUtilities;
import org.yaml.snakeyaml.Loader;
import org.yaml.snakeyaml.TypeDescription;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.error.YAMLException;
public class DatabaseDescriptor
{
private static final Logger logger = LoggerFactory.getLogger(DatabaseDescriptor.class);
private static IEndpointSnitch snitch;
private static InetAddress listenAddress; // leave null so we can fall through to getLocalHost
private static InetAddress broadcastAddress;
private static InetAddress rpcAddress;
private static SeedProvider seedProvider;
private static IInternodeAuthenticator internodeAuthenticator;
/* Hashing strategy Random or OPHF */
private static IPartitioner<?> partitioner;
private static String paritionerName;
private static Config.DiskAccessMode indexAccessMode;
private static Config conf;
private static IAuthenticator authenticator = new AllowAllAuthenticator();
private static IAuthorizer authorizer = new AllowAllAuthorizer();
private final static String DEFAULT_CONFIGURATION = "cassandra.yaml";
private static IRequestScheduler requestScheduler;
private static RequestSchedulerId requestSchedulerId;
private static RequestSchedulerOptions requestSchedulerOptions;
private static long keyCacheSizeInMB;
private static IRowCacheProvider rowCacheProvider;
private static String localDC;
private static Comparator<InetAddress> localComparator;
/**
* Inspect the classpath to find storage configuration file
*/
/**
 * Inspect the classpath to find storage configuration file. The location may
 * be overridden with the "cassandra.config" system property; it is first
 * tried as a URL, then as a classpath resource.
 */
static URL getStorageConfigURL() throws ConfigurationException
{
    String configUrl = System.getProperty("cassandra.config");
    if (configUrl == null)
        configUrl = DEFAULT_CONFIGURATION;

    try
    {
        URL url = new URL(configUrl);
        url.openStream().close(); // catches well-formed but bogus URLs
        return url;
    }
    catch (Exception e)
    {
        // Not a fetchable URL; fall back to a classpath lookup.
        URL resource = DatabaseDescriptor.class.getClassLoader().getResource(configUrl);
        if (resource == null)
            throw new ConfigurationException("Cannot locate " + configUrl);
        return resource;
    }
}
// Bootstrap configuration at class-load time. Tests can set Config.getLoadYaml()
// to false to skip reading cassandra.yaml and start from an empty Config.
static
{
    if (Config.getLoadYaml())
        loadYaml();
    else
        conf = new Config();
}
/**
 * Load cassandra.yaml into the static {@code conf} field and validate /
 * normalize every directive, initializing the derived singletons
 * (authenticator, authorizer, partitioner, snitch, request scheduler,
 * cache sizes, seed provider). Any configuration or YAML error is fatal:
 * it is logged and the process exits with status 1. The order of the
 * checks below matters (e.g. the snitch must exist before the local-DC
 * comparator is built), so do not reorder them casually.
 */
static void loadYaml()
{
    try
    {
        URL url = getStorageConfigURL();
        logger.info("Loading settings from " + url);
        InputStream input;
        try
        {
            input = url.openStream();
        }
        catch (IOException e)
        {
            // getStorageConfigURL should have ruled this out
            throw new AssertionError(e);
        }
        // SnakeYAML maps the document directly onto the Config bean; the seed
        // provider's "parameters" map needs an explicit type description.
        org.yaml.snakeyaml.constructor.Constructor constructor = new org.yaml.snakeyaml.constructor.Constructor(Config.class);
        TypeDescription seedDesc = new TypeDescription(SeedProviderDef.class);
        seedDesc.putMapPropertyType("parameters", String.class, String.class);
        constructor.addTypeDescription(seedDesc);
        Yaml yaml = new Yaml(new Loader(constructor));
        conf = (Config)yaml.load(input);
        logger.info("Data files directories: " + Arrays.toString(conf.data_file_directories));
        logger.info("Commit log directory: " + conf.commitlog_directory);
        // Commit log sync mode: "batch" and "periodic" take mutually
        // exclusive window/period settings.
        if (conf.commitlog_sync == null)
        {
            throw new ConfigurationException("Missing required directive CommitLogSync");
        }
        if (conf.commitlog_sync == Config.CommitLogSync.batch)
        {
            if (conf.commitlog_sync_batch_window_in_ms == null)
            {
                throw new ConfigurationException("Missing value for commitlog_sync_batch_window_in_ms: Double expected.");
            }
            else if (conf.commitlog_sync_period_in_ms != null)
            {
                throw new ConfigurationException("Batch sync specified, but commitlog_sync_period_in_ms found. Only specify commitlog_sync_batch_window_in_ms when using batch sync");
            }
            logger.debug("Syncing log with a batch window of " + conf.commitlog_sync_batch_window_in_ms);
        }
        else
        {
            if (conf.commitlog_sync_period_in_ms == null)
            {
                throw new ConfigurationException("Missing value for commitlog_sync_period_in_ms: Integer expected");
            }
            else if (conf.commitlog_sync_batch_window_in_ms != null)
            {
                throw new ConfigurationException("commitlog_sync_period_in_ms specified, but commitlog_sync_batch_window_in_ms found. Only specify commitlog_sync_period_in_ms when using periodic sync.");
            }
            logger.debug("Syncing log with a period of " + conf.commitlog_sync_period_in_ms);
        }
        // Default commit log cap: 1GB on 64-bit JVMs, 32MB otherwise.
        if (conf.commitlog_total_space_in_mb == null)
            conf.commitlog_total_space_in_mb = System.getProperty("os.arch").contains("64") ? 1024 : 32;
        /* evaluate the DiskAccessMode Config directive, which also affects indexAccessMode selection */
        if (conf.disk_access_mode == Config.DiskAccessMode.auto)
        {
            conf.disk_access_mode = System.getProperty("os.arch").contains("64") ? Config.DiskAccessMode.mmap : Config.DiskAccessMode.standard;
            indexAccessMode = conf.disk_access_mode;
            logger.info("DiskAccessMode 'auto' determined to be " + conf.disk_access_mode + ", indexAccessMode is " + indexAccessMode );
        }
        else if (conf.disk_access_mode == Config.DiskAccessMode.mmap_index_only)
        {
            conf.disk_access_mode = Config.DiskAccessMode.standard;
            indexAccessMode = Config.DiskAccessMode.mmap;
            logger.info("DiskAccessMode is " + conf.disk_access_mode + ", indexAccessMode is " + indexAccessMode );
        }
        else
        {
            indexAccessMode = conf.disk_access_mode;
            logger.info("DiskAccessMode is " + conf.disk_access_mode + ", indexAccessMode is " + indexAccessMode );
        }
        logger.info("disk_failure_policy is " + conf.disk_failure_policy);
        /* Authentication and authorization backend, implementing IAuthenticator and IAuthorizer */
        if (conf.authenticator != null)
            authenticator = FBUtilities.newAuthenticator(conf.authenticator);
        // 'authority' is the deprecated spelling of 'authorizer'; only the
        // AllowAll value is tolerated for backward compatibility.
        if (conf.authority != null)
        {
            logger.warn("Please rename 'authority' to 'authorizer' in cassandra.yaml");
            if (!conf.authority.equals("org.apache.cassandra.auth.AllowAllAuthority"))
                throw new ConfigurationException("IAuthority interface has been deprecated,"
                                                 + " please implement IAuthorizer instead.");
        }
        if (conf.authorizer != null)
            authorizer = FBUtilities.newAuthorizer(conf.authorizer);
        if (authenticator instanceof AllowAllAuthenticator && !(authorizer instanceof AllowAllAuthorizer))
            throw new ConfigurationException("AllowAllAuthenticator can't be used with " + conf.authorizer);
        if (conf.internode_authenticator != null)
            internodeAuthenticator = FBUtilities.construct(conf.internode_authenticator, "internode_authenticator");
        else
            internodeAuthenticator = new AllowAllInternodeAuthenticator();
        authenticator.validateConfiguration();
        authorizer.validateConfiguration();
        internodeAuthenticator.validateConfiguration();
        /* Hashing strategy */
        if (conf.partitioner == null)
        {
            throw new ConfigurationException("Missing directive: partitioner");
        }
        try
        {
            // The "cassandra.partitioner" system property overrides the yaml value.
            partitioner = FBUtilities.newPartitioner(System.getProperty("cassandra.partitioner", conf.partitioner));
        }
        catch (Exception e)
        {
            throw new ConfigurationException("Invalid partitioner class " + conf.partitioner);
        }
        paritionerName = partitioner.getClass().getCanonicalName();
        /* phi convict threshold for FailureDetector */
        if (conf.phi_convict_threshold < 5 || conf.phi_convict_threshold > 16)
        {
            throw new ConfigurationException("phi_convict_threshold must be between 5 and 16");
        }
        /* Thread per pool */
        if (conf.concurrent_reads != null && conf.concurrent_reads < 2)
        {
            throw new ConfigurationException("concurrent_reads must be at least 2");
        }
        if (conf.concurrent_writes != null && conf.concurrent_writes < 2)
        {
            throw new ConfigurationException("concurrent_writes must be at least 2");
        }
        if (conf.concurrent_replicates != null && conf.concurrent_replicates < 2)
        {
            throw new ConfigurationException("concurrent_replicates must be at least 2");
        }
        // Default memtable budget: one third of the JVM max heap, in MB.
        if (conf.memtable_total_space_in_mb == null)
            conf.memtable_total_space_in_mb = (int) (Runtime.getRuntime().maxMemory() / (3 * 1048576));
        if (conf.memtable_total_space_in_mb <= 0)
            throw new ConfigurationException("memtable_total_space_in_mb must be positive");
        logger.info("Global memtable threshold is enabled at {}MB", conf.memtable_total_space_in_mb);
        /* Memtable flush writer threads */
        if (conf.memtable_flush_writers != null && conf.memtable_flush_writers < 1)
        {
            throw new ConfigurationException("memtable_flush_writers must be at least 1");
        }
        else if (conf.memtable_flush_writers == null)
        {
            // default: one flush writer per data directory
            conf.memtable_flush_writers = conf.data_file_directories.length;
        }
        /* Local IP or hostname to bind services to */
        if (conf.listen_address != null)
        {
            try
            {
                listenAddress = InetAddress.getByName(conf.listen_address);
            }
            catch (UnknownHostException e)
            {
                throw new ConfigurationException("Unknown listen_address '" + conf.listen_address + "'");
            }
        }
        /* Gossip Address to broadcast */
        if (conf.broadcast_address != null)
        {
            if (conf.broadcast_address.equals("0.0.0.0"))
            {
                throw new ConfigurationException("broadcast_address cannot be 0.0.0.0!");
            }
            try
            {
                broadcastAddress = InetAddress.getByName(conf.broadcast_address);
            }
            catch (UnknownHostException e)
            {
                throw new ConfigurationException("Unknown broadcast_address '" + conf.broadcast_address + "'");
            }
        }
        /* Local IP or hostname to bind RPC server to */
        if (conf.rpc_address != null)
        {
            try
            {
                rpcAddress = InetAddress.getByName(conf.rpc_address);
            }
            catch (UnknownHostException e)
            {
                throw new ConfigurationException("Unknown host in rpc_address " + conf.rpc_address);
            }
        }
        else
        {
            rpcAddress = FBUtilities.getLocalAddress();
        }
        if (conf.thrift_framed_transport_size_in_mb <= 0)
            throw new ConfigurationException("thrift_framed_transport_size_in_mb must be positive");
        /* end point snitch */
        if (conf.endpoint_snitch == null)
        {
            throw new ConfigurationException("Missing endpoint_snitch directive");
        }
        snitch = createEndpointSnitch(conf.endpoint_snitch);
        EndpointSnitchInfo.create();
        // Comparator ordering local-datacenter endpoints first; must be built
        // after the snitch so getDatacenter() works.
        localDC = snitch.getDatacenter(FBUtilities.getBroadcastAddress());
        localComparator = new Comparator<InetAddress>()
        {
            public int compare(InetAddress endpoint1, InetAddress endpoint2)
            {
                boolean local1 = localDC.equals(snitch.getDatacenter(endpoint1));
                boolean local2 = localDC.equals(snitch.getDatacenter(endpoint2));
                if (local1 && !local2)
                    return -1;
                if (local2 && !local1)
                    return 1;
                return 0;
            }
        };
        /* Request Scheduler setup */
        requestSchedulerOptions = conf.request_scheduler_options;
        if (conf.request_scheduler != null)
        {
            try
            {
                if (requestSchedulerOptions == null)
                {
                    requestSchedulerOptions = new RequestSchedulerOptions();
                }
                Class<?> cls = Class.forName(conf.request_scheduler);
                requestScheduler = (IRequestScheduler) cls.getConstructor(RequestSchedulerOptions.class).newInstance(requestSchedulerOptions);
            }
            catch (ClassNotFoundException e)
            {
                throw new ConfigurationException("Invalid Request Scheduler class " + conf.request_scheduler);
            }
            catch (Exception e)
            {
                throw new ConfigurationException("Unable to instantiate request scheduler", e);
            }
        }
        else
        {
            requestScheduler = new NoScheduler();
        }
        if (conf.request_scheduler_id == RequestSchedulerId.keyspace)
        {
            requestSchedulerId = conf.request_scheduler_id;
        }
        else
        {
            // Default to Keyspace
            requestSchedulerId = RequestSchedulerId.keyspace;
        }
        if (logger.isDebugEnabled() && conf.auto_bootstrap != null)
        {
            logger.debug("setting auto_bootstrap to " + conf.auto_bootstrap);
        }
        logger.info((conf.multithreaded_compaction ? "" : "Not ") + "using multi-threaded compaction");
        if (conf.in_memory_compaction_limit_in_mb != null && conf.in_memory_compaction_limit_in_mb <= 0)
        {
            throw new ConfigurationException("in_memory_compaction_limit_in_mb must be a positive integer");
        }
        if (conf.concurrent_compactors == null)
            conf.concurrent_compactors = FBUtilities.getAvailableProcessors();
        if (conf.concurrent_compactors <= 0)
            throw new ConfigurationException("concurrent_compactors should be strictly greater than 0");
        /* data file and commit log directories. they get created later, when they're needed. */
        if (conf.commitlog_directory != null && conf.data_file_directories != null && conf.saved_caches_directory != null)
        {
            // The three directory kinds must be pairwise distinct.
            for (String datadir : conf.data_file_directories)
            {
                if (datadir.equals(conf.commitlog_directory))
                    throw new ConfigurationException("commitlog_directory must not be the same as any data_file_directories");
                if (datadir.equals(conf.saved_caches_directory))
                    throw new ConfigurationException("saved_caches_directory must not be the same as any data_file_directories");
            }
            if (conf.commitlog_directory.equals(conf.saved_caches_directory))
                throw new ConfigurationException("saved_caches_directory must not be the same as the commitlog_directory");
        }
        else
        {
            if (conf.commitlog_directory == null)
                throw new ConfigurationException("commitlog_directory missing");
            if (conf.data_file_directories == null)
                throw new ConfigurationException("data_file_directories missing; at least one data directory must be specified");
            if (conf.saved_caches_directory == null)
                throw new ConfigurationException("saved_caches_directory missing");
        }
        if (conf.initial_token != null)
            for (String token : tokensFromString(conf.initial_token))
                partitioner.getTokenFactory().validate(token);
        try
        {
            // if key_cache_size_in_mb option was set to "auto" then size of the cache should be "min(5% of Heap (in MB), 100MB)
            keyCacheSizeInMB = (conf.key_cache_size_in_mb == null)
                ? Math.min(Math.max(1, (int) (Runtime.getRuntime().totalMemory() * 0.05 / 1024 / 1024)), 100)
                : conf.key_cache_size_in_mb;
            if (keyCacheSizeInMB < 0)
                throw new NumberFormatException(); // to escape duplicating error message
        }
        catch (NumberFormatException e)
        {
            throw new ConfigurationException("key_cache_size_in_mb option was set incorrectly to '"
                                             + conf.key_cache_size_in_mb + "', supported values are <integer> >= 0.");
        }
        rowCacheProvider = FBUtilities.newCacheProvider(conf.row_cache_provider);
        if(conf.encryption_options != null)
        {
            logger.warn("Please rename encryption_options as server_encryption_options in the yaml");
            //operate under the assumption that server_encryption_options is not set in yaml rather than both
            conf.server_encryption_options = conf.encryption_options;
        }
        // Hardcoded system tables
        List<KSMetaData> systemKeyspaces = Arrays.asList(KSMetaData.systemKeyspace(), KSMetaData.traceKeyspace(), KSMetaData.metadataKeyspace());
        assert systemKeyspaces.size() == Schema.systemKeyspaceNames.size();
        for (KSMetaData ksmd : systemKeyspaces)
        {
            // install the definition
            for (CFMetaData cfm : ksmd.cfMetaData().values())
                Schema.instance.load(cfm);
            Schema.instance.setTableDefinition(ksmd);
        }
        /* Load the seeds for node contact points */
        if (conf.seed_provider == null)
        {
            throw new ConfigurationException("seeds configuration is missing; a minimum of one seed is required.");
        }
        try
        {
            // The seed provider class must expose a Map<String,String> constructor.
            Class<?> seedProviderClass = Class.forName(conf.seed_provider.class_name);
            seedProvider = (SeedProvider)seedProviderClass.getConstructor(Map.class).newInstance(conf.seed_provider.parameters);
        }
        // there are about 5 checked exceptions that could be thrown here.
        catch (Exception e)
        {
            logger.error("Fatal configuration error", e);
            System.err.println(e.getMessage() + "\nFatal configuration error; unable to start server. See log for stacktrace.");
            System.exit(1);
        }
        if (seedProvider.getSeeds().size() == 0)
            throw new ConfigurationException("The seed provider lists no seeds.");
    }
    catch (ConfigurationException e)
    {
        logger.error("Fatal configuration error", e);
        System.err.println(e.getMessage() + "\nFatal configuration error; unable to start server. See log for stacktrace.");
        System.exit(1);
    }
    catch (YAMLException e)
    {
        // NOTE(review): the duplicated word in this log message is pre-existing
        // runtime output, left unchanged here.
        logger.error("Fatal configuration error error", e);
        System.err.println(e.getMessage() + "\nInvalid yaml; unable to start server. See log for stacktrace.");
        System.exit(1);
    }
}
/**
 * Instantiate the endpoint snitch named in the yaml. A bare class name is
 * resolved against the built-in locator package, and the result is wrapped
 * in a DynamicEndpointSnitch when dynamic_snitch is enabled.
 */
private static IEndpointSnitch createEndpointSnitch(String snitchClassName) throws ConfigurationException
{
    String qualifiedName = snitchClassName.contains(".")
                         ? snitchClassName
                         : "org.apache.cassandra.locator." + snitchClassName;
    IEndpointSnitch baseSnitch = FBUtilities.construct(qualifiedName, "snitch");
    if (conf.dynamic_snitch)
        return new DynamicEndpointSnitch(baseSnitch);
    return baseSnitch;
}
/**
 * load keyspace (table) definitions, but do not initialize the table instances.
 * Reads definitions from the schema_keyspaces system CF when present;
 * otherwise falls back to the pre-CQL migration storage keyed by the last
 * migration id, logging guidance when no schema exists at all.
 */
public static void loadSchemas() throws IOException
{
    ColumnFamilyStore schemaCFS = SystemTable.schemaCFS(SystemTable.SCHEMA_KEYSPACES_CF);
    // if table with definitions is empty try loading the old way
    if (schemaCFS.estimateKeys() == 0)
    {
        // we can load tables from local storage if a version is set in the system table and that actually maps to
        // real data in the definitions table. If we do end up loading from xml, store the definitions so that we
        // don't load from xml anymore.
        UUID uuid = MigrationManager.getLastMigrationId();
        if (uuid == null)
        {
            logger.info("Couldn't detect any schema definitions in local storage.");
            // peek around the data directories to see if anything is there.
            if (hasExistingNoSystemTables())
                logger.info("Found table data in data directories. Consider using cqlsh to define your schema.");
            else
                logger.info("To create keyspaces and column families, see 'help create' in cqlsh.");
        }
        else
        {
            logger.info("Loading schema version " + uuid.toString());
            Collection<KSMetaData> tableDefs = DefsTable.loadFromStorage(uuid);
            // happens when someone manually deletes all tables and restarts.
            if (tableDefs.size() == 0)
            {
                logger.warn("No schema definitions were found in local storage.");
            }
            else // if non-system tables where found, trying to load them
            {
                Schema.instance.load(tableDefs);
            }
        }
    }
    else
    {
        Schema.instance.load(DefsTable.loadFromTable());
    }
    // recompute the schema version hash after everything is loaded
    Schema.instance.updateVersion();
}
/**
 * Checks whether any data directory contains a keyspace directory other than
 * the system keyspaces, i.e. whether pre-existing user table data is on disk.
 *
 * @return true if at least one non-system keyspace directory exists
 */
private static boolean hasExistingNoSystemTables()
{
    for (String dataDir : getAllDataFileLocations())
    {
        File dataPath = new File(dataDir);
        if (dataPath.exists() && dataPath.isDirectory())
        {
            // see if there are non-system keyspace directories present.
            File[] nonSystemDirs = dataPath.listFiles(new FileFilter()
            {
                public boolean accept(File pathname)
                {
                    return (pathname.isDirectory() && !Schema.systemKeyspaceNames.contains(pathname.getName()));
                }
            });
            // listFiles() returns null on an I/O error or if the directory vanished
            // between the isDirectory() check and this call; treat that as "no data"
            // instead of throwing a NullPointerException.
            if (nonSystemDirs != null && nonSystemDirs.length > 0)
                return true;
        }
    }
    return false;
}
/** @return the configured client authenticator implementation */
public static IAuthenticator getAuthenticator()
{
    return authenticator;
}

/** @return the configured authorizer implementation */
public static IAuthorizer getAuthorizer()
{
    return authorizer;
}

/** @return permissions validity period, in milliseconds */
public static int getPermissionsValidity()
{
    return conf.permissions_validity_in_ms;
}

/** @return the Thrift framed-transport size limit, converted from MB to bytes */
public static int getThriftFramedTransportSize()
{
    return conf.thrift_framed_transport_size_in_mb * 1024 * 1024;
}
/**
 * Creates all storage-related directories (data file directories, commitlog
 * directory, saved caches directory). Exits the process on any configuration
 * or filesystem error, since the server cannot run without them.
 */
public static void createAllDirectories()
{
    try
    {
        if (conf.data_file_directories.length == 0)
            throw new ConfigurationException("At least one DataFileDirectory must be specified");
        for (String dataFileDirectory : conf.data_file_directories)
        {
            FileUtils.createDirectory(dataFileDirectory);
        }
        if (conf.commitlog_directory == null)
            throw new ConfigurationException("commitlog_directory must be specified");
        FileUtils.createDirectory(conf.commitlog_directory);
        if (conf.saved_caches_directory == null)
            throw new ConfigurationException("saved_caches_directory must be specified");
        FileUtils.createDirectory(conf.saved_caches_directory);
    }
    catch (ConfigurationException e)
    {
        // Pass the exception itself so the stack trace reaches the log,
        // not just the message text.
        logger.error("Fatal error: " + e.getMessage(), e);
        System.err.println("Bad configuration; unable to start server");
        System.exit(1);
    }
    catch (FSWriteError e)
    {
        logger.error("Fatal error: " + e.getMessage(), e);
        // getCause() may be null; fall back to the error's own message
        // instead of risking an NPE on the failure path.
        Throwable cause = e.getCause();
        String reason = (cause != null) ? cause.getMessage() : e.getMessage();
        System.err.println(reason + "; unable to start server");
        System.exit(1);
    }
}
/** @return the configured partitioner instance */
public static IPartitioner<?> getPartitioner()
{
    return partitioner;
}

/** @return the partitioner class name as given in the configuration */
public static String getPartitionerName()
{
    // NOTE(review): backing field is spelled "paritionerName" (sic) elsewhere
    // in this class; renaming it is out of scope for this block.
    return paritionerName;
}

/* For tests ONLY, don't use otherwise or all hell will break loose */
public static void setPartitioner(IPartitioner<?> newPartitioner)
{
    partitioner = newPartitioner;
}

/** @return the active endpoint snitch */
public static IEndpointSnitch getEndpointSnitch()
{
    return snitch;
}

/** Replaces the active endpoint snitch. */
public static void setEndpointSnitch(IEndpointSnitch eps)
{
    snitch = eps;
}

/** @return the configured request scheduler */
public static IRequestScheduler getRequestScheduler()
{
    return requestScheduler;
}

/** @return options passed to the request scheduler */
public static RequestSchedulerOptions getRequestSchedulerOptions()
{
    return requestSchedulerOptions;
}

/** @return the id the request scheduler keys on */
public static RequestSchedulerId getRequestSchedulerId()
{
    return requestSchedulerId;
}

/** @return the column index size, converted from KB to bytes */
public static int getColumnIndexSize()
{
    return conf.column_index_size_in_kb * 1024;
}

/**
 * @return initial tokens, taken from the "cassandra.initial_token" system
 * property when set, otherwise from the configuration file.
 */
public static Collection<String> getInitialTokens()
{
    return tokensFromString(System.getProperty("cassandra.initial_token", conf.initial_token));
}
/**
 * Splits a comma-separated token list into individual tokens, stripping
 * leading and trailing whitespace from each entry.
 *
 * @param tokenString comma-separated tokens; may be null
 * @return the parsed tokens; empty when tokenString is null
 */
public static Collection<String> tokensFromString(String tokenString)
{
    List<String> tokens = new ArrayList<String>();
    if (tokenString == null)
        return tokens;
    for (String rawToken : tokenString.split(","))
    {
        String cleaned = rawToken.replaceAll("^\\s+", "").replaceAll("\\s+$", "");
        tokens.add(cleaned);
    }
    return tokens;
}
/** @return the configured number of tokens per node (vnodes) */
public static Integer getNumTokens()
{
    return conf.num_tokens;
}

/**
 * @return tokens to replace, from the "cassandra.replace_token" system
 * property; empty when the property is unset.
 */
public static Collection<String> getReplaceTokens()
{
    return tokensFromString(System.getProperty("cassandra.replace_token", null));
}
/**
 * @return the host id of the node to replace, parsed from the
 * "cassandra.replace_node" system property, or null when the property is
 * unset. A malformed value still propagates IllegalArgumentException, as
 * before.
 */
public static UUID getReplaceNode()
{
    // Explicit null check instead of the previous pattern of catching the
    // NullPointerException thrown by UUID.fromString(null).
    String replaceNode = System.getProperty("cassandra.replace_node");
    if (replaceNode == null)
        return null;
    return UUID.fromString(replaceNode);
}
/**
 * @return true when this node is replacing another, requested either via
 * replace tokens or a replace node id.
 */
public static boolean isReplacing()
{
    return !getReplaceTokens().isEmpty() || getReplaceNode() != null;
}
/** @return the configured cluster name */
public static String getClusterName()
{
    return conf.cluster_name;
}

/** @return maximum number of streaming retries */
public static int getMaxStreamingRetries()
{
    return conf.max_streaming_retries;
}

/** @return storage port; overridable via the "cassandra.storage_port" system property */
public static int getStoragePort()
{
    return Integer.parseInt(System.getProperty("cassandra.storage_port", conf.storage_port.toString()));
}

/** @return SSL storage port; overridable via "cassandra.ssl_storage_port" */
public static int getSSLStoragePort()
{
    return Integer.parseInt(System.getProperty("cassandra.ssl_storage_port", conf.ssl_storage_port.toString()));
}

/** @return Thrift RPC port; overridable via "cassandra.rpc_port" */
public static int getRpcPort()
{
    return Integer.parseInt(System.getProperty("cassandra.rpc_port", conf.rpc_port.toString()));
}

/** @return the generic (default) request timeout, in milliseconds */
public static long getRpcTimeout()
{
    return conf.request_timeout_in_ms;
}

/** Sets the generic request timeout, in milliseconds. */
public static void setRpcTimeout(Long timeOutInMillis)
{
    conf.request_timeout_in_ms = timeOutInMillis;
}

/** @return read request timeout, in milliseconds */
public static long getReadRpcTimeout()
{
    return conf.read_request_timeout_in_ms;
}

/** Sets the read request timeout, in milliseconds. */
public static void setReadRpcTimeout(Long timeOutInMillis)
{
    conf.read_request_timeout_in_ms = timeOutInMillis;
}

/** @return range-slice request timeout, in milliseconds */
public static long getRangeRpcTimeout()
{
    return conf.range_request_timeout_in_ms;
}

/** Sets the range-slice request timeout, in milliseconds. */
public static void setRangeRpcTimeout(Long timeOutInMillis)
{
    conf.range_request_timeout_in_ms = timeOutInMillis;
}

/** @return write request timeout, in milliseconds */
public static long getWriteRpcTimeout()
{
    return conf.write_request_timeout_in_ms;
}

/** Sets the write request timeout, in milliseconds. */
public static void setWriteRpcTimeout(Long timeOutInMillis)
{
    conf.write_request_timeout_in_ms = timeOutInMillis;
}

/** @return truncate request timeout, in milliseconds */
public static long getTruncateRpcTimeout()
{
    return conf.truncate_request_timeout_in_ms;
}

/** Sets the truncate request timeout, in milliseconds. */
public static void setTruncateRpcTimeout(Long timeOutInMillis)
{
    conf.truncate_request_timeout_in_ms = timeOutInMillis;
}

/** @return whether cross-node timeout accounting is enabled */
public static boolean hasCrossNodeTimeout()
{
    return conf.cross_node_timeout;
}

// not part of the Verb enum so we can change timeouts easily via JMX
/**
 * Maps a messaging verb to its configured timeout: reads, range slices,
 * truncates, and writes (including read repair) each have a dedicated
 * timeout; everything else falls back to the generic request timeout.
 */
public static long getTimeout(MessagingService.Verb verb)
{
    switch (verb)
    {
        case READ:
            return getReadRpcTimeout();
        case RANGE_SLICE:
            return getRangeRpcTimeout();
        case TRUNCATE:
            return getTruncateRpcTimeout();
        case READ_REPAIR:
        case MUTATION:
            return getWriteRpcTimeout();
        default:
            return getRpcTimeout();
    }
}

/**
 * @return the minimum configured {read, write, range, truncate, misc} timeout
 */
public static long getMinRpcTimeout()
{
    return Longs.min(getRpcTimeout(), getReadRpcTimeout(), getRangeRpcTimeout(), getWriteRpcTimeout(), getTruncateRpcTimeout());
}
/** @return the failure-detector phi conviction threshold */
public static double getPhiConvictThreshold()
{
    return conf.phi_convict_threshold;
}

/** Sets the failure-detector phi conviction threshold. */
public static void setPhiConvictThreshold(double phiConvictThreshold)
{
    conf.phi_convict_threshold = phiConvictThreshold;
}

// --- concurrency settings ---

/** @return number of concurrent read threads */
public static int getConcurrentReaders()
{
    return conf.concurrent_reads;
}

/** @return number of concurrent write threads */
public static int getConcurrentWriters()
{
    return conf.concurrent_writes;
}

/** @return number of concurrent replicate threads */
public static int getConcurrentReplicators()
{
    return conf.concurrent_replicates;
}

/** @return number of memtable flush writer threads */
public static int getFlushWriters()
{
    return conf.memtable_flush_writers;
}

/** @return in-memory compaction limit, converted from MB to bytes */
public static int getInMemoryCompactionLimit()
{
    return conf.in_memory_compaction_limit_in_mb * 1024 * 1024;
}

/** Sets the in-memory compaction limit, in MB. */
public static void setInMemoryCompactionLimit(int sizeInMB)
{
    conf.in_memory_compaction_limit_in_mb = sizeInMB;
}

/** @return number of concurrent compactor threads */
public static int getConcurrentCompactors()
{
    return conf.concurrent_compactors;
}

/** @return whether multithreaded compaction is enabled */
public static boolean isMultithreadedCompaction()
{
    return conf.multithreaded_compaction;
}

/** @return compaction throughput cap, in MB/s */
public static int getCompactionThroughputMbPerSec()
{
    return conf.compaction_throughput_mb_per_sec;
}

/** Sets the compaction throughput cap, in MB/s. */
public static void setCompactionThroughputMbPerSec(int value)
{
    conf.compaction_throughput_mb_per_sec = value;
}

/** @return outbound streaming throughput cap, in megabits/s */
public static int getStreamThroughputOutboundMegabitsPerSec()
{
    return conf.stream_throughput_outbound_megabits_per_sec;
}

/** Sets the outbound streaming throughput cap, in megabits/s. */
public static void setStreamThroughputOutboundMegabitsPerSec(int value)
{
    conf.stream_throughput_outbound_megabits_per_sec = value;
}

// --- storage locations ---

/** @return all configured data file directories */
public static String[] getAllDataFileLocations()
{
    return conf.data_file_directories;
}

/** @return the commitlog directory */
public static String getCommitLogLocation()
{
    return conf.commitlog_directory;
}

/**
 * size of commitlog segments to allocate, converted from MB to bytes
 */
public static int getCommitLogSegmentSize()
{
    return conf.commitlog_segment_size_in_mb * 1024 * 1024;
}

/** @return the saved caches directory */
public static String getSavedCachesLocation()
{
    return conf.saved_caches_directory;
}

/** @return an unmodifiable snapshot of the seed provider's current seeds */
public static Set<InetAddress> getSeeds()
{
    return Collections.unmodifiableSet(new HashSet<InetAddress>(seedProvider.getSeeds()));
}

/** @return the address this node listens on for internode traffic */
public static InetAddress getListenAddress()
{
    return listenAddress;
}

/** @return the address this node broadcasts to other nodes */
public static InetAddress getBroadcastAddress()
{
    return broadcastAddress;
}

/** @return the internode authenticator implementation */
public static IInternodeAuthenticator getInternodeAuthenticator()
{
    return internodeAuthenticator;
}

/** Overrides the broadcast address. */
public static void setBroadcastAddress(InetAddress broadcastAdd)
{
    broadcastAddress = broadcastAdd;
}

// --- Thrift RPC settings ---

/** @return whether the Thrift RPC server should be started */
public static boolean startRpc()
{
    return conf.start_rpc;
}

/** @return the address the RPC server binds to */
public static InetAddress getRpcAddress()
{
    return rpcAddress;
}

/** @return the RPC server implementation type */
public static String getRpcServerType()
{
    return conf.rpc_server_type;
}

/** @return whether RPC connections use TCP keepalive */
public static boolean getRpcKeepAlive()
{
    return conf.rpc_keepalive;
}

/** @return minimum RPC thread count; may be null when unset */
public static Integer getRpcMinThreads()
{
    return conf.rpc_min_threads;
}

/** @return maximum RPC thread count; may be null when unset */
public static Integer getRpcMaxThreads()
{
    return conf.rpc_max_threads;
}

/** @return RPC socket send buffer size, in bytes; may be null when unset */
public static Integer getRpcSendBufferSize()
{
    return conf.rpc_send_buff_size_in_bytes;
}

/** @return RPC socket receive buffer size, in bytes; may be null when unset */
public static Integer getRpcRecvBufferSize()
{
    return conf.rpc_recv_buff_size_in_bytes;
}

/** @return internode socket send buffer size, in bytes; may be null when unset */
public static Integer getInternodeSendBufferSize()
{
    return conf.internode_send_buff_size_in_bytes;
}

/** @return internode socket receive buffer size, in bytes; may be null when unset */
public static Integer getInternodeRecvBufferSize()
{
    return conf.internode_recv_buff_size_in_bytes;
}

// --- native transport (CQL) settings ---

/** @return whether the native (CQL) transport should be started */
public static boolean startNativeTransport()
{
    return conf.start_native_transport;
}

/** @return address for the native transport (shares the RPC address) */
public static InetAddress getNativeTransportAddress()
{
    return getRpcAddress();
}

/** @return native transport port; overridable via "cassandra.native_transport_port" */
public static int getNativeTransportPort()
{
    return Integer.parseInt(System.getProperty("cassandra.native_transport_port", conf.native_transport_port.toString()));
}

/** @return minimum native transport thread count; may be null when unset */
public static Integer getNativeTransportMinThreads()
{
    return conf.native_transport_min_threads;
}

/** @return maximum native transport thread count; may be null when unset */
public static Integer getNativeTransportMaxThreads()
{
    return conf.native_transport_max_threads;
}

// --- commitlog sync settings ---

/** @return commitlog batch sync window, in milliseconds */
public static double getCommitLogSyncBatchWindow()
{
    return conf.commitlog_sync_batch_window_in_ms;
}

/** @return commitlog periodic sync interval, in milliseconds */
public static int getCommitLogSyncPeriod() {
    return conf.commitlog_sync_period_in_ms;
}

/** @return the configured commitlog sync mode */
public static Config.CommitLogSync getCommitLogSync()
{
    return conf.commitlog_sync;
}

/** @return disk access mode for data files */
public static Config.DiskAccessMode getDiskAccessMode()
{
    return conf.disk_access_mode;
}

/** @return disk access mode for index files */
public static Config.DiskAccessMode getIndexAccessMode()
{
    return indexAccessMode;
}

/** Sets the policy applied when a disk failure is detected. */
public static void setDiskFailurePolicy(Config.DiskFailurePolicy policy)
{
    conf.disk_failure_policy = policy;
}

/** @return the policy applied when a disk failure is detected */
public static Config.DiskFailurePolicy getDiskFailurePolicy()
{
    return conf.disk_failure_policy;
}

/** @return whether to snapshot before each compaction */
public static boolean isSnapshotBeforeCompaction()
{
    return conf.snapshot_before_compaction;
}

/** @return whether automatic snapshots are enabled */
public static boolean isAutoSnapshot() {
    return conf.auto_snapshot;
}

/** @return whether this node auto-bootstraps on startup */
public static boolean isAutoBootstrap()
{
    return conf.auto_bootstrap;
}

// --- hinted handoff settings ---

/** Enables or disables hinted handoff. */
public static void setHintedHandoffEnabled(boolean hintedHandoffEnabled)
{
    conf.hinted_handoff_enabled = hintedHandoffEnabled;
}

/** @return whether hinted handoff is enabled */
public static boolean hintedHandoffEnabled()
{
    return conf.hinted_handoff_enabled;
}

/** Sets the maximum hint window, in milliseconds. */
public static void setMaxHintWindow(int ms)
{
    conf.max_hint_window_in_ms = ms;
}

/** @return the maximum hint window, in milliseconds */
public static int getMaxHintWindow()
{
    return conf.max_hint_window_in_ms;
}

/** @return the sampling interval for index summaries */
public static Integer getIndexInterval()
{
    return conf.index_interval;
}

/**
 * Builds the on-disk path for a serialized cache file.
 * NOTE(review): the ".db" extension is only appended when version is non-null,
 * so a null version yields an extensionless path — confirm this is the
 * intended legacy-format behavior before "fixing" it.
 */
public static File getSerializedCachePath(String ksName, String cfName, CacheService.CacheType cacheType, String version)
{
    return new File(conf.saved_caches_directory + File.separator + ksName + "-" + cfName + "-" + cacheType + (version == null ? "" : "-" + version + ".db"));
}

// --- dynamic snitch tuning ---

/** @return dynamic snitch score update interval, in milliseconds */
public static int getDynamicUpdateInterval()
{
    return conf.dynamic_snitch_update_interval_in_ms;
}

/** Sets the dynamic snitch score update interval, in milliseconds. */
public static void setDynamicUpdateInterval(Integer dynamicUpdateInterval)
{
    conf.dynamic_snitch_update_interval_in_ms = dynamicUpdateInterval;
}

/** @return dynamic snitch score reset interval, in milliseconds */
public static int getDynamicResetInterval()
{
    return conf.dynamic_snitch_reset_interval_in_ms;
}

/** Sets the dynamic snitch score reset interval, in milliseconds. */
public static void setDynamicResetInterval(Integer dynamicResetInterval)
{
    conf.dynamic_snitch_reset_interval_in_ms = dynamicResetInterval;
}

/** @return dynamic snitch badness threshold */
public static double getDynamicBadnessThreshold()
{
    return conf.dynamic_snitch_badness_threshold;
}

/** Sets the dynamic snitch badness threshold. */
public static void setDynamicBadnessThreshold(Double dynamicBadnessThreshold)
{
    conf.dynamic_snitch_badness_threshold = dynamicBadnessThreshold;
}

/** @return internode (server-to-server) encryption options */
public static ServerEncryptionOptions getServerEncryptionOptions()
{
    return conf.server_encryption_options;
}

/** @return client-to-server encryption options */
public static ClientEncryptionOptions getClientEncryptionOptions()
{
    return conf.client_encryption_options;
}

/** @return memory-pressure threshold for flushing the largest memtables */
public static double getFlushLargestMemtablesAt()
{
    return conf.flush_largest_memtables_at;
}

/** @return memory-pressure threshold for reducing cache sizes */
public static double getReduceCacheSizesAt()
{
    return conf.reduce_cache_sizes_at;
}

/** @return fraction caches are reduced to under memory pressure */
public static double getReduceCacheCapacityTo()
{
    return conf.reduce_cache_capacity_to;
}

/** @return hinted handoff throttle, in KB */
public static int getHintedHandoffThrottleInKB()
{
    return conf.hinted_handoff_throttle_in_kb;
}

/** @return maximum number of hint delivery threads */
public static int getMaxHintsThread()
{
    return conf.max_hints_delivery_threads;
}

/** @return whether the key cache is preheated during compaction */
public static boolean getPreheatKeyCache()
{
    return conf.compaction_preheat_key_cache;
}

/** @return whether incremental backups are enabled */
public static boolean isIncrementalBackupsEnabled()
{
    return conf.incremental_backups;
}

/** Enables or disables incremental backups. */
public static void setIncrementalBackupsEnabled(boolean value)
{
    conf.incremental_backups = value;
}

/** @return the memtable flush queue size */
public static int getFlushQueueSize()
{
    return conf.memtable_flush_queue_size;
}

/** @return total memtable space, in MB; asserts it has been configured */
public static int getTotalMemtableSpaceInMB()
{
    // should only be called if estimatesRealMemtableSize() is true
    assert conf.memtable_total_space_in_mb > 0;
    return conf.memtable_total_space_in_mb;
}

/** @return total commitlog space, in MB */
public static long getTotalCommitlogSpaceInMB()
{
    return conf.commitlog_total_space_in_mb;
}

/** @return whether trickle fsync is enabled */
public static boolean getTrickleFsync()
{
    return conf.trickle_fsync;
}

/** @return trickle fsync interval, in KB */
public static int getTrickleFsyncIntervalInKb()
{
    return conf.trickle_fsync_interval_in_kb;
}

// --- cache settings ---

/** @return key cache size, in MB */
public static long getKeyCacheSizeInMB()
{
    return keyCacheSizeInMB;
}

/** @return key cache save period, in seconds per configuration convention */
public static int getKeyCacheSavePeriod()
{
    return conf.key_cache_save_period;
}

/** Sets the key cache save period. */
public static void setKeyCacheSavePeriod(int keyCacheSavePeriod)
{
    conf.key_cache_save_period = keyCacheSavePeriod;
}

/** @return number of key cache keys to save */
public static int getKeyCacheKeysToSave()
{
    return conf.key_cache_keys_to_save;
}

/** @return row cache size, in MB */
public static long getRowCacheSizeInMB()
{
    return conf.row_cache_size_in_mb;
}

/** @return row cache save period */
public static int getRowCacheSavePeriod()
{
    return conf.row_cache_save_period;
}

/** Sets the row cache save period. */
public static void setRowCacheSavePeriod(int rowCacheSavePeriod)
{
    conf.row_cache_save_period = rowCacheSavePeriod;
}

/** @return number of row cache keys to save */
public static int getRowCacheKeysToSave()
{
    return conf.row_cache_keys_to_save;
}

/** @return the row cache provider implementation */
public static IRowCacheProvider getRowCacheProvider()
{
    return rowCacheProvider;
}

/** @return streaming socket timeout, in milliseconds */
public static int getStreamingSocketTimeout()
{
    return conf.streaming_socket_timeout_in_ms;
}

/** @return this node's local datacenter name */
public static String getLocalDataCenter()
{
    return localDC;
}

/** @return comparator preferring local endpoints */
public static Comparator<InetAddress> getLocalComparator()
{
    return localComparator;
}

/** @return internode compression mode */
public static Config.InternodeCompression internodeCompression()
{
    return conf.internode_compression;
}

/** @return whether TCP_NODELAY is used for inter-datacenter connections */
public static boolean getInterDCTcpNoDelay()
{
    return conf.inter_dc_tcp_nodelay;
}
}
| |
package com.busqueumlugar.service.impl;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import org.springframework.validation.BindingResult;
import com.busqueumlugar.dao.ImovelDao;
import com.busqueumlugar.dao.ParamservicoDao;
import com.busqueumlugar.dao.ServicoDao;
import com.busqueumlugar.dao.UsuarioDao;
import com.busqueumlugar.enumerador.AcaoNotificacaoEnum;
import com.busqueumlugar.enumerador.PerfilUsuarioOpcaoEnum;
import com.busqueumlugar.enumerador.ServicoValueEnum;
import com.busqueumlugar.enumerador.StatusPagtoOpcaoEnum;
import com.busqueumlugar.enumerador.TipoNotificacaoEnum;
import com.busqueumlugar.enumerador.TipoParamServicoOpcaoEnum;
import com.busqueumlugar.form.AdministracaoForm;
import com.busqueumlugar.form.MensagemForm;
import com.busqueumlugar.form.ParamservicoForm;
import com.busqueumlugar.form.RelatorioForm;
import com.busqueumlugar.form.ServicoForm;
import com.busqueumlugar.form.UsuarioForm;
import com.busqueumlugar.model.Formapagamento;
import com.busqueumlugar.model.Imovel;
import com.busqueumlugar.model.Infoservico;
import com.busqueumlugar.model.Paramservico;
import com.busqueumlugar.model.Plano;
import com.busqueumlugar.model.Planousuario;
import com.busqueumlugar.model.RelatorioQuantAssinatura;
import com.busqueumlugar.model.Servico;
import com.busqueumlugar.model.Usuario;
import com.busqueumlugar.service.ImovelService;
import com.busqueumlugar.service.InfoservicoService;
import com.busqueumlugar.service.NotificacaoService;
import com.busqueumlugar.service.ParamservicoService;
import com.busqueumlugar.service.PlanoService;
import com.busqueumlugar.service.ServicoService;
import com.busqueumlugar.service.UsuarioService;
import com.busqueumlugar.util.DateUtil;
import com.busqueumlugar.util.GenerateAccessToken;
import com.paypal.api.payments.Amount;
import com.paypal.api.payments.Details;
import com.paypal.api.payments.Item;
import com.paypal.api.payments.ItemList;
import com.paypal.api.payments.Links;
import com.paypal.api.payments.Payer;
import com.paypal.api.payments.Payment;
import com.paypal.api.payments.PaymentExecution;
import com.paypal.api.payments.RedirectUrls;
import com.paypal.api.payments.Transaction;
import com.paypal.base.rest.APIContext;
import com.paypal.base.rest.PayPalRESTException;
import com.busqueumlugar.util.MessageUtils;
@Service
public class ServicoServiceImpl implements ServicoService {
private static final Logger log = LoggerFactory.getLogger(ServicoServiceImpl.class);

// Primary DAO for Servico persistence.
@Autowired
private ServicoDao dao;
// Service/DAO pair for service parameters (pricing, billing flags).
@Autowired
private ParamservicoService paramservicoService;
@Autowired
private ParamservicoDao paramservicoDao;
// Service/DAO pair for user lookup and updates.
@Autowired
private UsuarioService usuarioService;
@Autowired
private UsuarioDao usuarioDao;
// Service/DAO pair for property (imovel) lookup and updates.
@Autowired
private ImovelService imovelService;
@Autowired
private ImovelDao imovelDao;
// Lookup of purchasable service descriptions (label, price, duration).
@Autowired
private InfoservicoService infoservicoService;
@Autowired
private PlanoService planoService;
// Used to notify users when payments are processed.
@Autowired
private NotificacaoService notificacaoService;
/** Fetches a Servico by its id; delegates directly to the DAO. */
public Servico recuperarServicoPorId(Long id) {
    return dao.findServicoById(id);
}
/**
 * Registers a payment request for a user-scoped service: persists a new
 * Servico with status SOLICITADO, linked to the requesting user and the
 * service parameter matching the requested action, not tied to a plan.
 *
 * NOTE(review): the opcaoTempoPagto parameter is never read — the value is
 * taken from frm.getOpcaoTempoPagto() instead; confirm whether the parameter
 * is dead or the form field is the wrong source.
 */
@Transactional
public void cadastrarSolicitacaoPagtoServico(ServicoForm frm, String opcaoTempoPagto) {
    Servico servico = new Servico();
    servico.setDataSolicitacao(new Date());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.SOLICITADO.getRotulo());
    servico.setFormaPgto(frm.getNomeFormaPagto());
    servico.setUsuario(usuarioDao.findUsuario(frm.getIdUsuario()));
    servico.setOpcaoTempoPagto(frm.getOpcaoTempoPagto());
    servico.setDescServico(frm.getAcao());
    servico.setParamServico(paramservicoDao.findParamservicoPorNome(frm.getAcao()));
    servico.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
    servico.setAssociadoPlano("N");
    dao.save(servico);
}
/**
 * Registers a subscription payment request. The subscription type (action) is
 * derived from the user's profile (standard, broker, or agency), a Servico
 * with status SOLICITADO is persisted, and the user's status is flagged to
 * record that a subscription was requested.
 */
@Transactional
public void cadastrarSolicitacaoPagtoAssinatura(ServicoForm frm) {
    Usuario usuario = usuarioService.recuperarUsuarioPorId(frm.getIdUsuario());
    // Pick the subscription flavor matching the user's profile.
    if ( usuario.getPerfil().equals(PerfilUsuarioOpcaoEnum.PADRAO.getRotulo()) )
        frm.setAcao(ServicoValueEnum.ASSINATURA_PADRAO.getRotulo());
    else if ( usuario.getPerfil().equals(PerfilUsuarioOpcaoEnum.CORRETOR.getRotulo()) )
        frm.setAcao(ServicoValueEnum.ASSINATURA_CORRETOR.getRotulo());
    else if ( usuario.getPerfil().equals(PerfilUsuarioOpcaoEnum.IMOBILIARIA.getRotulo()) )
        frm.setAcao(ServicoValueEnum.ASSINATURA_IMOBILIARIA.getRotulo());
    Servico servico = new Servico();
    servico.setDataSolicitacao(new Date());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.SOLICITADO.getRotulo());
    servico.setFormaPgto(frm.getNomeFormaPagto());
    servico.setUsuario(usuario);
    servico.setOpcaoTempoPagto(frm.getOpcaoTempoPagto());
    servico.setDescServico(frm.getAcao());
    servico.setParamServico(paramservicoDao.findParamservicoPorNome(frm.getAcao()));
    servico.setTempoDuracao(frm.getTempoDuracao());
    servico.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo());
    servico.setAssociadoPlano("N");
    dao.save(servico);
    // Update the user's status to record that a subscription request was made.
    usuarioService.atualizarStatusUsuario(servico.getUsuario().getId(), "assinaturaSolicitada");
}
/**
 * Registers a payment request for a quantity-based service linked to a
 * property (imovel). "Add photos" requests are typed as property services;
 * everything else as user services.
 *
 * NOTE(review): unlike the sibling overloads, this method never sets the
 * Servico's usuario, and the paramservicoForm and opcaoQuantFoto parameters
 * are never read — confirm whether these omissions are intentional.
 */
@Transactional
public void cadastrarSolicitacaoPagtoServico(ServicoForm frm, ParamservicoForm paramservicoForm, String opcaoQuantFoto, Long idImovel) {
    Servico servico = new Servico();
    servico.setDataSolicitacao(new Date());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.SOLICITADO.getRotulo());
    servico.setFormaPgto(frm.getNomeFormaPagto());
    servico.setQuantidadeServico(frm.getQuantidadeServico());
    servico.setDescServico(frm.getAcao());
    servico.setParamServico(paramservicoDao.findParamservicoPorNome(frm.getAcao()));
    servico.setImovel(imovelDao.findImovelById(idImovel));
    servico.setAssociadoPlano("N");
    if ( frm.getAcao().equals(ServicoValueEnum.ADICIONAR_FOTOS.getRotulo()) )
        servico.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo());
    else
        servico.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
    dao.save(servico);
}
/**
 * Registers a payment request for a property-scoped service: persists a new
 * Servico with status SOLICITADO linking the requesting user, the target
 * property, and the service parameter for the requested action.
 */
@Transactional
public void cadastrarSolicitacaoPagtoServicoImovel(ServicoForm frm, Long idImovel) {
    Servico solicitacao = new Servico();
    // Request metadata: timestamp, status, and payment method.
    solicitacao.setDataSolicitacao(new Date());
    solicitacao.setStatusPgto(StatusPagtoOpcaoEnum.SOLICITADO.getRotulo());
    solicitacao.setFormaPgto(frm.getNomeFormaPagto());
    // What is being bought, how much of it, and for which property/user.
    solicitacao.setDescServico(frm.getAcao());
    solicitacao.setQuantidadeServico(frm.getQuantidadeServico());
    solicitacao.setParamServico(paramservicoDao.findParamservicoPorNome(frm.getAcao()));
    solicitacao.setUsuario(usuarioDao.findUsuario(frm.getIdUsuario()));
    solicitacao.setImovel(imovelDao.findImovelById(idImovel));
    // Property-scoped service, not tied to a plan.
    solicitacao.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo());
    solicitacao.setAssociadoPlano("N");
    dao.save(solicitacao);
}
/**
 * Simulates a payment-gateway confirmation for a service: marks the Servico
 * as PAGO, stamps the payment date, computes the service end date from its
 * duration, persists it, and notifies the requesting user.
 */
@Transactional
public void simulaRetornoPagto(long idServico) {
    DateUtil dtAtual = new DateUtil();
    Servico servico = dao.findServicoById(idServico);
    servico.setDataPagto(dtAtual.getTime());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
    // End date = payment date + duration.
    // NOTE(review): assumes getTempoDuracao() is expressed in days — confirm.
    int quantDiasFimServico = servico.getTempoDuracao();
    Calendar cal = Calendar.getInstance();
    cal.setTime(servico.getDataPagto());
    cal.add(Calendar.DAY_OF_MONTH, quantDiasFimServico);
    servico.setDataFimServico(cal.getTime());
    dao.save(servico);
    // Notify the requesting user that the payment was received.
    notificacaoService.cadastrarNotificacao(servico.getUsuario().getId(),
            AcaoNotificacaoEnum.SERVICO.getRotulo(),
            MessageUtils.getMessage("msg.sucesso.recebimento.pagto.servico")+": " +servico.getLabelServico(),
            TipoNotificacaoEnum.SERVICO.getRotulo(),
            0l);
}
/**
 * Simulates a payment-gateway confirmation for a property-scoped service.
 * For property (imovel) services, the purchased quantity is credited to the
 * corresponding user quota (photos, listings, referrals, or e-mail
 * referrals) before the Servico is marked PAGO. The requesting user is then
 * notified.
 */
@Transactional
public void simulaRetornoPagtoImovel(long idServico) {
    DateUtil dtAtual = new DateUtil();
    Servico servico = dao.findServicoById(idServico);
    if (servico.getTipoServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo())){
        Usuario usuario = servico.getUsuario();
        // Credit the purchased quantity to the matching user quota.
        if ( servico.getDescServico().equals(ServicoValueEnum.ADICIONAR_FOTOS.getRotulo()))
            usuario.setQuantMaxFotosPorImovel(usuario.getQuantMaxFotosPorImovel() + servico.getQuantidadeServico());
        else if ( servico.getDescServico().equals(ServicoValueEnum.ADICIONAR_IMOVEIS.getRotulo()))
            usuario.setQuantCadastroImoveis(usuario.getQuantCadastroImoveis() + servico.getQuantidadeServico());
        else if ( servico.getDescServico().equals(ServicoValueEnum.INDICACOES_IMOVEIS.getRotulo()))
            usuario.setQuantMaxIndicacoesImovel(usuario.getQuantMaxIndicacoesImovel() + servico.getQuantidadeServico());
        else if ( servico.getDescServico().equals(ServicoValueEnum.INDICACOES_EMAIL.getRotulo()))
            usuario.setQuantMaxIndicacoesImovelEmail(usuario.getQuantMaxIndicacoesImovelEmail() + servico.getQuantidadeServico());
        usuarioService.editarUsuario(usuario);
    }
    servico.setDataPagto(dtAtual.getTime());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
    dao.save(servico);
    // Notify the requesting user that the payment was received.
    notificacaoService.cadastrarNotificacao(servico.getUsuario().getId(),
            AcaoNotificacaoEnum.SERVICO.getRotulo(),
            MessageUtils.getMessage("msg.sucesso.recebimento.pagto.servico")+": " + servico.getLabelServico(),
            TipoNotificacaoEnum.SERVICO.getRotulo(),
            0l);
}
/**
 * Checks whether the user may use the given service: services that are not
 * billed are always available; billed services require a currently valid
 * paid Servico record.
 *
 * @return true when the service is free or the user has an active payment
 */
public boolean checarPossuiServicoPago(Long idUsuario, String descServico) {
    // Services that are not billed are available to everyone.
    if (!paramservicoService.checarServicoCobrado(descServico))
        return true;
    DateUtil agora = new DateUtil();
    List<Servico> pagos = dao.findServicoPagoPorIdUsuario(idUsuario, descServico, agora.getTime());
    return !CollectionUtils.isEmpty(pagos);
}
/**
 * Grants (free of charge) a property service — currently the "add photos"
 * service. Persists a pre-paid Servico with zero value, computes its end
 * date from the requested duration, and immediately credits the extra photo
 * slots to the property.
 *
 * @param tipoServico   only "foto" sets the granted photo quantity
 * @param tempoDuracao  one of "dia", "semana", "mes", "ano"
 * @param quant         number of photo slots to grant
 */
@Transactional
public void concederServicoImovel(Long idImovel, String tipoServico, String tempoDuracao, int quant) {
    Servico servico = new Servico();
    servico.setDataSolicitacao(new Date());
    servico.setDataPagto(new Date());
    // Granted services are recorded as already paid, at zero cost.
    servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
    servico.setFormaPgto("concessao");
    Imovel imovel = imovelDao.findImovelById(idImovel);
    servico.setUsuario(imovel.getUsuario());
    if ( tipoServico.equals("foto"))
        servico.setQuantFoto(quant);
    servico.setDescServico(ServicoValueEnum.ADICIONAR_FOTOS.getRotulo());
    servico.setImovel(imovel);
    servico.setValorServico(0);
    servico.setOpcaoTempoPagto(tempoDuracao);
    servico.setLabelServico(MessageUtils.getMessage("lbl.servico.selecionado.adicionar.fotos"));
    // Map the duration option to a number of days.
    // NOTE(review): "dia" maps to 2 days, not 1 — confirm this grace period
    // is intentional (same mapping appears in concederServicoUsuario).
    int quantDiasFimServico = 0;
    if ( tempoDuracao.equals("dia"))
        quantDiasFimServico = 2;
    else if ( tempoDuracao.equals("semana"))
        quantDiasFimServico = 7;
    else if ( tempoDuracao.equals("mes"))
        quantDiasFimServico = 30;
    else if ( tempoDuracao.equals("ano"))
        quantDiasFimServico = 365;
    Calendar cal = Calendar.getInstance();
    cal.setTime(servico.getDataPagto());
    cal.add(Calendar.DAY_OF_MONTH, quantDiasFimServico);
    servico.setDataFimServico(cal.getTime());
    servico.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo());
    dao.save(servico);
    // Apply the granted photo slots to the property right away.
    imovelService.adicionarMaisFotos(servico.getImovel().getId(), servico.getQuantFoto());
}
/** Lists the services attached to a property; delegates to the DAO. */
public List<Servico> recuperarServicosDisponiveisPorImovel(Long idImovel) {
    return dao.findServicoPorIdImovel(idImovel);
}

/** Deletes a property service record. */
@Transactional
public void excluirServicoDisponivelImovel(Servico servico) {
    dao.delete(servico);
}

/** Loads a Servico by id and deletes it. */
@Transactional
public void excluirServico(Long idServico) {
    Servico servico = dao.findServicoById(idServico);
    dao.delete(servico);
}
/**
 * Lists a user's services, tagging each one as "common" ("S") when its
 * description mentions a report ("relatorio") or subscription ("assinatura"),
 * and "N" otherwise.
 */
public List<Servico> recuperarServicosDisponiveisPorUsuario(Long idUsuario) {
    List<Servico> resultado = new ArrayList<Servico>();
    for (Servico item : dao.findServicosByIdUsuario(idUsuario)) {
        boolean comum = item.getDescServico().contains("relatorio")
                     || item.getDescServico().contains("assinatura");
        item.setTipoServicoComum(comum ? "S" : "N");
        resultado.add(item);
    }
    return resultado;
}
/**
 * Grants (free of charge) a service to a user. A pre-paid Servico is
 * persisted with status CONCEDIDO and zero value. Depending on the service
 * parameter type: user/subscription services receive a computed end date;
 * property services instead record a granted quantity which is credited to
 * the matching user quota; subscription grants also extend the user's
 * subscription validity.
 *
 * @param opcaoTempo      one of "dia", "semana", "mes", "ano"
 * @param opcaoQuantidade quantity (as string) for property-type services
 */
@Transactional
public void concederServicoUsuario(Long idUsuario, String opcaoServico, String opcaoTempo, String opcaoQuantidade) {
    Servico servico = new Servico();
    servico.setDataSolicitacao(new Date());
    servico.setDataPagto(new Date());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.CONCEDIDO.getRotulo());
    servico.setFormaPgto(StatusPagtoOpcaoEnum.CONCEDIDO.getRotulo());
    servico.setUsuario(usuarioDao.findUsuario(idUsuario));
    servico.setDescServico(opcaoServico);
    // Map the duration option to a number of days.
    // NOTE(review): "dia" maps to 2 days — same mapping as concederServicoImovel.
    int quantDiasFimServico = 0;
    if ( opcaoTempo.equals("dia"))
        quantDiasFimServico = 2;
    else if ( opcaoTempo.equals("semana"))
        quantDiasFimServico = 7;
    else if ( opcaoTempo.equals("mes"))
        quantDiasFimServico = 30;
    else if ( opcaoTempo.equals("ano"))
        quantDiasFimServico = 365;
    Calendar cal = Calendar.getInstance();
    cal.setTime(servico.getDataPagto());
    cal.add(Calendar.DAY_OF_MONTH, quantDiasFimServico);
    servico.setParamServico(paramservicoDao.findParamservicoPorNome(opcaoServico));
    servico.setAssociadoPlano("N");
    // User/subscription services get an expiry date; property services get a quantity.
    if ( servico.getParamServico().getTipoParamServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo()) ||
    servico.getParamServico().getTipoParamServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo()) ){
        servico.setOpcaoTempoPagto(opcaoTempo);
        servico.setDataFimServico(cal.getTime());
    }
    else if ( servico.getParamServico().getTipoParamServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo())){
        servico.setQuantidadeServico(Integer.parseInt(opcaoQuantidade));
    }
    servico.setValorServico(0d);
    dao.save(servico);
    if ( servico.getParamServico().getTipoParamServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo())){
        Usuario usuario = servico.getUsuario();
        // Credit the granted quantity to the matching user quota.
        if ( servico.getParamServico().getValueServico().equals(ServicoValueEnum.ADICIONAR_FOTOS.getRotulo())) {
            usuario.setQuantMaxFotosPorImovel(usuario.getQuantMaxFotosPorImovel() + servico.getQuantidadeServico());
        }
        else if ( servico.getParamServico().getValueServico().equals(ServicoValueEnum.ADICIONAR_IMOVEIS.getRotulo())) {
            usuario.setQuantCadastroImoveis(usuario.getQuantCadastroImoveis() + servico.getQuantidadeServico());
        }
        else if ( servico.getParamServico().getValueServico().equals(ServicoValueEnum.INDICACOES_IMOVEIS.getRotulo())) {
            usuario.setQuantMaxIndicacoesImovel(usuario.getQuantMaxIndicacoesImovel() + servico.getQuantidadeServico());
        }
        else if ( servico.getParamServico().getValueServico().equals(ServicoValueEnum.INDICACOES_EMAIL.getRotulo())) {
            usuario.setQuantMaxIndicacoesImovelEmail(usuario.getQuantMaxIndicacoesImovelEmail() + servico.getQuantidadeServico());
        }
        usuarioService.editarUsuario(usuario);
    }
    else if ( servico.getParamServico().getTipoParamServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo())){
        // Extend the user's subscription validity to the new end date.
        usuarioService.atualizarAssinaturaUsuario(servico.getUsuario().getId(), servico.getDataFimServico());
    }
}
/** Revokes a granted service by deleting its record. */
@Transactional
public void revogarServicoDisponivelUsuario(Servico servico) {
    dao.delete(servico);
}
/**
 * Populates the form with the details of the chosen service (price, and —
 * depending on the service type — duration or quantity) and the chosen
 * payment method.
 */
public void selecionarPagamentoServico(ServicoForm frm, long idInfoServico, Long idFormaPagto) {
    Infoservico infoservico = infoservicoService.recuperarInfoServicoPorId(idInfoServico);
    frm.setValorServico(infoservico.getValorServico());
    // Duration applies to user/report/subscription services; quantity to property services.
    if ( infoservico.getTipoInfoServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo())){
        frm.setTempoDuracao(infoservico.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
    }
    else if ( infoservico.getTipoInfoServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_RELATORIO.getRotulo())){
        frm.setTempoDuracao(infoservico.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_RELATORIO.getRotulo());
    }
    else if ( infoservico.getTipoInfoServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo())){
        frm.setQuantidadeServico(infoservico.getQuantidade());
        frm.setLabelImovelQuantidade(String.valueOf(infoservico.getQuantidade()) );
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo());
    }
    else if ( infoservico.getTipoInfoServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo())){
        frm.setTempoDuracao(infoservico.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo());
    }
    Formapagamento formapagamento = paramservicoService.recuperaTipoFormaPagtoPorId(idFormaPagto);
    frm.setFormaPagtoSelecionado(formapagamento.getLabel() );
    frm.setNomeFormaPagto(formapagamento.getNome());
}
/**
 * Fills the form with the label and name of the chosen payment method for a
 * plan purchase.
 */
public void selecionarPagamentoPlano(ServicoForm frm, Long idformaPagto) {
    Formapagamento fp = paramservicoService.recuperaTipoFormaPagtoPorId(idformaPagto);
    frm.setFormaPagtoSelecionado(fp.getLabel());
    frm.setNomeFormaPagto(fp.getNome());
}
/**
 * Builds per-service request counts for the given period and payment status.
 * Each DAO row is a pair: [0] = service label, [1] = count.
 *
 * Idiom fix: replaced the raw-typed Iterator loop with an enhanced for and
 * moved the loop variable inside the loop; behavior is unchanged.
 *
 * @param dataInicio    start of the period
 * @param dataFim       end of the period
 * @param statusServico payment-status filter
 * @return one Servico per label carrying its count; empty list when no rows
 */
public List<Servico> checarServicosMaisPorStatus(Date dataInicio, Date dataFim, String statusServico) {
    List<?> lista = dao.findServicosMaisPorPeriodo(dataInicio, dataFim, statusServico);
    List<Servico> listaFinal = new ArrayList<Servico>();
    if (!CollectionUtils.isEmpty(lista)) {
        for (Object row : lista) {
            Object[] obj = (Object[]) row;
            Servico serv = new Servico();
            serv.setLabelServico(obj[0].toString());
            serv.setQuantidade(Integer.parseInt(obj[1].toString()));
            listaFinal.add(serv);
        }
    }
    return listaFinal;
}
/**
 * Total financial volume of services in the period with the given payment
 * status. (Method name keeps its historical typo — it is part of the public
 * interface.)
 */
public double checarVolumeFnanceiroServMaisPorStatus(Date dataInicio, Date dataFim, String statusServico) {
    return dao.calcularVolumeFinanceiroMaisSolicitadosMaisPagosPorPeriodoPorTipoServico(dataInicio, dataFim, statusServico, null);
}
/**
 * Financial balance for the period: total paid minus total merely requested.
 */
public double checarBalancoFinanceiro(Date dataInicio, Date dataFim) {
    double solicitado = dao.calcularVolumeFinanceiroMaisSolicitadosMaisPagosPorPeriodoPorTipoServico(
            dataInicio, dataFim, StatusPagtoOpcaoEnum.SOLICITADO.getRotulo(), null);
    double pago = dao.calcularVolumeFinanceiroMaisSolicitadosMaisPagosPorPeriodoPorTipoServico(
            dataInicio, dataFim, StatusPagtoOpcaoEnum.PAGO.getRotulo(), null);
    return pago - solicitado;
}
/**
 * Builds the per-user financial volume of services selected by the form's
 * filters. Each DAO row is a pair: [0] = user id, [1] = amount.
 *
 * Idiom fix: replaced the raw-typed Iterator loop with an enhanced for and
 * a loop-local Servico; behavior is unchanged.
 *
 * @return one Servico per user carrying the user and amount; empty list
 *         when no rows match
 */
public List<Servico> checarUsuariosVolFinanceiroServicoPorStatus(AdministracaoForm form) {
    List<?> lista = dao.findUsuariosVolFinanceiroServico(form);
    List<Servico> listaFinal = new ArrayList<Servico>();
    if (!CollectionUtils.isEmpty(lista)) {
        for (Object row : lista) {
            Object[] obj = (Object[]) row;
            Servico serv = new Servico();
            serv.setUsuario(usuarioDao.findUsuario(Long.parseLong(obj[0].toString())));
            serv.setValorServico(Double.parseDouble(obj[1].toString()));
            listaFinal.add(serv);
        }
    }
    return listaFinal;
}
/**
 * Builds the per-service financial volume for the period and status.
 * Each DAO row is a pair: [0] = service label, [1] = amount.
 *
 * Idiom fix: replaced the raw-typed Iterator loop with an enhanced for and
 * a loop-local Servico; behavior is unchanged.
 *
 * @return one Servico per label carrying the amount; empty list when no rows
 */
public List<Servico> checarServicosVolFinanceiroServicoPorStatus(Date dataInicio, Date dataFim, String statusServico) {
    List<?> lista = dao.findServicosVolFinanceiroServico(dataInicio, dataFim, statusServico);
    List<Servico> listaFinal = new ArrayList<Servico>();
    if (!CollectionUtils.isEmpty(lista)) {
        for (Object row : lista) {
            Object[] obj = (Object[]) row;
            Servico serv = new Servico();
            serv.setLabelServico(obj[0].toString());
            serv.setValorServico(Double.parseDouble(obj[1].toString()));
            listaFinal.add(serv);
        }
    }
    return listaFinal;
}
/**
 * Simulates a successful payment callback for an "add sharing" service:
 * marks the service as paid now and credits the purchased shares to the
 * service's property.
 */
public void simulaRetornoPagtoAdicionarCompartilhamento(Long idServico) {
    Servico servico = dao.findServicoById(idServico);
    DateUtil agora = new DateUtil();
    servico.setDataPagto(agora.getTime());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
    dao.save(servico);
    imovelService.adicionarMaisCompartilhamento(servico.getImovel().getId(), servico.getQuantidadeServico());
}
/**
 * Validates the payment selection for a service.
 *
 * Idiom fix: dropped the redundant {@code formaPgto != null} re-check after
 * the short-circuiting {@code ||}; behavior is unchanged.
 *
 * @param idInfoServico id of the chosen price option (must be positive)
 * @param formaPgto     id of the chosen payment method; null or -1 means
 *                      nothing was selected
 * @return localized error message, or empty string when the selection is valid
 */
public String validarPagamentoServico(long idInfoServico, Long formaPgto) {
    if (formaPgto == null || formaPgto.longValue() == -1) {
        return MessageUtils.getMessage("msg.erro.selecionar.pagamento");
    }
    if (idInfoServico <= 0) {
        return MessageUtils.getMessage("msg.erro.nenhum.valor.servico.plano");
    }
    return "";
}
// This validation only covers services billed by duration (e.g. reports),
// not by quantity (e.g. adding properties).
/**
 * Checks whether a duration-based service may be requested by the user.
 *
 * Idiom fix: flattened the nested else-chain into guard clauses; the checks
 * and their order are unchanged.
 *
 * NOTE(review): any exception is printed and swallowed, returning "" (i.e.
 * "request allowed") — same as the original; confirm this best-effort
 * policy is intended.
 *
 * @return localized error message when the service was already requested,
 *         is awaiting payment, or is already paid for the period; empty
 *         string when the request is allowed
 */
public String validarSolicitacaoServico(long idParamServico, long idUsuario, Date dtCorrente) {
    try {
        if (idParamServico == -1) {
            return MessageUtils.getMessage("msg.erro.selecionar.um.servico");
        }
        // already requested earlier?
        Servico servico = dao.findServicoByIdParamServicoByIdUsuarioByStatusByTipoServico(
                idParamServico, idUsuario, StatusPagtoOpcaoEnum.SOLICITADO.getRotulo(),
                TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
        if (servico != null) {
            return MessageUtils.getMessage("msg.erro.servico.sol.anteriormente");
        }
        // awaiting payment?
        servico = dao.findServicoByIdParamServicoByIdUsuarioByStatusByTipoServico(
                idParamServico, idUsuario, StatusPagtoOpcaoEnum.AGUARDANDO.getRotulo(),
                TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
        if (servico != null) {
            return MessageUtils.getMessage("msg.erro.servico.aguarde.pagto");
        }
        // already paid for the current period?
        servico = dao.findServicoByIdParamServicoByData(idParamServico, idUsuario, dtCorrente);
        if (servico != null) {
            return MessageUtils.getMessage("msg.erro.servico.pago.periodo");
        }
        return "";
    } catch (Exception e) {
        e.printStackTrace();
        return "";
    }
}
/**
 * Checks whether the user already has a service with the given label active
 * on the reference date.
 *
 * @return localized error message when one exists; empty string otherwise
 *         (also on lookup failure — best effort, as in the rest of the class)
 */
public String validarSolicitacaoServicoPorLabelServico(String nomeServico, long idUsuario, Date dtCorrente) {
    try {
        Servico contratado = dao.findServicoByLabelServicoServicoByData(nomeServico, idUsuario, dtCorrente);
        return contratado == null ? "" : MessageUtils.getMessage("msg.erro.selecionar.servico.contratado");
    } catch (Exception e) {
        e.printStackTrace();
        return "";
    }
}
/**
 * Builds per-service counts of services *granted* (status CONCEDIDO) in the
 * period. Each DAO row is a pair: [0] = service label, [1] = count.
 *
 * Idiom fix: replaced the raw-typed Iterator loop with an enhanced for and
 * a loop-local Servico; behavior is unchanged.
 *
 * @return one Servico per label carrying its count; empty list when no rows
 */
public List<Servico> checarServicosMaisConcedidos(Date dataInicio, Date dataFim) {
    List<?> lista = dao.findServicosMaisPorPeriodo(dataInicio, dataFim, StatusPagtoOpcaoEnum.CONCEDIDO.getRotulo());
    List<Servico> listaFinal = new ArrayList<Servico>();
    if (!CollectionUtils.isEmpty(lista)) {
        for (Object row : lista) {
            Object[] obj = (Object[]) row;
            Servico serv = new Servico();
            serv.setLabelServico(obj[0].toString());
            serv.setQuantidade(Integer.parseInt(obj[1].toString()));
            listaFinal.add(serv);
        }
    }
    return listaFinal;
}
/**
 * Total financial volume of services granted in the period.
 */
public double checarVolumeFinanceiroServMaisConcedidos(Date dataInicio, Date dataFim) {
    return dao.calcularVolumeFinanceiroMaisConcedidosPorPeriodo(dataInicio, dataFim);
}
/**
 * Lists the services available to a user filtered by plan association,
 * tagging each as "common" (S) when it is a report or subscription service,
 * otherwise N.
 */
public List<Servico> recuperarServicosDisponiveisPorUsuarioAssociadoPlano(Long idUsuario, String assocPlano) {
    List<Servico> listaFinal = new ArrayList<Servico>();
    for (Servico servico : dao.findServicosByIdUsuarioByAssocPlano(idUsuario, assocPlano)) {
        boolean comum = servico.getDescServico().contains("relatorio")
                || servico.getDescServico().contains("assinatura");
        servico.setTipoServicoComum(comum ? "S" : "N");
        listaFinal.add(servico);
    }
    return listaFinal;
}
/**
 * Lists the services available to a user filtered by service type.
 */
public List<Servico> recuperarServicosDisponiveisPorUsuarioPorTipo(Long idUsuario, String tipoServico) {
    return dao.findServicosByIdUsuarioByTipoServico(idUsuario, tipoServico);
}
/**
 * Materializes one of a plan's bundled services for a user and bumps the
 * user's quotas accordingly, persisting both.
 *
 * BUG(review): {@code servico} is freshly constructed and its usuario is
 * never set, so {@code usuario} below is null and every quota-update branch
 * throws a NullPointerException. The user presumably has to come from the
 * Planousuario {@code p} — confirm and fix.
 */
@Transactional
public void atualizarServicoImovelPlano(Planousuario p, Plano plano, String descServico) {
Servico servico = new Servico();
servico.setDataSolicitacao(p.getDataSolicitacao());
servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
servico.setFormaPgto(p.getFormaPagto());
servico.setParamServico(paramservicoDao.findParamservicoPorNome(descServico));
servico.setValorServico(0); // still to be handled (original note: "tratar ainda")
servico.setTempoDuracao(0);
servico.setDataPagto(new Date());
servico.setAssociadoPlano("S");
Usuario usuario = servico.getUsuario();
// branch per bundled service kind: set the purchased quantity and add it
// to the matching user quota
if ( descServico.equals(ServicoValueEnum.ADICIONAR_FOTOS.getRotulo())) {
servico.setQuantidadeServico(plano.getQuantFotosImovel());
usuario.setQuantMaxFotosPorImovel(usuario.getQuantMaxFotosPorImovel() + servico.getQuantidadeServico());
}
else if ( descServico.equals(ServicoValueEnum.ADICIONAR_IMOVEIS.getRotulo())) {
servico.setQuantidadeServico(plano.getQuantCadastroImoveis());
usuario.setQuantCadastroImoveis(usuario.getQuantCadastroImoveis() + servico.getQuantidadeServico());
}
else if ( descServico.equals(ServicoValueEnum.INDICACOES_IMOVEIS.getRotulo())) {
servico.setQuantidadeServico(plano.getQuantImoveisIndicacao());
usuario.setQuantMaxIndicacoesImovel(usuario.getQuantMaxIndicacoesImovel() + servico.getQuantidadeServico());
}
else if ( descServico.equals(ServicoValueEnum.INDICACOES_EMAIL.getRotulo())) {
servico.setQuantidadeServico(plano.getQuantEmailsPorImovel());
usuario.setQuantMaxIndicacoesImovelEmail(usuario.getQuantMaxIndicacoesImovelEmail() + servico.getQuantidadeServico());
}
else if ( descServico.equals(ServicoValueEnum.RELATORIOS.getRotulo())){
DateUtil dtAtual = new DateUtil();
int quantDias = plano.getDuracaoPlano() * 30 ; // plan duration is in months
Calendar cal = Calendar.getInstance();
cal.setTime(new Date());
cal.add(Calendar.DAY_OF_MONTH, quantDias);
servico.setDataPagto(dtAtual.getTime());
servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
servico.setDataFimServico(cal.getTime());
// NOTE(review): this branch saves servico here AND again after the
// if-chain, persisting the report service twice — confirm intent.
dao.save(servico);
}
usuarioService.editarUsuario(usuario);
dao.save(servico);
}
/**
 * Simulates a successful payment callback for a subscription service: marks
 * it paid now, computes its end date from its duration (in days), extends
 * the user's subscription validity and records a notification for the user.
 */
@Transactional
public void simulaRetornoPagtoAssinaturaUsuario(Long idServico) {
    Servico servico = dao.findServicoById(idServico);
    DateUtil agora = new DateUtil();
    servico.setDataPagto(agora.getTime());
    servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
    // end of service = payment date + duration in days
    Calendar cal = Calendar.getInstance();
    cal.setTime(servico.getDataPagto());
    cal.add(Calendar.DAY_OF_MONTH, servico.getTempoDuracao());
    servico.setDataFimServico(cal.getTime());
    dao.save(servico);
    // extend the user's subscription access window
    usuarioService.atualizarAssinaturaUsuario(servico.getUsuario().getId(), servico.getDataFimServico());
    // notify the requesting user that the payment was received
    notificacaoService.cadastrarNotificacao(servico.getUsuario().getId(),
            AcaoNotificacaoEnum.SERVICO.getRotulo(),
            MessageUtils.getMessage("msg.sucesso.recebimento.pagto.assinatura"),
            TipoNotificacaoEnum.SERVICO.getRotulo(),
            0l);
}
/**
 * Loads the pending ("solicitada") subscription matching the user's profile
 * into a fresh ServicoForm; returns an empty form when none exists.
 *
 * NOTE(review): the argument order of BeanUtils.copyProperties here is
 * (frm, servico), while carregaServicoForm uses (servico, form). Depending
 * on whether the imported BeanUtils is Spring's (source, target) or
 * Commons' (dest, orig), one of the two call sites copies in the wrong
 * direction — verify the import and align both.
 */
public ServicoForm recuperarAssinaturaSolicitaPorIdUsuario(Long idUsuario, String perfilUsuario) {
Servico servico = null;
ServicoForm frm = new ServicoForm();
// each profile has its own subscription service value
if ( perfilUsuario.equals(PerfilUsuarioOpcaoEnum.PADRAO.getRotulo()))
servico = dao.findServicoAssinaturaSolicitadaByIdUsuario(idUsuario, ServicoValueEnum.ASSINATURA_PADRAO.getRotulo());
else if ( perfilUsuario.equals(PerfilUsuarioOpcaoEnum.CORRETOR.getRotulo()))
servico = dao.findServicoAssinaturaSolicitadaByIdUsuario(idUsuario, ServicoValueEnum.ASSINATURA_CORRETOR.getRotulo());
else if ( perfilUsuario.equals(PerfilUsuarioOpcaoEnum.IMOBILIARIA.getRotulo()))
servico = dao.findServicoAssinaturaSolicitadaByIdUsuario(idUsuario, ServicoValueEnum.ASSINATURA_IMOBILIARIA.getRotulo());
if ( servico != null )
BeanUtils.copyProperties(frm, servico);
return frm;
}
/**
 * Deletes the user's pending subscription request for their profile.
 *
 * Robustness fix: when the profile matches none of the known values, or no
 * pending request exists, the lookup yields null; skip the delete instead
 * of handing null to the DAO.
 */
@Transactional
public void excluirAssinaturaUsuarioSolicitada(Long idUsuario, String perfilUsuario) {
    Servico servico = null;
    if (perfilUsuario.equals(PerfilUsuarioOpcaoEnum.PADRAO.getRotulo()))
        servico = dao.findServicoAssinaturaSolicitadaByIdUsuario(idUsuario, ServicoValueEnum.ASSINATURA_PADRAO.getRotulo());
    else if (perfilUsuario.equals(PerfilUsuarioOpcaoEnum.CORRETOR.getRotulo()))
        servico = dao.findServicoAssinaturaSolicitadaByIdUsuario(idUsuario, ServicoValueEnum.ASSINATURA_CORRETOR.getRotulo());
    else if (perfilUsuario.equals(PerfilUsuarioOpcaoEnum.IMOBILIARIA.getRotulo()))
        servico = dao.findServicoAssinaturaSolicitadaByIdUsuario(idUsuario, ServicoValueEnum.ASSINATURA_IMOBILIARIA.getRotulo());
    if (servico != null) {
        dao.delete(servico);
    }
}
/**
 * Returns the report service that enables the report module for the user on
 * the given date, or null when the module is not enabled.
 */
public Servico checarHabilitadoModuloRelatorio(Long idUsuario, String tipoRelatorio, Date dtCorrente) {
    return dao.findServicoRelatorioByIdUsuario(idUsuario, tipoRelatorio, dtCorrente);
}
/**
 * Creates and persists a paid report service tied to a user's plan, with an
 * end date derived from the plan's duration.
 *
 * BUG(review): {@code user} is initialized to null and immediately
 * dereferenced via {@code user.getPerfil()}, so this method always throws a
 * NullPointerException. The profile presumably has to be loaded from the
 * Planousuario {@code p} — confirm and fix before using this method.
 */
@Transactional
public void atualizarServicoRelatorioPlano(Planousuario p, Plano plano) {
Servico servico = new Servico();
servico.setDataSolicitacao(p.getDataSolicitacao());
servico.setStatusPgto(StatusPagtoOpcaoEnum.PAGO.getRotulo());
servico.setFormaPgto(p.getFormaPagto());
UsuarioForm user = null;
String descServico = "";
// choose the report service matching the user's profile
if (user.getPerfil().equals(PerfilUsuarioOpcaoEnum.PADRAO.getRotulo()))
descServico = (ServicoValueEnum.RELATORIO_PADRAO.getRotulo());
else if (user.getPerfil().equals(PerfilUsuarioOpcaoEnum.CORRETOR.getRotulo()))
descServico = (ServicoValueEnum.RELATORIO_CORRETOR.getRotulo());
else if (user.getPerfil().equals(PerfilUsuarioOpcaoEnum.IMOBILIARIA.getRotulo()))
descServico = (ServicoValueEnum.RELATORIO_IMOBILIARIA.getRotulo());
servico.setParamServico(paramservicoDao.findParamservicoPorNome(descServico));
servico.setValorServico(0); // still to be handled (original note: "tratar ainda")
servico.setDataPagto(new Date());
servico.setAssociadoPlano("S");
DateUtil dtAtual = new DateUtil();
int quantDias = plano.getDuracaoPlano() * 30 ; // plan duration is in months
Calendar cal = Calendar.getInstance();
cal.setTime(new Date());
cal.add(Calendar.DAY_OF_MONTH, quantDias);
servico.setDataPagto(dtAtual.getTime());
servico.setDataFimServico(cal.getTime());
dao.save(servico);
}
/**
 * Builds per-service totals for the period selected by the form.
 * Each DAO row is a pair: [0] = service label, [1] = count.
 *
 * Idiom fix: replaced the raw-typed Iterator loop with an enhanced for and
 * a loop-local Servico; behavior is unchanged.
 *
 * @return one Servico per label carrying its count; empty list when no rows
 */
public List<Servico> checarTotalServicos(AdministracaoForm form) {
    List<Servico> listaFinal = new ArrayList<Servico>();
    List<?> lista = dao.findTotalServicosPorPeriodo(form);
    if (!CollectionUtils.isEmpty(lista)) {
        for (Object row : lista) {
            Object[] obj = (Object[]) row;
            Servico serv = new Servico();
            serv.setLabelServico(obj[0].toString());
            serv.setQuantidade(Integer.parseInt(obj[1].toString()));
            listaFinal.add(serv);
        }
    }
    return listaFinal;
}
/**
 * Builds per-service financial volumes for the period selected by the form.
 * Each DAO row is a pair: [0] = service label, [1] = amount.
 *
 * Idiom fix: replaced the raw-typed Iterator loop with an enhanced for and
 * a loop-local Servico; behavior is unchanged.
 *
 * @return one Servico per label carrying its financial amount; empty list
 *         when no rows match
 */
public List<Servico> checarVolumeFinanceirosServicos(AdministracaoForm form) {
    List<Servico> listaFinal = new ArrayList<Servico>();
    List<?> lista = dao.findVolumeFinanceiroServicos(form);
    if (!CollectionUtils.isEmpty(lista)) {
        for (Object row : lista) {
            Object[] obj = (Object[]) row;
            Servico serv = new Servico();
            serv.setLabelServico(obj[0].toString());
            serv.setValorFinanceiro(Double.parseDouble(obj[1].toString()));
            listaFinal.add(serv);
        }
    }
    return listaFinal;
}
/**
 * Returns the id of the most recent service of the given kind created for
 * the user.
 *
 * NOTE(review): assumes the DAO always finds a row; a missing row would
 * throw a NullPointerException here — confirm callers guarantee existence.
 */
public Long recuperarUltimoIdServicoPorUsuario(long idUsuario, String descServico) {
    Servico ultimo = dao.findLastServicoGeradoByIdUsuario(idUsuario, descServico);
    return ultimo.getId();
}
/**
 * Lists the services available to a user filtered by status, tagging each as
 * "common" (S) when it is a report or subscription service, otherwise N.
 */
public List<Servico> recuperarServicosDisponiveisPorUsuarioPorStatus(Long idUsuario, String status) {
    List<Servico> listaFinal = new ArrayList<Servico>();
    for (Servico servico : dao.findServicoByIdUsuarioByStatus(idUsuario, status)) {
        boolean comum = servico.getDescServico().contains("relatorio")
                || servico.getDescServico().contains("assinatura");
        servico.setTipoServicoComum(comum ? "S" : "N");
        listaFinal.add(servico);
    }
    return listaFinal;
}
/**
 * Fills the report form with the pricing details of the chosen service
 * option and with the label and name of the selected payment method.
 */
@Override
public void selecionarPagamentoServico(RelatorioForm frm, long idInfoServico, long idFormaPagto) {
    Infoservico info = infoservicoService.recuperarInfoServicoPorId(idInfoServico);
    frm.setValorServico(info.getValorServico());
    String tipo = info.getTipoInfoServico();
    if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo())) {
        frm.setTempoDuracao(info.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
    } else if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_RELATORIO.getRotulo())) {
        frm.setTempoDuracao(info.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_RELATORIO.getRotulo());
    } else if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo())) {
        frm.setQuantidadeServico(info.getQuantidade());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo());
    } else if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo())) {
        frm.setTempoDuracao(info.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo());
    }
    Formapagamento fp = paramservicoService.recuperaTipoFormaPagtoPorId(idFormaPagto);
    frm.setFormaPagtoSelecionado(fp.getLabel());
    frm.setNomeFormaPagto(fp.getNome());
}
/**
 * Persists a new service payment request (status SOLICITADO) built from the
 * report form.
 *
 * NOTE(review): the {@code opcaoTempoPagto} parameter is unused; the value
 * is read from the form instead — confirm before removing it from the
 * interface.
 */
@Override
@Transactional
public void cadastrarSolicitacaoPagtoServico(RelatorioForm frm, String opcaoTempoPagto) {
    Servico pedido = new Servico();
    pedido.setDataSolicitacao(new Date());
    pedido.setStatusPgto(StatusPagtoOpcaoEnum.SOLICITADO.getRotulo());
    pedido.setFormaPgto(frm.getNomeFormaPagto());
    pedido.setUsuario(usuarioDao.findUsuario(frm.getIdUsuarioSelecionado()));
    pedido.setOpcaoTempoPagto(frm.getOpcaoTempoPagto());
    pedido.setDescServico(frm.getAcao());
    pedido.setParamServico(paramservicoDao.findParamservicoPorNome(frm.getAcao()));
    pedido.setValorServico(frm.getValorServico());
    pedido.setTempoDuracao(frm.getTempoDuracao());
    // original note: still to confirm this is the right type for reports
    pedido.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
    pedido.setAssociadoPlano("N");
    dao.save(pedido);
}
/**
 * Fills the message form with the pricing details of the chosen service
 * option and with the label and name of the selected payment method.
 */
@Override
public void selecionarPagamentoServico(MensagemForm frm, long idInfoServico, long idFormaPagto) {
    Infoservico info = infoservicoService.recuperarInfoServicoPorId(idInfoServico);
    frm.setValorServico(info.getValorServico());
    String tipo = info.getTipoInfoServico();
    if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo())) {
        frm.setTempoDuracao(info.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
    } else if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_RELATORIO.getRotulo())) {
        frm.setTempoDuracao(info.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_RELATORIO.getRotulo());
    } else if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo())) {
        frm.setQuantidadeServico(info.getQuantidade());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo());
    } else if (tipo.equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo())) {
        frm.setTempoDuracao(info.getTempoDuracao());
        frm.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_ASSINATURA.getRotulo());
    }
    Formapagamento fp = paramservicoService.recuperaTipoFormaPagtoPorId(idFormaPagto);
    frm.setFormaPagtoSelecionado(fp.getLabel());
    frm.setNomeFormaPagto(fp.getNome());
}
/**
 * Persists a new service payment request (status SOLICITADO) built from the
 * message form.
 *
 * NOTE(review): the {@code opcaoTempoPagto} parameter is unused; the value
 * is read from the form instead.
 */
@Override
@Transactional
public void cadastrarSolicitacaoPagtoServico(MensagemForm frm, String opcaoTempoPagto) {
    Servico pedido = new Servico();
    pedido.setDataSolicitacao(new Date());
    pedido.setStatusPgto(StatusPagtoOpcaoEnum.SOLICITADO.getRotulo());
    pedido.setFormaPgto(frm.getNomeFormaPagto());
    pedido.setUsuario(usuarioDao.findUsuario(frm.getIdUsuarioSelecionado()));
    pedido.setOpcaoTempoPagto(frm.getOpcaoTempoPagto());
    pedido.setDescServico(frm.getAcao());
    pedido.setParamServico(paramservicoDao.findParamservicoPorNome(frm.getAcao()));
    pedido.setValorServico(frm.getValorServico());
    pedido.setTempoDuracao(frm.getTempoDuracao());
    // original note: still to confirm this is the right type for reports
    pedido.setTipoServico(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_USUARIO.getRotulo());
    pedido.setAssociadoPlano("N");
    dao.save(pedido);
}
/**
 * Copies the selected plan's name and price into the form.
 */
@Override
public void carregarInfoPlanoSelecionado(ServicoForm form) {
    Plano plano = planoService.recuperarPlanoPorId(form.getIdPlanoSelecionado());
    form.setNomePlanoSelecionado(plano.getNome());
    form.setValorPlanoSelecionado(plano.getValorPlano());
}
/**
 * Builds a fresh form carrying the name and price of the plan with the
 * given id.
 */
@Override
public ServicoForm carregarInfoPlanoSelecionadoPorId(Long idPlano) {
    ServicoForm form = new ServicoForm();
    Plano plano = planoService.recuperarPlanoPorId(idPlano);
    form.setNomePlanoSelecionado(plano.getNome());
    form.setValorPlanoSelecionado(plano.getValorPlano());
    return form;
}
/**
 * Persists updates to an existing service.
 */
@Override
@Transactional
public void atualizarServico(Servico servico) {
    dao.save(servico);
}
/**
 * Persists a newly created service.
 */
@Override
@Transactional
public void cadastrarServico(Servico servico) {
    dao.save(servico);
}
/**
 * Counts subscriptions in the period selected by the form.
 */
@Override
public long checarTotalAssinaturas(AdministracaoForm form) {
    return dao.findTotalTipoServicoPorPeriodo(form);
}
/**
 * Intended to return the financial volume of subscriptions per type for the
 * period; the implementation is currently commented out and the method
 * always returns null.
 *
 * NOTE(review): callers receive null rather than an empty list — restore
 * the disabled implementation (its DAO-call signature no longer matches) or
 * return an empty collection once it is settled.
 */
@Override
public List<RelatorioQuantAssinatura> checarVolFinanceiroAssinaturasPorPeriodo(AdministracaoForm form) {
/*List lista = dao.calcularVolumeFinanceiroMaisSolicitadosMaisPagosPorPeriodoPorTipoServico(dataInicio, dataFim, statusAssinatura, "A");
List<RelatorioQuantAssinatura> listaFinal = new ArrayList<RelatorioQuantAssinatura>();
if ( lista != null && lista.size() > 0 ){
RelatorioQuantAssinatura relatorio = null;
Object[] obj = null;
for (Iterator iter = lista.iterator();iter.hasNext();){
obj = (Object[])iter.next();
relatorio = new RelatorioQuantAssinatura();
relatorio.setTipoAssinatura(obj[1].toString());
relatorio.setValorFinanceiro(Double.parseDouble(obj[0].toString()));
listaFinal.add(relatorio);
}
}
return listaFinal;*/
return null;
}
/**
 * Not implemented yet; always returns null.
 *
 * NOTE(review): prefer returning an empty collection once implemented so
 * callers need no null check.
 */
@Override
public Collection<? extends RelatorioQuantAssinatura> checarTotalAssinaturasPorUsuario(Long idUsuario, AdministracaoForm form) {
// TODO Auto-generated method stub
return null;
}
/**
 * Drives the two-leg PayPal REST checkout flow. On the first call (no
 * PayerID request parameter) it creates a Payment from the form's amount,
 * stores the approval redirect URL in the request, and marks the Servico as
 * awaiting payment with the generated guid as its token. On the callback
 * leg (PayerID present) it executes the previously created payment.
 *
 * BUG(review): {@code map} is a method-local HashMap, so the guid →
 * payment-id entry put at the end of the create leg is lost when the method
 * returns; on the execute leg {@code map.get(...)} is therefore always null
 * and {@code payment.setId(null)} is called. The mapping presumably needs
 * session or persistent storage (the Servico token looks intended for
 * this) — confirm and fix.
 *
 * @return the created or executed Payment, or null when the PayPal call failed
 */
@Override
public Payment confirmarPagamentoPayPal(HttpServletRequest req, HttpServletResponse resp, ServicoForm form) {
Payment createdPayment = null;
Map<String, String> map = new HashMap<String, String>();
// ###AccessToken
// Retrieve the access token from
// OAuthTokenCredential by passing in
// ClientID and ClientSecret
APIContext apiContext = null;
String accessToken = null;
try {
accessToken = GenerateAccessToken.getAccessToken();
// ### Api Context
// Pass in a `ApiContext` object to authenticate
// the call and to send a unique request id
// (that ensures idempotency). The SDK generates
// a request id if you do not pass one explicitly.
apiContext = new APIContext(accessToken);
// Use this variant if you want to pass in a request id
// that is meaningful in your application, ideally
// a order id.
/*
* String requestId = Long.toString(System.nanoTime(); APIContext
* apiContext = new APIContext(accessToken, requestId ));
*/
} catch (PayPalRESTException e) {
req.setAttribute("error", e.getMessage());
}
// callback leg: PayPal redirected back with the payer's approval
if (req.getParameter("PayerID") != null) {
Payment payment = new Payment();
if (req.getParameter("guid") != null) {
// NOTE(review): always null — see BUG in the Javadoc above
payment.setId(map.get(req.getParameter("guid")));
}
PaymentExecution paymentExecution = new PaymentExecution();
paymentExecution.setPayerId(req.getParameter("PayerID"));
try {
createdPayment = payment.execute(apiContext, paymentExecution);
} catch (PayPalRESTException e) {
e.printStackTrace();
}
} else {
// ###Details
// Let's you specify details of a payment amount.
Details details = new Details();
details.setShipping("0");
details.setSubtotal(String.format(Locale.US, "%.2f", form.getValorServico()));
details.setTax("0");
// ###Amount
// Let's you specify a payment amount.
Amount amount = new Amount();
amount.setCurrency("BRL");
// Total must be equal to sum of shipping, tax and subtotal.
amount.setTotal(String.format(Locale.US, "%.2f", form.getValorServico()));
amount.setDetails(details);
// ###Transaction
// A transaction defines the contract of a
// payment - what is the payment for and who
// is fulfilling it. Transaction is created with
// a `Payee` and `Amount` types
Transaction transaction = new Transaction();
transaction.setAmount(amount);
transaction.setDescription("This is the payment transaction description.");
// ### Items
Item item = new Item();
// property services show the formatted label; everything else the plain one
if ( form.getTipoServico().equals(TipoParamServicoOpcaoEnum.TIPO_PARAM_SERVICO_IMOVEL.getRotulo()))
item.setName(form.getLabelServicoFmt()).setQuantity("1").setCurrency("BRL").setPrice(String.format(Locale.US, "%.2f", form.getValorServico()));
else
item.setName(form.getLabelServico()).setQuantity("1").setCurrency("BRL").setPrice(String.format(Locale.US, "%.2f", form.getValorServico()));
ItemList itemList = new ItemList();
List<Item> items = new ArrayList<Item>();
items.add(item);
itemList.setItems(items);
transaction.setItemList(itemList);
// The Payment creation API requires a list of
// Transaction; add the created `Transaction`
// to a List
List<Transaction> transactions = new ArrayList<Transaction>();
transactions.add(transaction);
// ###Payer
// A resource representing a Payer that funds a payment
// Payment Method
// as 'paypal'
Payer payer = new Payer();
payer.setPaymentMethod("paypal");
// ###Payment
// A Payment Resource; create one using
// the above types and intent as 'sale'
Payment payment = new Payment();
payment.setIntent("sale");
payment.setPayer(payer);
payment.setTransactions(transactions);
// ###Redirect URLs
RedirectUrls redirectUrls = new RedirectUrls();
String guid = UUID.randomUUID().toString().replaceAll("-", "");
redirectUrls.setCancelUrl(req.getScheme() + "://"
+ req.getServerName() + ":" + req.getServerPort()
+ req.getContextPath() + "/servico/cancelarServico?guid=" + guid);
redirectUrls.setReturnUrl(req.getScheme() + "://"
+ req.getServerName() + ":" + req.getServerPort()
+ req.getContextPath() + "/paymentwithpaypal?guid=" + guid);
payment.setRedirectUrls(redirectUrls);
// Create a payment by posting to the APIService
// using a valid AccessToken
// The return object contains the status;
try {
createdPayment = payment.create(apiContext);
// ###Payment Approval Url
Iterator<Links> links = createdPayment.getLinks().iterator();
while (links.hasNext()) {
Links link = links.next();
if (link.getRel().equalsIgnoreCase("approval_url")) {
req.setAttribute("redirectURL", link.getHref());
}
}
map.put(guid, createdPayment.getId());
} catch (PayPalRESTException e) {
e.printStackTrace();
}
// update the Servico row: now awaiting payment, tagged with the guid
Servico servico = recuperarServicoPorId(form.getIdServicoGerado());
servico.setStatusPgto(StatusPagtoOpcaoEnum.AGUARDANDO.getRotulo());
servico.setDataAguardandoPagto(new Date());
servico.setToken(guid);
atualizarServico(servico);
}
return createdPayment;
}
/**
 * Loads a service by id into a fresh ServicoForm for display, mapping the
 * internal payment-method code to its display label.
 *
 * Robustness fix: compare with the constant first so an unset formaPgto
 * yields no label instead of a NullPointerException.
 */
@Override
public ServicoForm carregaServicoForm(Long idServico) {
    Servico servico = dao.findServicoById(idServico);
    ServicoForm form = new ServicoForm();
    BeanUtils.copyProperties(servico, form);
    form.setAcao(servico.getDescServico());
    if ("payPal".equals(form.getFormaPgto()))
        form.setFormaPagtoSelecionado("Pay Pal");
    else if ("pagSeguro".equals(form.getFormaPgto()))
        form.setFormaPagtoSelecionado("Pag Seguro");
    form.setIdServicoGerado(idServico);
    return form;
}
/**
 * Fills the form with the selected service's data, the available payment
 * methods, and the per-payment price list for that service.
 */
@Override
public void carregaInfoServicoSelecionado(ServicoForm form) {
    Servico servico = dao.findServicoById(form.getIdServicoSelecionado());
    BeanUtils.copyProperties(servico, form);
    form.setDataSolicitacao(new Date());
    form.setListaOpcoesFormaPagamento(paramservicoService.listarTodasFormasPagamento());
    Paramservico param = paramservicoService.recuperarParamServicoPorId(form.getIdServicoSelecionado());
    String valueServico = param.getValueServico();
    form.setAcao(valueServico);
    form.setListaInfoServicoPagamento(infoservicoService.recuperarInfoServicoPagamentoPorNomeServico(valueServico));
}
/**
 * Loads the user's pending subscription request (per profile) into a new
 * ServicoForm.
 *
 * @return the populated form, or null when there is no pending request
 */
@Override
public ServicoForm carregaSolicitacaoAssinaturaPorUsuario(UsuarioForm usuario) {
    String perfil = usuario.getPerfil();
    Servico servico = null;
    if (perfil.equals(PerfilUsuarioOpcaoEnum.PADRAO.getRotulo()))
        servico = dao.findSolAssinaturaByIdUsuario(usuario.getId(), ServicoValueEnum.ASSINATURA_PADRAO.getRotulo());
    else if (perfil.equals(PerfilUsuarioOpcaoEnum.CORRETOR.getRotulo()))
        servico = dao.findSolAssinaturaByIdUsuario(usuario.getId(), ServicoValueEnum.ASSINATURA_CORRETOR.getRotulo());
    else if (perfil.equals(PerfilUsuarioOpcaoEnum.IMOBILIARIA.getRotulo()))
        servico = dao.findSolAssinaturaByIdUsuario(usuario.getId(), ServicoValueEnum.ASSINATURA_IMOBILIARIA.getRotulo());
    if (servico == null) {
        return null;
    }
    ServicoForm form = new ServicoForm();
    BeanUtils.copyProperties(servico, form);
    return form;
}
/**
 * Cancels the service identified by the "guid" request parameter (the token
 * stored when the payment was created) by marking it CANCELADO.
 */
@Override
@Transactional
public void cancelarServico(HttpServletRequest req) {
    String token = req.getParameter("guid");
    Servico servico = dao.findServicoByToken(token);
    servico.setStatusPgto(StatusPagtoOpcaoEnum.CANCELADO.getRotulo());
    dao.save(servico);
}
/**
 * Lists the services attached to a user's plan.
 */
@Override
public List<Servico> recuperarServicosPorIdPlanoUsuario(Long idPlanoUsuario) {
    return (List<Servico>) dao.findServicosByIdPlanoUsuario(idPlanoUsuario);
}
/**
 * Revokes a granted service from a user and rolls back the quota it added,
 * then deletes the service.
 *
 * NOTE(review): unlike the other mutating methods in this class this one is
 * not annotated @Transactional, and the modified {@code usuario} is never
 * passed to usuarioService.editarUsuario(...) — the quota rollback may
 * never be persisted unless the entity is managed by an open persistence
 * context. Confirm and align with atualizarServicoImovelPlano.
 */
@Override
public void revogarServicoUsuario(Long idUsuario, Long idServicoSelecionado) {
Servico servico = dao.findServicoById(idServicoSelecionado);
Usuario usuario = usuarioService.recuperarUsuarioPorId(idUsuario);
// subtract the quota the service had added, per service kind
if ( servico.getDescServico().equals(ServicoValueEnum.ADICIONAR_FOTOS.getRotulo())) {
usuario.setQuantMaxFotosPorImovel(usuario.getQuantMaxFotosPorImovel() - servico.getQuantidadeServico());
}
else if ( servico.getDescServico().equals(ServicoValueEnum.ADICIONAR_IMOVEIS.getRotulo())) {
usuario.setQuantCadastroImoveis(usuario.getQuantCadastroImoveis() - servico.getQuantidadeServico());
}
else if ( servico.getDescServico().equals(ServicoValueEnum.INDICACOES_IMOVEIS.getRotulo())) {
usuario.setQuantMaxIndicacoesImovel(usuario.getQuantMaxIndicacoesImovel() - servico.getQuantidadeServico());
}
else if ( servico.getDescServico().equals(ServicoValueEnum.INDICACOES_EMAIL.getRotulo())) {
usuario.setQuantMaxIndicacoesImovelEmail(usuario.getQuantMaxIndicacoesImovelEmail() - servico.getQuantidadeServico());
}
dao.delete(servico);
}
/**
 * Flags missing price-option / payment-method selections on the service form.
 *
 * BUG FIX: the original combined the emptiness check and the -1 sentinel
 * check with '&&', unlike the MensagemForm and RelatorioForm overloads
 * which use '||' — with '&&' an unselected value could never be flagged.
 * Aligned with the sibling overloads.
 *
 * @return true when at least one required selection is missing
 */
@Override
public boolean validarSelecaoPagamentoServico(ServicoForm form, BindingResult result){
    boolean obrigatoriedade = false;
    if ( StringUtils.isEmpty(form.getIdInfoServicoPagamentoSelecionada()) || form.getIdInfoServicoPagamentoSelecionada() == -1){
        result.rejectValue("idInfoServicoPagamentoSelecionada", "msg.erro.campo.obrigatorio");
        obrigatoriedade = true;
    }
    if ( StringUtils.isEmpty(form.getIdFormapagamentoSelecionada()) || form.getIdFormapagamentoSelecionada().longValue() == -1 ){
        result.rejectValue("idFormapagamentoSelecionada", "msg.erro.campo.obrigatorio");
        obrigatoriedade = true;
    }
    return obrigatoriedade;
}
/**
 * Flags missing price-option / payment-method selections on the message form.
 *
 * @return true when at least one required selection is missing
 */
@Override
public boolean validarSelecaoPagamentoServico(MensagemForm form, BindingResult result){
    boolean faltando = false;
    if (StringUtils.isEmpty(form.getIdInfoServicoPagamentoSelecionada())
            || form.getIdInfoServicoPagamentoSelecionada() == -1) {
        result.rejectValue("idInfoServicoPagamentoSelecionada", "msg.erro.campo.obrigatorio");
        faltando = true;
    }
    if (StringUtils.isEmpty(form.getIdFormapagamentoSelecionada())
            || form.getIdFormapagamentoSelecionada().longValue() == -1) {
        result.rejectValue("idFormapagamentoSelecionada", "msg.erro.campo.obrigatorio");
        faltando = true;
    }
    return faltando;
}
/**
 * Flags missing price-option / payment-method selections on the report form.
 *
 * @return true when at least one required selection is missing
 */
@Override
public boolean validarSelecaoPagamentoServico(RelatorioForm form, BindingResult result) {
    boolean faltando = false;
    if (StringUtils.isEmpty(form.getIdInfoServicoPagamentoSelecionada())
            || form.getIdInfoServicoPagamentoSelecionada() == -1) {
        result.rejectValue("idInfoServicoPagamentoSelecionada", "msg.erro.campo.obrigatorio");
        faltando = true;
    }
    if (StringUtils.isEmpty(form.getIdFormapagamentoSelecionada())
            || form.getIdFormapagamentoSelecionada().longValue() == -1) {
        result.rejectValue("idFormapagamentoSelecionada", "msg.erro.campo.obrigatorio");
        faltando = true;
    }
    return faltando;
}
}
| |
package org.sdmlib.openbank.util;
import org.sdmlib.models.pattern.AttributeConstraint;
import org.sdmlib.models.pattern.Pattern;
import org.sdmlib.models.pattern.PatternObject;
import org.sdmlib.openbank.*;
import java.math.BigInteger;
import java.util.Date;
import java.util.Set;
import org.sdmlib.openbank.TransactionTypeEnum;
import java.lang.StringBuilder;
import org.sdmlib.openbank.Bank;
public class BankPO extends PatternObject<BankPO, Bank>
{
/**
 * Collects every Bank the enclosing pattern matches.
 *
 * @return the set of all matched Bank objects
 */
public BankSet allMatches()
{
   this.setDoAllMatches(true);
   BankSet matches = new BankSet();
   // advance the pattern until no further match exists
   for (; this.getPattern().getHasMatch(); this.getPattern().findMatch())
   {
      matches.add((Bank) this.getCurrentMatch());
   }
   return matches;
}
/** Creates a pattern object bound to a fresh pattern with no host object. */
public BankPO()
{
   newInstance(null);
}
/**
 * Creates a pattern object seeded with the given host-graph banks.
 * Does nothing when no bank is supplied.
 */
public BankPO(Bank... hostGraphObject)
{
   if (hostGraphObject == null || hostGraphObject.length < 1)
   {
      return;
   }
   newInstance(null, hostGraphObject);
}
/** Creates a pattern object using the given match modifier. */
public BankPO(String modifier)
{
   this.setModifier(modifier);
}
/**
 * Adds an equality constraint on the fee attribute.
 *
 * @return this, for chaining
 */
public BankPO createFeeCondition(double value)
{
   AttributeConstraint constraint = new AttributeConstraint();
   constraint.withAttrName(Bank.PROPERTY_FEE)
      .withTgtValue(value)
      .withSrc(this)
      .withModifier(this.getPattern().getModifier())
      .withPattern(this.getPattern());
   super.filterAttr();
   return this;
}
public BankPO createFeeCondition(double lower, double upper)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_FEE)
.withTgtValue(lower)
.withUpperTgtValue(upper)
.withSrc(this)
.withModifier(this.getPattern().getModifier())
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
public BankPO createFeeAssignment(double value)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_FEE)
.withTgtValue(value)
.withSrc(this)
.withModifier(Pattern.CREATE)
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
public double getFee()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).getFee();
}
return 0;
}
public BankPO withFee(double value)
{
if (this.getPattern().getHasMatch())
{
((Bank) getCurrentMatch()).setFee(value);
}
return this;
}
public BankPO createBankNameCondition(String value)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_BANKNAME)
.withTgtValue(value)
.withSrc(this)
.withModifier(this.getPattern().getModifier())
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
public BankPO createBankNameCondition(String lower, String upper)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_BANKNAME)
.withTgtValue(lower)
.withUpperTgtValue(upper)
.withSrc(this)
.withModifier(this.getPattern().getModifier())
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
public BankPO createBankNameAssignment(String value)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_BANKNAME)
.withTgtValue(value)
.withSrc(this)
.withModifier(Pattern.CREATE)
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
public String getBankName()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).getBankName();
}
return null;
}
public BankPO withBankName(String value)
{
if (this.getPattern().getHasMatch())
{
((Bank) getCurrentMatch()).setBankName(value);
}
return this;
}
public UserPO createCustomerUserPO()
{
UserPO result = new UserPO(new User[]{});
result.setModifier(this.getPattern().getModifier());
super.hasLink(Bank.PROPERTY_CUSTOMERUSER, result);
return result;
}
public UserPO createCustomerUserPO(String modifier)
{
UserPO result = new UserPO(new User[]{});
result.setModifier(modifier);
super.hasLink(Bank.PROPERTY_CUSTOMERUSER, result);
return result;
}
public BankPO createCustomerUserLink(UserPO tgt)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_CUSTOMERUSER);
}
public BankPO createCustomerUserLink(UserPO tgt, String modifier)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_CUSTOMERUSER, modifier);
}
public UserSet getCustomerUser()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) this.getCurrentMatch()).getCustomerUser();
}
return null;
}
public TransactionPO createTransactionPO()
{
TransactionPO result = new TransactionPO(new Transaction[]{});
result.setModifier(this.getPattern().getModifier());
super.hasLink(Bank.PROPERTY_TRANSACTION, result);
return result;
}
public TransactionPO createTransactionPO(String modifier)
{
TransactionPO result = new TransactionPO(new Transaction[]{});
result.setModifier(modifier);
super.hasLink(Bank.PROPERTY_TRANSACTION, result);
return result;
}
public BankPO createTransactionLink(TransactionPO tgt)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_TRANSACTION);
}
public BankPO createTransactionLink(TransactionPO tgt, String modifier)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_TRANSACTION, modifier);
}
public Transaction getTransaction()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) this.getCurrentMatch()).getTransaction();
}
return null;
}
public AccountPO createCustomerAccountsPO()
{
AccountPO result = new AccountPO(new Account[]{});
result.setModifier(this.getPattern().getModifier());
super.hasLink(Bank.PROPERTY_CUSTOMERACCOUNTS, result);
return result;
}
public AccountPO createCustomerAccountsPO(String modifier)
{
AccountPO result = new AccountPO(new Account[]{});
result.setModifier(modifier);
super.hasLink(Bank.PROPERTY_CUSTOMERACCOUNTS, result);
return result;
}
public BankPO createCustomerAccountsLink(AccountPO tgt)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_CUSTOMERACCOUNTS);
}
public BankPO createCustomerAccountsLink(AccountPO tgt, String modifier)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_CUSTOMERACCOUNTS, modifier);
}
public AccountSet getCustomerAccounts()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) this.getCurrentMatch()).getCustomerAccounts();
}
return null;
}
//==========================================================================
public boolean validateLogin(int accountID, String username, String password)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).validateLogin(accountID, username, password);
}
return false;
}
//==========================================================================
public org.sdmlib.openbank.Account findAccountByID(int accountID)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).findAccountByID(accountID);
}
return null;
}
//==========================================================================
public org.sdmlib.openbank.User findUserByID(String userID)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).findUserByID(userID);
}
return null;
}
//==========================================================================
public boolean confirmTransaction(int toAcctID, int fromAcctID, BigInteger dollarValue, BigInteger decimalValue)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).confirmTransaction(toAcctID, fromAcctID, dollarValue, decimalValue);
}
return false;
}
public UserPO createAdminUsersPO()
{
UserPO result = new UserPO(new User[]{});
result.setModifier(this.getPattern().getModifier());
super.hasLink(Bank.PROPERTY_ADMINUSERS, result);
return result;
}
public UserPO createAdminUsersPO(String modifier)
{
UserPO result = new UserPO(new User[]{});
result.setModifier(modifier);
super.hasLink(Bank.PROPERTY_ADMINUSERS, result);
return result;
}
public BankPO createAdminUsersLink(UserPO tgt)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_ADMINUSERS);
}
public BankPO createAdminUsersLink(UserPO tgt, String modifier)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_ADMINUSERS, modifier);
}
public UserSet getAdminUsers()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) this.getCurrentMatch()).getAdminUsers();
}
return null;
}
public AccountPO createAdminAccountsPO()
{
AccountPO result = new AccountPO(new Account[]{});
result.setModifier(this.getPattern().getModifier());
super.hasLink(Bank.PROPERTY_ADMINACCOUNTS, result);
return result;
}
public AccountPO createAdminAccountsPO(String modifier)
{
AccountPO result = new AccountPO(new Account[]{});
result.setModifier(modifier);
super.hasLink(Bank.PROPERTY_ADMINACCOUNTS, result);
return result;
}
public BankPO createAdminAccountsLink(AccountPO tgt)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_ADMINACCOUNTS);
}
public BankPO createAdminAccountsLink(AccountPO tgt, String modifier)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_ADMINACCOUNTS, modifier);
}
public AccountSet getAdminAccounts()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) this.getCurrentMatch()).getAdminAccounts();
}
return null;
}
//==========================================================================
public FeeValuePO createFeeValuePO()
{
FeeValuePO result = new FeeValuePO(new FeeValue[]{});
result.setModifier(this.getPattern().getModifier());
super.hasLink(Bank.PROPERTY_FEEVALUE, result);
return result;
}
public FeeValuePO createFeeValuePO(String modifier)
{
FeeValuePO result = new FeeValuePO(new FeeValue[]{});
result.setModifier(modifier);
super.hasLink(Bank.PROPERTY_FEEVALUE, result);
return result;
}
public BankPO createFeeValueLink(FeeValuePO tgt)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_FEEVALUE);
}
public BankPO createFeeValueLink(FeeValuePO tgt, String modifier)
{
return hasLinkConstraint(tgt, Bank.PROPERTY_FEEVALUE, modifier);
}
public FeeValueSet getFeeValue()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) this.getCurrentMatch()).getFeeValue();
}
return null;
}
public boolean disableUser(String userID, StringBuilder msg) {
if (this.getPattern().getHasMatch()) {
return ((Bank) getCurrentMatch()).disableUser(userID, msg);
}
return false;
}
//==========================================================================
public String Login(String username, String password)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).Login(username, password);
}
return null;
}
//==========================================================================
public java.math.BigInteger withDrawFunds(int accountNum, BigInteger amount, StringBuilder msg) {
if (this.getPattern().getHasMatch()) {
return ((Bank) getCurrentMatch()).withDrawFunds(accountNum, amount, msg);
}
return null;
}
//==========================================================================
public void generateCode()
{
if (this.getPattern().getHasMatch())
{
((Bank) getCurrentMatch()).generateCode();
}
}
//==========================================================================
public java.math.BigInteger depositFunds(int accountNum, BigInteger amount, StringBuilder msg)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).depositFunds(accountNum, amount, msg);
}
return null;
}
//==========================================================================
public String updateUserInfo(String userID, String fieldName, String fieldValue)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).updateUserInfo(userID, fieldName, fieldValue);
}
return null;
}
//==========================================================================
public int getNextID()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).getNextID();
}
return 0;
}
//==========================================================================
public boolean closeAccount(int accountNumber, StringBuilder msg) {
if (this.getPattern().getHasMatch()) {
return ((Bank) getCurrentMatch()).closeAccount(accountNumber, msg);
}
return false;
}
public Set getTransactions(int accountNumber, BigInteger amount, Date date)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).getTransactions(accountNumber, amount, date);
}
return null;
}
//==========================================================================
public String getSecureID(String secretWord, byte[] salt)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).getSecureID(secretWord, salt);
}
return null;
}
//==========================================================================
//==========================================================================
/*public String getSecureID(String secretWord, byte salt)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).getSecureID(secretWord, salt);
}
return null;
}*/
//==========================================================================
public boolean confirmCode(String code)
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).confirmCode(code);
}
return false;
}
public BankPO createPasswordCodeCondition(String value)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_PASSWORDCODE)
.withTgtValue(value)
.withSrc(this)
.withModifier(this.getPattern().getModifier())
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
public BankPO createPasswordCodeCondition(String lower, String upper)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_PASSWORDCODE)
.withTgtValue(lower)
.withUpperTgtValue(upper)
.withSrc(this)
.withModifier(this.getPattern().getModifier())
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
public BankPO createPasswordCodeAssignment(String value)
{
new AttributeConstraint()
.withAttrName(Bank.PROPERTY_PASSWORDCODE)
.withTgtValue(value)
.withSrc(this)
.withModifier(Pattern.CREATE)
.withPattern(this.getPattern());
super.filterAttr();
return this;
}
//==========================================================================
public void recordTransaction(int sender, int receiver, TransactionTypeEnum type, BigInteger amount, String note, boolean isAdmin, StringBuilder msg)
{
if (this.getPattern().getHasMatch())
{
((Bank) getCurrentMatch()).recordTransaction(sender, receiver, type, amount, note, isAdmin, msg);
}
}
public String getPasswordCode()
{
if (this.getPattern().getHasMatch())
{
return ((Bank) getCurrentMatch()).getPasswordCode();
}
return null;
}
public BankPO withPasswordCode(String value)
{
if (this.getPattern().getHasMatch())
{
((Bank) getCurrentMatch()).setPasswordCode(value);
}
return this;
}
}
| |
/*
 * Copyright 2015-2016 NETHead <NETHead@gmx.net>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.tinymediamanager.scraper.aebn;
import java.io.InputStream;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.tinymediamanager.scraper.Certification;
import org.tinymediamanager.scraper.MediaArtwork;
import org.tinymediamanager.scraper.MediaArtwork.FanartSizes;
import org.tinymediamanager.scraper.MediaArtwork.MediaArtworkType;
import org.tinymediamanager.scraper.MediaCastMember;
import org.tinymediamanager.scraper.MediaCastMember.CastType;
import org.tinymediamanager.scraper.MediaGenres;
import org.tinymediamanager.scraper.MediaMetadata;
import org.tinymediamanager.scraper.MediaProviderInfo;
import org.tinymediamanager.scraper.MediaScrapeOptions;
import org.tinymediamanager.scraper.MediaSearchOptions;
import org.tinymediamanager.scraper.MediaSearchResult;
import org.tinymediamanager.scraper.MediaType;
import org.tinymediamanager.scraper.http.Url;
import org.tinymediamanager.scraper.mediaprovider.IMediaArtworkProvider;
import org.tinymediamanager.scraper.mediaprovider.IMovieMetadataProvider;
import org.tinymediamanager.scraper.util.MetadataUtil;
import org.tinymediamanager.scraper.util.StrgUtils;
import net.xeoh.plugins.base.annotations.PluginImplementation;
/**
 * A meta data provider class for scraping aebn.net.
 *
 * Implements scraping of meta data and artwork.
 *
 * @author NETHead <NETHead@gmx.net>
 * @version 0.3
 * @see IMovieMetadataProvider
 * @see IMediaArtworkProvider
 *
 */
@PluginImplementation
public class AebnMetadataProvider implements IMovieMetadataProvider, IMediaArtworkProvider {
private static AebnMetadataProvider instance;
private static final String AEBNID = "AebnID";
private static MediaProviderInfo providerInfo = createMediaProviderInfo();
private static final Logger LOGGER = LoggerFactory.getLogger(AebnMetadataProvider.class);
private static final String BASE_DATAURL = "http://theater.aebn.net";
private static final String BASE_IMGURL = "http://pic.aebn.net";
private static final Integer SEARCH_COUNT = 60;
public static synchronized AebnMetadataProvider getInstance() {
if (instance == null) {
instance = new AebnMetadataProvider();
}
return instance;
}
  /** No-arg constructor; performs no initialization (all state is static). Prefer {@link #getInstance()}. */
  public AebnMetadataProvider() {
  }
  /** Returns the static {@link MediaProviderInfo} describing this scraper. */
  @Override
  public MediaProviderInfo getProviderInfo() {
    return providerInfo;
  }
private static MediaProviderInfo createMediaProviderInfo() {
MediaProviderInfo providerInfo = new MediaProviderInfo(AEBNID, "aebn.net",
"<html><h3>Adult Entertainment Broadcast Network</h3><br />An adult movie database."
+ "This scraper is able to scrape metadata and artwork.</html>",
AebnMetadataProvider.class.getResource("/aebn_net.png"));
providerInfo.setVersion(AebnMetadataProvider.class);
return providerInfo;
}
  /**
   * Search for movies at aebn.net.
   *
   * Performs a full-text search against the AEBN dispatcher, parses the result
   * page with Jsoup and converts each movie link into a {@link MediaSearchResult}
   * scored against the query string. Returns an empty list when the download or
   * parse fails; results are sorted descending by score.
   */
  @Override
  public List<MediaSearchResult> search(MediaSearchOptions query) throws Exception {
    LOGGER.debug("AEBN: search() {}", query);
    List<MediaSearchResult> resultList = new ArrayList<MediaSearchResult>();
    Elements movies = null;
    String searchString = "";
    // Search for query
    if (StringUtils.isNotEmpty(query.get(MediaSearchOptions.SearchParam.QUERY))) {
      searchString = query.get(MediaSearchOptions.SearchParam.QUERY);
    }
    // Search
    String searchUrl = BASE_DATAURL + "/dispatcher/fts?userQuery="
        + URLEncoder.encode(cleanSearchQuery(searchString), "UTF-8")
        + "&targetSearchMode=basic&isAdvancedSearch=true&isFlushAdvancedSearchCriteria=false" + "&count="
        + SEARCH_COUNT.toString() + "&imageType=Large&sortType=Relevance";
    try {
      LOGGER.info("========= BEGIN AEBN Scraper Search for: {}", searchString);
      Url url = new Url(searchUrl);
      InputStream in = url.getInputStream();
      Document doc = Jsoup.parse(in, "UTF-8", "");
      in.close();
      // only look for movie links like
      // <a id="FTSMovieSearch_link_title_detail_30" ... </a>
      movies = doc.getElementsByAttributeValueMatching("id", "FTSMovieSearch_link_title_detail_\\d+");
      LOGGER.debug("AEBN: found {} search results", movies.size());
    } catch (Exception e) {
      LOGGER.error("AEBN: failed to search for {}: ", searchString, e);
    }
    // download/parse failure leaves 'movies' null -> return the empty list
    if (movies == null || movies.isEmpty()) {
      LOGGER.debug("AEBN: no movie found");
      return resultList;
    }
    // there are search results, so fill media data structure
    HashSet<String> foundResultUrls = new HashSet<String>();
    for (Element anchor : movies) {
      try {
        String movieUrl = BASE_DATAURL + StrgUtils.substr(anchor.toString(), "href=\\\"(.*?)\\\"");
        String movieId = StrgUtils.substr(anchor.toString(), "movieId=(\\d+)");
        String movieName = StringEscapeUtils.unescapeHtml4(anchor.text());
        String posterUrl = BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + movieId + "_160w.jpg";
        LOGGER.debug("AEBN: found movie {} (id{})", movieName, movieId);
        // check if it is a valid AEBN id
        // NOTE(review): an invalid id is only logged here; the result is still
        // added below — confirm this is intentional.
        if (!isValidAebnId(Integer.parseInt(movieId))) {
          LOGGER.error("AEBN: id({}) is not a valid aebn id", movieId);
        }
        MediaSearchResult sr = new MediaSearchResult(providerInfo.getId());
        sr.setId(movieId);
        sr.setIMDBId("");
        sr.setTitle(movieName);
        sr.setOriginalTitle(movieName);
        // sr.setYear not possible, no data at this point
        sr.setYear(null);
        sr.setMediaType(MediaType.MOVIE);
        sr.setUrl(movieUrl);
        sr.setPosterUrl(posterUrl);
        // compare score based on names
        float score = MetadataUtil.calculateScore(searchString, movieName);
        if (posterUrl.isEmpty() || posterUrl.contains("nopicture")) {
          LOGGER.debug("AEBN: no poster - downgrading score by 0.01");
          score = score - 0.01f;
        }
        sr.setScore(score);
        // check if result has at least a title and id
        if (StringUtils.isBlank(sr.getTitle()) || StringUtils.isBlank(sr.getId())) {
          LOGGER.warn("AEBN: no title nor id, skipping");
          continue;
        }
        // check if the movie has been already added to the search results
        if (foundResultUrls.contains(sr.getUrl())) {
          continue;
        }
        foundResultUrls.add(sr.getUrl());
        // populate extra arguments (deprecated)
        // MetadataUtil.copySearchQueryToSearchResult(query, sr);
        resultList.add(sr);
      } catch (Exception e) {
        // a malformed anchor (e.g. non-numeric movieId) only skips this one result
        LOGGER.warn("AEBN: error parsing search result: {}", e);
      }
    }
    // sort ascending by natural order, then reverse -> best score first
    Collections.sort(resultList);
    Collections.reverse(resultList);
    return resultList;
  }
  /**
   * Get movie meta data from aebn.net.
   *
   * Resolves the AEBN id (from a previous search result, or from the scrape
   * options), downloads the movie detail page and scrapes title, poster,
   * scene thumbs, runtime, year, collection, studio, genres, certification,
   * plot/tagline, actors (including their detail pages) and director into the
   * returned {@link MediaMetadata}. Returns cached metadata when the search
   * result already carries it, and a mostly-empty object when no valid id is
   * available or the download fails.
   */
  @Override
  public MediaMetadata getMetadata(MediaScrapeOptions options) throws Exception {
    LOGGER.debug("AEBN: getMetadata() {}", options);
    // check if there is already meta data present in the result
    if ((options.getResult() != null) && (options.getResult().getMediaMetadata() != null)) {
      LOGGER.debug("AEBN: return metadata from cache");
      return options.getResult().getMediaMetadata();
    }
    MediaMetadata md = new MediaMetadata(providerInfo.getId());
    Elements elements = null;
    Element element = null;
    Integer aebnId = 0;
    // get AebnId from previous search result
    if ((options.getResult() != null) && (options.getResult().getId() != null)) {
      aebnId = Integer.parseInt(options.getResult().getId());
      LOGGER.debug("AEBN: aebnId() from previous search result = {}", aebnId);
      // preset some values from search result (if there is one)
      // Use core.Utils.RemoveSortableName() if you want e.g. "Bourne Legacy, The" -> "The Bourne Legacy".
      md.storeMetadata(MediaMetadata.ORIGINAL_TITLE,
          StrgUtils.removeCommonSortableName(options.getResult().getOriginalTitle()));
      md.storeMetadata(MediaMetadata.TITLE, StrgUtils.removeCommonSortableName(options.getResult().getTitle()));
    }
    // or get AebnId from options
    if (!isValidAebnId(aebnId) && (options.getId(AEBNID) != null)) {
      LOGGER.debug("AEBN: aebnId() from options = {}", options.getId(AEBNID));
      aebnId = Integer.parseInt(options.getId(AEBNID));
    }
    if (!isValidAebnId(aebnId)) {
      LOGGER.warn("AEBN: no or incorrect aebnId, aborting");
      return md;
    }
    // ID
    md.setId(providerInfo.getId(), aebnId);
    LOGGER.debug("AEBN: aebnId({})", aebnId);
    // Base download url for data scraping
    String downloadUrl = BASE_DATAURL + "/dispatcher/movieDetail?movieId=" + aebnId;
    String locale = options.getLanguage().name();
    if (!StringUtils.isBlank(locale)) {
      downloadUrl = downloadUrl + "&locale=" + locale;
      LOGGER.debug("AEBN: used locale({})", locale);
    }
    // begin download and scrape
    try {
      LOGGER.debug("AEBN: download movie detail page");
      Url url = new Url(downloadUrl);
      InputStream in = url.getInputStream();
      Document document = Jsoup.parse(in, "UTF-8", "");
      in.close();
      // Title
      // <h1 itemprop="name" class="md-movieTitle" >Titelname</h1>
      LOGGER.debug("AEBN: parse title");
      elements = document.getElementsByAttributeValue("class", "md-movieTitle");
      if (elements.size() > 0) {
        LOGGER.debug("AEBN: {} elements found (should be one!)", elements.size());
        element = elements.first();
        String movieTitle = cleanString(element.text());
        LOGGER.debug("AEBN: title({})", movieTitle);
        md.storeMetadata(MediaMetadata.TITLE, movieTitle);
      }
      // Poster
      // front cover:
      // http://pic.aebn.net/Stream/Movie/Boxcovers/a66568_xlf.jpg
      String posterUrl = BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_xlf.jpg";
      md.storeMetadata(MediaMetadata.POSTER_URL, posterUrl);
      // Fanart/Background
      // http://pic.aebn.net/Stream/Movie/Scenes/a113324_s534541.jpg
      // <img class="sceneThumbnail" alt="Scene Thumbnail" title="Scene Thumbnail" onError="..."
      // src="http://pic.aebn.net/Stream/Movie/Scenes/a113324_s534544.jpg" onclick="..." />
      // NOTE(review): query uses "SceneThumbnail" while the sample markup above
      // shows "sceneThumbnail" — jsoup attribute-value matching is
      // case-insensitive, but confirm against the live page.
      LOGGER.debug("AEBN: parse fanart / scene thumbs");
      elements = document.getElementsByAttributeValue("class", "SceneThumbnail");
      LOGGER.debug("AEBN: {} elements found", elements.size());
      int i = 1;
      for (Element anchor : elements) {
        String backgroundUrl = anchor.attr("src");
        LOGGER.debug("AEBN: backgroundUrl{}({})", i, backgroundUrl);
        // stored under numbered keys "backgroundUrl1", "backgroundUrl2", ...
        // and read back by getArtwork()
        md.storeMetadata("backgroundUrl" + Integer.valueOf(i).toString(), backgroundUrl);
        i++;
      }
      // Runtime
      LOGGER.debug("AEBN: parse runtime");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[itemprop=duration]");
      if (elements.size() > 0) {
        LOGGER.debug("AEBN: " + elements.size() + " elements found (should be one!)");
        element = elements.first();
        String movieRuntime = cleanString(element.attr("content"));
        // duration comes as ISO-8601 (e.g. "PT90M"); keep only the minutes
        movieRuntime = StrgUtils.substr(movieRuntime, "PT(\\d+)M");
        LOGGER.debug("AEBN: runtime({})", movieRuntime);
        md.storeMetadata(MediaMetadata.RUNTIME, movieRuntime);
      }
      // Year
      LOGGER.debug("AEBN: parse year");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[itemprop=datePublished]");
      if (elements.size() > 0) {
        LOGGER.debug("AEBN: " + elements.size() + " elements found (should be one!)");
        element = elements.first();
        String movieYear = cleanString(element.attr("content"));
        // keep only the leading year from a yyyy-mm-dd style date
        movieYear = StrgUtils.substr(movieYear, "(\\d+)-");
        LOGGER.debug("AEBN: year({})", movieYear);
        md.storeMetadata(MediaMetadata.YEAR, movieYear);
      }
      // Series (Collection)
      LOGGER.debug("AEBN: parse collection");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[class=series]");
      if (elements.size() > 0) {
        LOGGER.debug("AEBN: {} elements found (should be one!)", elements.size());
        element = elements.first();
        String movieCollection = cleanString(element.text());
        // Fake a TMDB_SET based on the hash value of the collection name
        int movieCollectionHash = movieCollection.hashCode();
        md.storeMetadata(MediaMetadata.COLLECTION_NAME, movieCollection);
        md.storeMetadata(MediaMetadata.TMDB_SET, movieCollectionHash);
        LOGGER.debug("AEBN: collection({}), hashcode({})", movieCollection, movieCollectionHash);
      }
      // Studio
      LOGGER.debug("AEBN: parse studio");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[itemprop=productionCompany]");
      if (elements.size() > 0) {
        LOGGER.debug("AEBN: {} elements found (should be one!)", elements.size());
        String movieStudio = cleanString(elements.first().text());
        LOGGER.debug("AEBN: studio({})", movieStudio);
        md.storeMetadata(MediaMetadata.PRODUCTION_COMPANY, movieStudio);
      }
      // Genre
      LOGGER.debug("AEBN: parse genre");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[itemprop=genre]");
      for (Element g : elements) {
        md.addGenre(getTmmGenre(g.text()));
      }
      // add basic genre, since all genres at AEBN could be summarised
      // into this one
      md.addGenre(MediaGenres.EROTIC);
      // Certification
      // no data scrapeable---but obviously it's adult only, so simply
      // generate it
      String movieCertification = null;
      Certification certification = null;
      String country = options.getCountry().getAlpha2();
      LOGGER.debug("AEBN: generate certification for {}", country);
      // @formatter:off
      if (country.equals("DE")) { movieCertification = "FSK 18"; }
      if (country.equals("US")) { movieCertification = "NC-17"; }
      if (country.equals("GB")) { movieCertification = "R18"; }
      if (country.equals("FR")) { movieCertification = "18"; }
      if (country.equals("ES")) { movieCertification = "PX"; }
      if (country.equals("JP")) { movieCertification = "R18+"; }
      if (country.equals("IT")) { movieCertification = "V.M.18"; }
      if (country.equals("NL")) { movieCertification = "16"; }
      // @formatter:on
      certification = Certification.getCertification(options.getCountry(), movieCertification);
      if (certification != null) {
        LOGGER.debug("AEBN: certification({})", certification);
        md.addCertification(certification);
      }
      // Plot and Tagline
      LOGGER.debug("AEBN: parse plot");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[itemprop=about]");
      if (elements.size() > 0) {
        LOGGER.debug("AEBN: {} elements found (should be one!)", elements.size());
        String moviePlot = cleanString(elements.first().text());
        md.storeMetadata(MediaMetadata.PLOT, moviePlot);
        // no separate tagline available, so extract the first sentence
        // from the movie plot
        String movieTagline = StrgUtils.substr(moviePlot, "^(.*?[.!?:])");
        LOGGER.debug("AEBN: tagline(" + movieTagline + ")");
        md.storeMetadata(MediaMetadata.TAGLINE, movieTagline);
      }
      // Actors
      LOGGER.debug("AEBN: parse actors");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[itemprop=actor]");
      LOGGER.debug("AEBN: {} actors found", elements.size());
      for (Element anchor : elements) {
        String actorid = StrgUtils.substr(anchor.toString(), "starId=(\\d+)");
        String actorname = cleanString(anchor.select("[itemprop=name]").first().text());
        String actordetailsurl = BASE_DATAURL + anchor.attr("href");
        if (!actorname.isEmpty()) {
          LOGGER.debug("AEBN: add actor id({}), name({}), details({})", actorid, actorname, actordetailsurl);
          MediaCastMember cm = new MediaCastMember();
          cm.setType(MediaCastMember.CastType.ACTOR);
          cm.setName(actorname);
          if (!actorid.isEmpty()) {
            cm.setId(actorid);
          }
          // Actor detail page
          // best effort: a failed actor-page download only loses image/fanart,
          // the cast member is still added below
          try {
            Url starurl = new Url(actordetailsurl);
            InputStream starurlstream = starurl.getInputStream();
            Document stardocument = Jsoup.parse(starurlstream, "UTF-8", "");
            starurlstream.close();
            Elements elements2 = stardocument.getElementsByAttributeValue("class", "StarInfo");
            if (elements2.size() == 0) {
              LOGGER.debug("AEBN: no additional actor details found");
            } else {
              // Actor image
              String actorimage = elements2.select("[itemprop=image]").first().attr("src");
              LOGGER.debug("AEBN: actor image({})", actorimage);
              if (!actorimage.isEmpty()) {
                cm.setImageUrl(actorimage);
              }
              // Actor 'fanart' images
              // unsure if this is ever shown in tmm
              elements2 = stardocument.getElementsByAttributeValue("class", "StarDetailGallery")
                  .select("a");
              LOGGER.debug("AEBN: {} gallery images found", elements2.size());
              for (Element thumbnail : elements2) {
                LOGGER.debug("AEBN: add fanart image({})", thumbnail.attr("href"));
                cm.addFanart(thumbnail.attr("href"));
              }
            }
          } catch (Exception e) {
            LOGGER.error("AEBN: Error downloading {}: {}", actordetailsurl, e);
          }
          md.addCastMember(cm);
        }
      }
      // Director
      LOGGER.debug("AEBN: parse director");
      elements = document.getElementsByAttributeValue("id", "md-details").select("[itemprop=director]");
      if (elements.size() > 0) {
        LOGGER.debug("AEBN: {} elements found (should be one!)", elements.size());
        String directorid = StrgUtils.substr(elements.toString(), "directorID=(\\d+)");
        String directorname = cleanString(elements.select("[itemprop=name]").first().text());
        if (!directorname.isEmpty()) {
          MediaCastMember cm = new MediaCastMember(CastType.DIRECTOR);
          cm.setName(directorname);
          if (!directorid.isEmpty()) {
            cm.setId(directorid);
          }
          cm.setImageUrl("");
          md.addCastMember(cm);
          LOGGER.debug("AEBN: add director id({}), name({})", directorid, directorname);
        }
      }
      // Original Title
      // if we have no original title, just copy the title
      if (StringUtils.isBlank(md.getStringValue(MediaMetadata.ORIGINAL_TITLE))) {
        md.storeMetadata(MediaMetadata.ORIGINAL_TITLE, md.getStringValue(MediaMetadata.TITLE));
      }
    } catch (Exception e) {
      // NOTE(review): options.getResult() may be null when the id came from the
      // options (see above) — this log call could itself throw an NPE; verify.
      LOGGER.error("AEBN: Error parsing {}: {}", options.getResult().getUrl(), e);
    }
    return md;
  }
  /**
   * Get movie artwork from aebn.net.
   * <p>
   * <b>NOTICE:</b> Automatic image scraping does not work (aebnId is not transferred)! Must be set to manual image
   * scraping at the tmm movie scraper settings.
   * <p>
   * Builds poster and back-cover (stored as DISC) URLs directly from the AEBN id,
   * and for BACKGROUND artwork triggers a metadata scrape first to collect the
   * numbered "backgroundUrl&lt;i&gt;" entries stored by {@link #getMetadata}.
   */
  @Override
  public List<MediaArtwork> getArtwork(MediaScrapeOptions options) throws Exception {
    LOGGER.debug("AEBN: getArtwork() {}", options);
    List<MediaArtwork> artwork = new ArrayList<MediaArtwork>();
    MediaMetadata md;
    Integer aebnId = 0;
    // get aebnId from options
    if (options.getId(AEBNID) != null) {
      aebnId = Integer.parseInt(options.getId(AEBNID));
      LOGGER.debug("AEBN: got aebnId({}) from options", aebnId);
    }
    if (!isValidAebnId(aebnId)) {
      LOGGER.info("AEBN: could not scrape artwork, no or incorrect aebnId");
      return artwork;
    }
    // Poster
    if ((options.getArtworkType() == MediaArtworkType.ALL)
        || (options.getArtworkType() == MediaArtworkType.POSTER)) {
      // http://pic.aebn.net/Stream/Movie/Boxcovers/a136807_xlf.jpg
      // http://pic.aebn.net/Stream/Movie/Boxcovers/a136807_bf.jpg
      // http://pic.aebn.net/Stream/Movie/Boxcovers/a136807_160w.jpg
      MediaArtwork ma = new MediaArtwork();
      ma.setProviderId(providerInfo.getId());
      String posterpreviewUrl = BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_xlf.jpg";
      String posterUrl = posterpreviewUrl;
      ma.setDefaultUrl(posterUrl);
      ma.setPreviewUrl(posterpreviewUrl);
      ma.addImageSize(380, 540, BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_xlf.jpg");
      ma.addImageSize(220, 313, BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_bf.jpg");
      ma.addImageSize(160, 227, BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_160w.jpg");
      ma.setSizeOrder(FanartSizes.MEDIUM.getOrder());
      ma.setLanguage(options.getLanguage().name());
      ma.setType(MediaArtworkType.POSTER);
      artwork.add(ma);
      LOGGER.debug("AEBN: add poster({})", posterpreviewUrl);
    }
    // Poster Back (stored as Disc)
    if ((options.getArtworkType() == MediaArtworkType.ALL) || (options.getArtworkType() == MediaArtworkType.DISC)) {
      // http://pic.aebn.net/Stream/Movie/Boxcovers/a136807_xlb.jpg
      // http://pic.aebn.net/Stream/Movie/Boxcovers/a136807_bb.jpg
      MediaArtwork ma = new MediaArtwork();
      ma.setProviderId(providerInfo.getId());
      String posterpreviewUrl = BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_xlb.jpg";
      String posterUrl = posterpreviewUrl;
      ma.setDefaultUrl(posterUrl);
      ma.setPreviewUrl(posterpreviewUrl);
      ma.addImageSize(380, 540, BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_xlb.jpg");
      ma.addImageSize(220, 313, BASE_IMGURL + "/Stream/Movie/Boxcovers/a" + aebnId.toString() + "_bb.jpg");
      ma.setSizeOrder(FanartSizes.MEDIUM.getOrder());
      ma.setLanguage(options.getLanguage().name());
      ma.setType(MediaArtworkType.DISC);
      artwork.add(ma);
      LOGGER.debug("AEBN: add poster({})", posterpreviewUrl);
    }
    if ((options.getArtworkType() == MediaArtworkType.ALL)
        || (options.getArtworkType() == MediaArtworkType.BACKGROUND)) {
      // Need to scrape movie metadata first (see getMetaData() -> Fanart/Background)
      md = getMetadata(options);
      LOGGER.debug("AEBN: return from media metadata scraping");
      aebnId = Integer.parseInt(options.getId(AEBNID));
      // walk the numbered backgroundUrl<i> keys until the first missing one
      int i = 1;
      while (!md.getStringValue("backgroundUrl" + Integer.valueOf(i).toString()).isEmpty()) {
        MediaArtwork ma = new MediaArtwork();
        ma.setProviderId(providerInfo.getId());
        String backgroundUrl = md.getStringValue("backgroundUrl" + Integer.valueOf(i).toString());
        ma.setDefaultUrl(backgroundUrl);
        // NOTE(review): preview is derived by stripping "_179_101" from the URL;
        // the scraped src may not contain that suffix — confirm against live data.
        ma.setPreviewUrl(backgroundUrl.replace("_179_101", ""));
        ma.addImageSize(179, 101, backgroundUrl);
        ma.addImageSize(120, 68, backgroundUrl.replace("_179_101", ""));
        ma.setSizeOrder(FanartSizes.SMALL.getOrder());
        ma.setLanguage(options.getLanguage().name());
        ma.setType(MediaArtworkType.BACKGROUND);
        artwork.add(ma);
        LOGGER.debug("AEBN: add background({})", backgroundUrl);
        i++;
      }
    }
    return artwork;
  }
/**
 * Maps scraper genres to internal TMM genres.
 *
 * @param genre
 *          genre name to map; may be null or empty
 * @return the mapped MediaGenres genre, or null if no genre name was given
 */
private MediaGenres getTmmGenre(String genre) {
  LOGGER.debug("AEBN: getTmmGenre() {}", genre);
  // null-safe emptiness check (the old genre.isEmpty() NPEd on null input);
  // the old "if (g == null)" guard right after "g = null" was dead code and is gone
  if (StringUtils.isEmpty(genre)) {
    return null;
  }
  return MediaGenres.getGenre(genre);
}
/**
 * Sanitizes a string (remove non breaking spaces and trim).
 *
 * @param oldString
 *          string to clean; may be null or empty
 * @return the cleaned string, never null
 */
private String cleanString(String oldString) {
  if (StringUtils.isEmpty(oldString)) {
    return "";
  }
  // swap every non breaking space (U+00A0) for a plain space, then trim
  return StringUtils.trim(oldString.replace('\u00A0', ' '));
}
/**
 * Sanitizes the search query by removing
 * <ul>
 * <li>stop words (a, the, der, die, das, la, le, il),
 * <li>digit values (e.g. year),
 * <li>punctuation marks,
 * <li>multiple spaces.
 * </ul>
 *
 * @param query
 *          search query string to clean; may be null or empty
 * @return the cleaned search query string, never null
 */
private String cleanSearchQuery(String query) {
  if (StringUtils.isEmpty(query)) {
    return "";
  }
  // pad with spaces so the space-delimited stop word patterns also match at both ends
  String cleaned = " " + query + " ";
  // each match consumes its trailing space, so back-to-back stop words
  // ("the la ...") need another pass; loop until a fixpoint is reached
  // (resolves the old "TODO: multiple stopwords")
  String previous;
  do {
    previous = cleaned;
    cleaned = cleaned.replaceAll("(?i)( a | the | der | die | das | la | le | il |\\(\\d+\\))", " ");
  } while (!cleaned.equals(previous));
  // strip punctuation, then collapse runs of whitespace
  cleaned = cleaned.replaceAll("[\\.#&:!?,]", " ");
  cleaned = cleaned.replaceAll("\\s{2,}", " ");
  return StringUtils.trim(cleaned);
}
/**
 * Validates an AEBN id.
 *
 * @param aebnId
 *          the AEBN id to be validated; may be null
 * @return true if is a valid AEBN id, false otherwise
 */
private static boolean isValidAebnId(Integer aebnId) {
  if (aebnId == null) {
    return false;
  }
  // valid ids are strictly positive and below one million
  int id = aebnId.intValue();
  return id > 0 && id < 1000000;
}
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.idea.svn;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.CharsetToolkit;
import com.intellij.util.containers.MultiMap;
import junit.framework.Assert;
import junit.framework.TestCase;
import org.jetbrains.idea.svn.api.Depth;
import org.jetbrains.idea.svn.api.NodeKind;
import org.jetbrains.idea.svn.info.Info;
import org.jetbrains.idea.svn.info.SvnInfoHandler;
import org.jetbrains.idea.svn.status.CmdStatusClient;
import org.jetbrains.idea.svn.status.PortableStatus;
import org.jetbrains.idea.svn.status.SvnStatusHandler;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.wc.SVNRevision;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
/**
* Created with IntelliJ IDEA.
* User: Irina.Chernushina
* Date: 11/9/12
* Time: 7:03 PM
*/
public class SvnParseCommandLineParseTest extends TestCase {
public static final String LINUX_ROOT = "/c7181320/";
/**
 * Parses a sample {@code svn info --xml} output with a SAX parser and verifies
 * that {@link SvnInfoHandler} produces an {@link Info} object from it.
 */
public void testInfo() throws Exception {
final String s = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<info>\n" +
"<entry\n" +
" path=\"ReduceReturnOrYieldBreakTransformation.cs\"\n" +
" revision=\"91603\"\n" +
" kind=\"file\">\n" +
"<url>http://svn.labs.intellij.net/resharper/trunk/ReSharper/src/Decompiler.Core/Src/Transformations/StatementStructure/ReduceReturnOrYieldBreakTransformation.cs</url>\n" +
"<repository>\n" +
"<root>http://svn.labs.intellij.net/resharper</root>\n" +
"<uuid>ed0594e5-7722-0410-9c76-949374689613</uuid>\n" +
"</repository>\n" +
"<wc-info>\n" +
"<wcroot-abspath>C:/TestProjects/sortedProjects/Subversion/Resharper17short</wcroot-abspath>\n" +
"<schedule>normal</schedule>\n" +
"<depth>infinity</depth>\n" +
"<text-updated>2012-01-20T11:25:32.625000Z</text-updated>\n" +
"<checksum>7af8adacb93afaa48b2cfb76de605824c220983a</checksum>\n" +
"</wc-info>\n" +
"<commit\n" +
" revision=\"87972\">\n" +
"<author>Slava.Trenogin</author>\n" +
"<date>2011-10-06T21:27:41.539022Z</date>\n" +
"</commit>\n" +
"</entry>\n" +
"</info>";
// the handler publishes the parsed result through this one-element array
final Info[] info = new Info[1];
final SvnInfoHandler handler = new SvnInfoHandler(new File("C:/base/"), info1 -> info[0] = info1);
SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
parser.parse(new ByteArrayInputStream(s.getBytes(CharsetToolkit.UTF8_CHARSET)), handler);
// an Info must have been produced for the single <entry> in the sample
Assert.assertNotNull(info[0]);
}
/**
 * Parses a large sample of {@code svn status --xml} output through
 * {@link SvnStatusHandler}. The sample mixes many working copy states
 * (incomplete, missing, external, ignored, unversioned, deleted, added/copied,
 * tree-conflicted, modified) plus a changelist section. The actual assertions
 * live inside the path convertor handed to the handler: they verify that entry
 * paths are resolved correctly against the (platform dependent) base path.
 */
public void testStatus() throws Exception {
final String s = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<status>\n" +
"<target\n" +
" path=\".\">\n" +
"<entry\n" +
" path=\".\">\n" +
"<wc-status\n" +
" props=\"normal\"\n" +
" item=\"incomplete\"\n" +
" revision=\"92339\">\n" +
"<commit\n" +
" revision=\"91672\">\n" +
"<author>qx</author>\n" +
"<date>2012-01-27T16:11:06.069351Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"Decompiler\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" tree-conflicted=\"true\"\n" +
" item=\"missing\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"Help\">\n" +
"<wc-status\n" +
" props=\"normal\"\n" +
" item=\"incomplete\"\n" +
" revision=\"92339\">\n" +
"<commit\n" +
" revision=\"91555\">\n" +
"<author>Egor.Malyshev</author>\n" +
"<date>2012-01-18T18:05:07.328519Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"Help\\XML\">\n" +
"<wc-status\n" +
" props=\"normal\"\n" +
" item=\"incomplete\"\n" +
" revision=\"92339\">\n" +
"<commit\n" +
" revision=\"91555\">\n" +
"<author>Egor.Malyshev</author>\n" +
"<date>2012-01-18T18:05:07.328519Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"Help\\XML\\images\">\n" +
"<wc-status\n" +
" props=\"normal\"\n" +
" item=\"incomplete\"\n" +
" revision=\"92339\">\n" +
"<commit\n" +
" revision=\"91170\">\n" +
"<author>Maria.Egorkina</author>\n" +
"<date>2011-12-20T13:26:52.217550Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"Platform\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"external\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"ReSharper.ipr\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"91677\"\n" +
" props=\"normal\">\n" +
"<commit\n" +
" revision=\"87631\">\n" +
"<author>Alexey.Kuptsov</author>\n" +
"<date>2011-09-30T11:25:10.391467Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"ReSharper.iws\">\n" +
"<wc-status\n" +
" item=\"ignored\"\n" +
" props=\"none\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"measure.bat\">\n" +
"<wc-status\n" +
" item=\"unversioned\"\n" +
" props=\"none\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"src\\Daemon\\src\\HighlightingBase.cs\">\n" +
"<wc-status\n" +
" item=\"deleted\"\n" +
" revision=\"91677\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"79264\">\n" +
"<author>xvost</author>\n" +
"<date>2011-02-05T16:06:12.116814Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"src\\Daemon\\src\\HighlightingBase1.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" copied=\"true\"\n" +
" item=\"added\">\n" +
"<commit\n" +
" revision=\"79264\">\n" +
"<author>xvost</author>\n" +
"<date>2011-02-05T16:06:12.116814Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"src\\Decompiler.Core\">\n" +
"<wc-status\n" +
" item=\"added\"\n" +
" props=\"normal\"\n" +
" copied=\"true\"\n" +
" tree-conflicted=\"true\">\n" +
"<commit\n" +
" revision=\"91559\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2012-01-18T19:17:40.876383Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"src\\Decompiler.Core\\Src\\Transformations\\Loops\\EliminateRedundantContinueTransformation.cs\">\n" +
"<wc-status\n" +
" props=\"normal\"\n" +
" copied=\"true\"\n" +
" item=\"modified\">\n" +
"<commit\n" +
" revision=\"87972\">\n" +
"<author>Slava.Trenogin</author>\n" +
"<date>2011-10-06T21:27:41.539022Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"src\\Decompiler.Core\\Src\\Transformations\\StatementStructure\\ReduceReturnOrYieldBreakTransformation.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" props=\"normal\"\n" +
" copied=\"true\">\n" +
"<commit\n" +
" revision=\"87972\">\n" +
"<author>Slava.Trenogin</author>\n" +
"<date>2011-10-06T21:27:41.539022Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"stat.txt\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"unversioned\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\CommonServices\\src\\Services\\CommonServices.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"73708\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-09-10T14:14:04.090943Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Document.Tests\\AssemblyInfo.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"86795\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2011-09-11T13:54:16.917943Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Document.Tests\\RangeMarkerTest.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"77688\">\n" +
"<author>Alexander.Zverev</author>\n" +
"<date>2010-12-14T15:56:38.322018Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\AssemblyInfo.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"82127\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2011-04-13T20:57:30.828600Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\AssemblyNameInfo.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"90865\">\n" +
"<author>xvost</author>\n" +
"<date>2011-12-13T13:10:30.902950Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\BinaryBlobExtensions.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"74075\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-09-17T09:32:30.827654Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\BinaryBlobReader.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"74075\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-09-17T09:32:30.827654Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\BinaryBlobStream.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"74075\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-09-17T09:32:30.827654Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\BlobOnReader.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"74075\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-09-17T09:32:30.827654Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\ComStreamWrapper.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"91348\">\n" +
"<author>Sergey.Shkredov</author>\n" +
"<date>2012-01-09T11:26:53.770349Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\EmptyBlob.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"74075\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-09-17T09:32:30.827654Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\GAC.cs\">\n" +
"<wc-status\n" +
" props=\"normal\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"75626\">\n" +
"<author>Slava.Trenogin</author>\n" +
"<date>2010-10-21T07:41:45.036722Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\GacUtil.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"91646\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2012-01-21T19:08:04.108471Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\IBinaryReader.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"71390\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-07-12T18:29:27.763006Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\IntInterval.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"55567\">\n" +
"<author>qx</author>\n" +
"<date>2009-06-03T10:11:14.985037Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\ManifestResourceUtil.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"87972\">\n" +
"<author>Slava.Trenogin</author>\n" +
"<date>2011-10-06T21:27:41.539022Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\MarshalSpecParser.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"76982\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-11-28T10:16:36.309593Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\MetadataHelpers.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"91348\">\n" +
"<author>Sergey.Shkredov</author>\n" +
"<date>2012-01-09T11:26:53.770349Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\ModuleQualificationUtil.cs\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"modified\"\n" +
" revision=\"92564\">\n" +
"<commit\n" +
" revision=\"60918\">\n" +
"<author>xvost</author>\n" +
"<date>2009-11-03T10:42:37.363952Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\StreamBinaryReader.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"71390\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-07-12T18:29:27.763006Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\Resharper17short\\Platform\\src\\Metadata\\src\\Utils\\SubStream.cs\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"92564\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"76725\">\n" +
"<author>Leonid.Shalupov</author>\n" +
"<date>2010-11-20T13:23:44.172899Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"</target>\n" +
"<changelist\n" +
" name=\"ads\">\n" +
"<entry\n" +
" path=\"BuildVsix.cmd\">\n" +
"<wc-status\n" +
" item=\"modified\"\n" +
" revision=\"91677\"\n" +
" props=\"none\">\n" +
"<commit\n" +
" revision=\"77579\">\n" +
"<author>Victor.Kropp</author>\n" +
"<date>2010-12-13T11:06:36.141754Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"</changelist>\n" +
"</status>\n";
// the handler is referenced from its own callback, hence the one-element array indirection
final SvnStatusHandler[] handlerArr = new SvnStatusHandler[1];
final boolean isWindows = SystemInfo.isWindows;
// fake working copy root; on *nix the sample "C:/" paths are re-rooted (see changePathsIfNix)
final String basePath = isWindows ? "C:/base/" : "/base33729/";
final SvnStatusHandler handler = new
SvnStatusHandler(new SvnStatusHandler.ExternalDataCallback() {
@Override
public void switchPath() {
// touch the pending status to force lazy evaluation during the parse
handlerArr[0].getPending().getKind();
}
@Override
public void switchChangeList(String newList) {
}
// the lambda below is the path convertor: it receives each entry's resolved File
}, new File(basePath), o -> {
try {
// must be resolvable to a canonical file without throwing
o.getCanonicalFile();
}
catch (IOException e) {
throw new RuntimeException(e);
}
if (isWindows) {
// a relative entry path wrongly prefixed with the absolute base would contain two drive colons
final int idx = o.getPath().indexOf(":");
Assert.assertTrue(idx > 0);
final int secondIdx = o.getPath().indexOf(":", idx + 1);
Assert.assertTrue(o.getPath(), secondIdx == -1);
} else {
// on *nix, absolute sample paths (under LINUX_ROOT) must not also contain the base path
if (o.getPath().contains(LINUX_ROOT)) {
Assert.assertFalse(o.getPath().contains(basePath));
}
}
try {
// the handler only needs some Info back; the stub's content is irrelevant here
return createStubInfo(basePath + "1", "http://a.b.c");
}
catch (SVNException e) {
//
throw new RuntimeException(e);
}
});
handlerArr[0] = handler;
final String osChecked = changePathsIfNix(s);
SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
parser.parse(new ByteArrayInputStream(osChecked.getBytes(CharsetToolkit.UTF8_CHARSET)), handler);
// smoke call only: the parse above must have completed; the result itself is not asserted
final MultiMap<String,PortableStatus> changes = handler.getCurrentListChanges();
}
/**
 * Adapts the Windows-style sample data to the current platform: on Windows the
 * input is returned unchanged, on *nix all backslashes become forward slashes
 * and the "C:/" drive prefix is replaced with {@link #LINUX_ROOT}.
 */
private String changePathsIfNix(String s) {
  if (SystemInfo.isWindows) {
    return s;
  }
  final String independent = FileUtil.toSystemIndependentName(s);
  return StringUtil.replace(independent, "C:/", LINUX_ROOT);
}
/**
 * Builds a minimal {@link Info} stub (HEAD revision, file kind, fixed repository
 * root "http://a.b.c", revision 1) used as the path convertor result in the tests.
 *
 * @param basePath local path the stub describes
 * @param baseUrl  repository url for the stub (must be a valid encoded URI)
 * @throws SVNException if one of the urls cannot be parsed
 */
private Info createStubInfo(final String basePath, final String baseUrl) throws SVNException {
return new Info(basePath, SVNURL.parseURIEncoded(baseUrl), SVNRevision.HEAD, NodeKind.FILE, "",
SVNURL.parseURIEncoded("http://a.b.c"), 1, new Date(), "me", null, Depth.EMPTY);
}
/**
 * Verifies url reconstruction for entries living inside an svn:external ("slave"):
 * copied/deleted files below the external must get urls under the external's own
 * repository url ("http://external"), not under the main working copy url.
 * The assertions run inside the status callback created by {@link CmdStatusClient}.
 */
public void testStatusInExternalMove() throws Exception {
final String status = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<status>\n" +
"<target\n" +
" path=\".\">\n" +
"<entry\n" +
" path=\"slave\">\n" +
"<wc-status\n" +
" item=\"external\"\n" +
" props=\"none\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"src\\com\\test\\just\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"unversioned\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\local2\\sep12main\\main\\slave\\src\\com\\slave\\MacMessagesParser.java\">\n" +
"<wc-status\n" +
" item=\"added\"\n" +
" props=\"none\"\n" +
" copied=\"true\">\n" +
"<commit\n" +
" revision=\"7\">\n" +
"<author>admin</author>\n" +
"<date>2012-09-12T12:16:51.621000Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"C:\\TestProjects\\sortedProjects\\Subversion\\local2\\sep12main\\main\\slave\\src\\com\\slave\\SomeOtherClass.java\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"deleted\"\n" +
" revision=\"7\">\n" +
"<commit\n" +
" revision=\"7\">\n" +
"<author>admin</author>\n" +
"<date>2012-09-12T12:16:51.621000Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"</target>\n" +
"</status>";
final String basePath = "C:\\TestProjects\\sortedProjects\\Subversion\\local2\\sep12main\\main";
// handler is needed inside its own callback, hence the one-element array indirection
final SvnStatusHandler[] handler = new SvnStatusHandler[1];
final File baseFile = new File(basePath);
final SvnStatusHandler.ExternalDataCallback callback = CmdStatusClient.createStatusCallback(status1 -> {
System.out.println(status1.getURL());
// both files below the "slave" external must resolve under the external's url
if (new File(
"C:\\TestProjects\\sortedProjects\\Subversion\\local2\\sep12main\\main\\slave\\src\\com\\slave\\MacMessagesParser.java")
.equals(status1.getFile())) {
Assert.assertEquals("http://external/src/com/slave/MacMessagesParser.java", status1.getURL().toString());
}
if (new File("C:\\TestProjects\\sortedProjects\\Subversion\\local2\\sep12main\\main\\slave\\src\\com\\slave\\SomeOtherClass.java")
.equals(status1.getFile())) {
Assert.assertEquals("http://external/src/com/slave/SomeOtherClass.java", status1.getURL().toString());
}
}, baseFile, createStubInfo(basePath, "http://mainurl/"), handler);
handler[0] = new SvnStatusHandler(callback, baseFile, o -> {
try {
// the external root maps to its own repository url; everything else gets a dummy url
if (new File("C:\\TestProjects\\sortedProjects\\Subversion\\local2\\sep12main\\main\\slave").equals(o)) {
return createStubInfo(o.getPath(), "http://external");
}
return createStubInfo(o.getPath(), "http://12345");
}
catch (SVNException e) {
throw new RuntimeException(e);
}
});
SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
parser.parse(new ByteArrayInputStream(status.getBytes(CharsetToolkit.UTF8_CHARSET)), handler[0]);
// smoke call only: the parse above must have completed; the result itself is not asserted
final MultiMap<String,PortableStatus> changes = handler[0].getCurrentListChanges();
}
/**
 * Parses a status sample whose two entries carry {@code switched="true"} and
 * verifies that both parsed statuses report {@link PortableStatus#isSwitched()}
 * and match the expected (platform dependent) paths.
 */
public void testStatusWithSwitched() throws Exception {
final String s = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<status>\n" +
"<target\n" +
" path=\".\">\n" +
"<entry\n" +
" path=\"root\\source\\s1.txt\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" switched=\"true\"\n" +
" item=\"normal\"\n" +
" revision=\"5\">\n" +
"<commit\n" +
" revision=\"4\">\n" +
"<author>Irina.Chernushina</author>\n" +
"<date>2013-02-18T13:14:24.391537Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"<entry\n" +
" path=\"root\\target\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" switched=\"true\"\n" +
" item=\"normal\"\n" +
" revision=\"5\">\n" +
"<commit\n" +
" revision=\"4\">\n" +
"<author>Irina.Chernushina</author>\n" +
"<date>2013-02-18T13:14:24.391537Z</date>\n" +
"</commit>\n" +
"</wc-status>\n" +
"</entry>\n" +
"</target>\n" +
"</status>";
// handler is referenced from its own callback, hence the one-element array indirection
final SvnStatusHandler[] handlerArr = new SvnStatusHandler[1];
final boolean isWindows = SystemInfo.isWindows;
// fake working copy root; on *nix the sample "C:/" paths are re-rooted (see changePathsIfNix)
final String basePath = isWindows ? "C:/base/" : "/base33729/";
// collects every pending status so they can be checked after the parse
final Set<PortableStatus> statuses = new HashSet<>();
final SvnStatusHandler handler = new
SvnStatusHandler(new SvnStatusHandler.ExternalDataCallback() {
@Override
public void switchPath() {
statuses.add(handlerArr[0].getPending());
// touch the pending status to force lazy evaluation during the parse
handlerArr[0].getPending().getKind();
}
@Override
public void switchChangeList(String newList) {
}
// the lambda below is the path convertor: it receives each entry's resolved File
}, new File(basePath), o -> {
try {
// must be resolvable to a canonical file without throwing
o.getCanonicalFile();
}
catch (IOException e) {
throw new RuntimeException(e);
}
if (isWindows) {
// a relative entry path wrongly prefixed with the absolute base would contain two drive colons
final int idx = o.getPath().indexOf(":");
Assert.assertTrue(idx > 0);
final int secondIdx = o.getPath().indexOf(":", idx + 1);
Assert.assertTrue(o.getPath(), secondIdx == -1);
} else {
// on *nix, absolute sample paths (under LINUX_ROOT) must not also contain the base path
if (o.getPath().contains(LINUX_ROOT)) {
Assert.assertFalse(o.getPath().contains(basePath));
}
}
try {
// the handler only needs some Info back; the stub's content is irrelevant here
return createStubInfo(basePath + "1", "http://a.b.c");
}
catch (SVNException e) {
//
throw new RuntimeException(e);
}
});
handlerArr[0] = handler;
final String osChecked = changePathsIfNix(s);
SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
parser.parse(new ByteArrayInputStream(osChecked.getBytes(CharsetToolkit.UTF8_CHARSET)), handler);
// expected paths, adapted to the platform's separator
final String[] expected = {"root\\source\\s1.txt", "root\\target"};
for (int i = 0; i < expected.length; i++) {
expected[i] = FileUtil.toSystemDependentName(expected[i]);
}
int cntMatched = 0;
for (PortableStatus status : statuses) {
Assert.assertTrue(status.isSwitched());
final String path = FileUtil.toSystemDependentName(status.getPath());
for (String s1 : expected) {
if (s1.equals(path)) {
++ cntMatched;
break;
}
}
}
// both switched entries must have been seen exactly once
Assert.assertEquals(2, cntMatched);
}
/**
 * Parses a status sample with an empty target and a single added file inside
 * changelist "target"; verifies exactly one status is produced and that it
 * carries the correct path and changelist name.
 */
public void testOneFileInChangeListStatus() throws Exception {
final String s = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" +
"<status>\n" +
"<target\n" +
" path=\".\">\n" +
"</target>\n" +
"<changelist\n" +
" name=\"target\">\n" +
"<entry\n" +
" path=\"a.txt\">\n" +
"<wc-status\n" +
" props=\"none\"\n" +
" item=\"added\"\n" +
" revision=\"-1\">\n" +
"</wc-status>\n" +
"</entry>\n" +
"</changelist>\n" +
"</status>";
// handler is referenced from its own callback, hence the one-element array indirection
final SvnStatusHandler[] handlerArr = new SvnStatusHandler[1];
final boolean isWindows = SystemInfo.isWindows;
// fake working copy root; on *nix the sample "C:/" paths are re-rooted (see changePathsIfNix)
final String basePath = isWindows ? "C:/base/" : "/base33729/";
final Set<PortableStatus> statuses = new HashSet<>();
// holds the name of the changelist currently being parsed
final String[] clName = new String[1];
final SvnStatusHandler handler = new
SvnStatusHandler(new SvnStatusHandler.ExternalDataCallback() {
@Override
public void switchPath() {
// tag the pending status with the active changelist before collecting it
final PortableStatus pending = handlerArr[0].getPending();
pending.setChangelistName(clName[0]);
statuses.add(pending);
pending.getKind();
}
@Override
public void switchChangeList(String newList) {
clName[0] = newList;
}
// the lambda below is the path convertor: it receives each entry's resolved File
}, new File(basePath), o -> {
try {
// must be resolvable to a canonical file without throwing
o.getCanonicalFile();
}
catch (IOException e) {
throw new RuntimeException(e);
}
if (isWindows) {
// a relative entry path wrongly prefixed with the absolute base would contain two drive colons
final int idx = o.getPath().indexOf(":");
Assert.assertTrue(idx > 0);
final int secondIdx = o.getPath().indexOf(":", idx + 1);
Assert.assertTrue(o.getPath(), secondIdx == -1);
} else {
// on *nix, absolute sample paths (under LINUX_ROOT) must not also contain the base path
if (o.getPath().contains(LINUX_ROOT)) {
Assert.assertFalse(o.getPath().contains(basePath));
}
}
try {
// the handler only needs some Info back; the stub's content is irrelevant here
return createStubInfo(basePath + "1", "http://a.b.c");
}
catch (SVNException e) {
//
throw new RuntimeException(e);
}
});
handlerArr[0] = handler;
final String osChecked = changePathsIfNix(s);
SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
parser.parse(new ByteArrayInputStream(osChecked.getBytes(CharsetToolkit.UTF8_CHARSET)), handler);
Assert.assertEquals(1, statuses.size());
final PortableStatus next = statuses.iterator().next();
Assert.assertEquals("a.txt", next.getPath());
Assert.assertEquals("target", next.getChangelistName());
}
}
| |
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.remoting.rmi;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.rmi.Remote;
import java.rmi.RemoteException;
import javax.naming.NamingException;
import javax.rmi.PortableRemoteObject;
import org.aopalliance.intercept.MethodInterceptor;
import org.aopalliance.intercept.MethodInvocation;
import org.omg.CORBA.OBJECT_NOT_EXIST;
import org.omg.CORBA.SystemException;
import org.springframework.aop.support.AopUtils;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.jndi.JndiObjectLocator;
import org.springframework.remoting.RemoteAccessException;
import org.springframework.remoting.RemoteConnectFailureException;
import org.springframework.remoting.RemoteInvocationFailureException;
import org.springframework.remoting.RemoteLookupFailureException;
import org.springframework.remoting.support.DefaultRemoteInvocationFactory;
import org.springframework.remoting.support.RemoteInvocation;
import org.springframework.remoting.support.RemoteInvocationFactory;
import org.springframework.util.ReflectionUtils;
/**
* {@link org.aopalliance.intercept.MethodInterceptor} for accessing RMI services from JNDI.
* Typically used for RMI-IIOP (CORBA), but can also be used for EJB home objects
* (for example, a Stateful Session Bean home). In contrast to a plain JNDI lookup,
* this accessor also performs narrowing through PortableRemoteObject.
*
* <p>With conventional RMI services, this invoker is typically used with the RMI
* service interface. Alternatively, this invoker can also proxy a remote RMI service
* with a matching non-RMI business interface, i.e. an interface that mirrors the RMI
* service methods but does not declare RemoteExceptions. In the latter case,
* RemoteExceptions thrown by the RMI stub will automatically get converted to
* Spring's unchecked RemoteAccessException.
*
* <p>The JNDI environment can be specified as "jndiEnvironment" property,
* or be configured in a <code>jndi.properties</code> file or as system properties.
* For example:
*
* <pre class="code"><property name="jndiEnvironment">
* <props>
* <prop key="java.naming.factory.initial">com.sun.jndi.cosnaming.CNCtxFactory</prop>
* <prop key="java.naming.provider.url">iiop://localhost:1050</prop>
* </props>
* </property></pre>
*
* @author Juergen Hoeller
* @since 1.1
* @see #setJndiTemplate
* @see #setJndiEnvironment
* @see #setJndiName
* @see JndiRmiServiceExporter
* @see JndiRmiProxyFactoryBean
* @see org.springframework.remoting.RemoteAccessException
* @see java.rmi.RemoteException
* @see java.rmi.Remote
* @see javax.rmi.PortableRemoteObject#narrow
*/
public class JndiRmiClientInterceptor extends JndiObjectLocator
        implements MethodInterceptor, InitializingBean {

    // Business interface of the target service; may be null if the JNDI
    // lookup already returns a suitably typed stub.
    private Class serviceInterface;

    // Strategy for turning an AOP MethodInvocation into a serializable
    // RemoteInvocation (used for RMI-invoker style services).
    private RemoteInvocationFactory remoteInvocationFactory = new DefaultRemoteInvocationFactory();

    // Whether to fetch the stub eagerly in prepare().
    private boolean lookupStubOnStartup = true;

    // Whether to keep the located stub for subsequent invocations.
    private boolean cacheStub = true;

    // Whether to re-lookup the stub and retry once after a connect failure.
    private boolean refreshStubOnConnectFailure = false;

    // Cached stub; lazily populated under stubMonitor unless fetched at startup.
    private Remote cachedStub;

    // Monitor guarding lazy lookup and refresh of cachedStub.
    private final Object stubMonitor = new Object();

    /**
     * Set the interface of the service to access.
     * The interface must be suitable for the particular service and remoting tool.
     * <p>Typically required to be able to create a suitable service proxy,
     * but can also be optional if the lookup returns a typed stub.
     */
    public void setServiceInterface(Class serviceInterface) {
        if (serviceInterface != null && !serviceInterface.isInterface()) {
            throw new IllegalArgumentException("'serviceInterface' must be an interface");
        }
        this.serviceInterface = serviceInterface;
    }

    /**
     * Return the interface of the service to access.
     */
    public Class getServiceInterface() {
        return this.serviceInterface;
    }

    /**
     * Set the RemoteInvocationFactory to use for this accessor.
     * Default is a {@link DefaultRemoteInvocationFactory}.
     * <p>A custom invocation factory can add further context information
     * to the invocation, for example user credentials.
     */
    public void setRemoteInvocationFactory(RemoteInvocationFactory remoteInvocationFactory) {
        this.remoteInvocationFactory = remoteInvocationFactory;
    }

    /**
     * Return the RemoteInvocationFactory used by this accessor.
     */
    public RemoteInvocationFactory getRemoteInvocationFactory() {
        return this.remoteInvocationFactory;
    }

    /**
     * Set whether to look up the RMI stub on startup. Default is "true".
     * <p>Can be turned off to allow for late start of the RMI server.
     * In this case, the RMI stub will be fetched on first access.
     * @see #setCacheStub
     */
    public void setLookupStubOnStartup(boolean lookupStubOnStartup) {
        this.lookupStubOnStartup = lookupStubOnStartup;
    }

    /**
     * Set whether to cache the RMI stub once it has been located.
     * Default is "true".
     * <p>Can be turned off to allow for hot restart of the RMI server.
     * In this case, the RMI stub will be fetched for each invocation.
     * @see #setLookupStubOnStartup
     */
    public void setCacheStub(boolean cacheStub) {
        this.cacheStub = cacheStub;
    }

    /**
     * Set whether to refresh the RMI stub on connect failure.
     * Default is "false".
     * <p>Can be turned on to allow for hot restart of the RMI server.
     * If a cached RMI stub throws an RMI exception that indicates a
     * remote connect failure, a fresh proxy will be fetched and the
     * invocation will be retried.
     * @see java.rmi.ConnectException
     * @see java.rmi.ConnectIOException
     * @see java.rmi.NoSuchObjectException
     */
    public void setRefreshStubOnConnectFailure(boolean refreshStubOnConnectFailure) {
        this.refreshStubOnConnectFailure = refreshStubOnConnectFailure;
    }

    /**
     * Resolves the JNDI name via the superclass, then eagerly fetches the
     * stub if "lookupStubOnStartup" is enabled.
     * @throws NamingException if the JNDI name resolution fails
     */
    public void afterPropertiesSet() throws NamingException {
        super.afterPropertiesSet();
        prepare();
    }

    /**
     * Fetches the RMI stub on startup, if necessary.
     * @throws RemoteLookupFailureException if RMI stub creation failed
     * @see #setLookupStubOnStartup
     * @see #lookupStub
     */
    public void prepare() throws RemoteLookupFailureException {
        // Cache RMI stub on initialization?
        if (this.lookupStubOnStartup) {
            Remote remoteObj = lookupStub();
            if (logger.isDebugEnabled()) {
                if (remoteObj instanceof RmiInvocationHandler) {
                    logger.debug("JNDI RMI object [" + getJndiName() + "] is an RMI invoker");
                }
                else if (getServiceInterface() != null) {
                    boolean isImpl = getServiceInterface().isInstance(remoteObj);
                    logger.debug("Using service interface [" + getServiceInterface().getName() +
                            "] for JNDI RMI object [" + getJndiName() + "] - " +
                            (!isImpl ? "not " : "") + "directly implemented");
                }
            }
            if (this.cacheStub) {
                this.cachedStub = remoteObj;
            }
        }
    }

    /**
     * Create the RMI stub, typically by looking it up.
     * <p>Called on interceptor initialization if "cacheStub" is "true";
     * else called for each invocation by {@link #getStub()}.
     * <p>The default implementation retrieves the service from the
     * JNDI environment. This can be overridden in subclasses.
     * @return the RMI stub to store in this interceptor
     * @throws RemoteLookupFailureException if RMI stub creation failed
     * @see #setCacheStub
     * @see #lookup
     */
    protected Remote lookupStub() throws RemoteLookupFailureException {
        try {
            Object stub = lookup();
            // Narrow through PortableRemoteObject only when the target interface
            // is itself an RMI interface (RMI-IIOP requirement).
            if (getServiceInterface() != null && Remote.class.isAssignableFrom(getServiceInterface())) {
                try {
                    stub = PortableRemoteObject.narrow(stub, getServiceInterface());
                }
                catch (ClassCastException ex) {
                    throw new RemoteLookupFailureException(
                            "Could not narrow RMI stub to service interface [" + getServiceInterface().getName() + "]", ex);
                }
            }
            if (!(stub instanceof Remote)) {
                throw new RemoteLookupFailureException("Located RMI stub of class [" + stub.getClass().getName() +
                        "], with JNDI name [" + getJndiName() + "], does not implement interface [java.rmi.Remote]");
            }
            return (Remote) stub;
        }
        catch (NamingException ex) {
            throw new RemoteLookupFailureException("JNDI lookup for RMI service [" + getJndiName() + "] failed", ex);
        }
    }

    /**
     * Return the RMI stub to use. Called for each invocation.
     * <p>The default implementation returns the stub created on initialization,
     * if any. Else, it invokes {@link #lookupStub} to get a new stub for
     * each invocation. This can be overridden in subclasses, for example in
     * order to cache a stub for a given amount of time before recreating it,
     * or to test the stub whether it is still alive.
     * @return the RMI stub to use for an invocation
     * @throws NamingException if stub creation failed
     * @throws RemoteLookupFailureException if RMI stub creation failed
     */
    protected Remote getStub() throws NamingException, RemoteLookupFailureException {
        // Fast path: either caching is disabled (fresh lookup per call), or the
        // stub was eagerly cached at startup and will never be refreshed - in
        // both cases no synchronization on stubMonitor is needed.
        if (!this.cacheStub || (this.lookupStubOnStartup && !this.refreshStubOnConnectFailure)) {
            return (this.cachedStub != null ? this.cachedStub : lookupStub());
        }
        else {
            // Lazy lookup (or refresh-capable caching): guard with stubMonitor.
            synchronized (this.stubMonitor) {
                if (this.cachedStub == null) {
                    this.cachedStub = lookupStub();
                }
                return this.cachedStub;
            }
        }
    }

    /**
     * Fetches an RMI stub and delegates to {@link #doInvoke}.
     * If configured to refresh on connect failure, it will call
     * {@link #refreshAndRetry} on corresponding RMI exceptions.
     * @see #getStub
     * @see #doInvoke
     * @see #refreshAndRetry
     * @see java.rmi.ConnectException
     * @see java.rmi.ConnectIOException
     * @see java.rmi.NoSuchObjectException
     */
    public Object invoke(MethodInvocation invocation) throws Throwable {
        Remote stub = null;
        try {
            stub = getStub();
        }
        catch (NamingException ex) {
            throw new RemoteLookupFailureException("JNDI lookup for RMI service [" + getJndiName() + "] failed", ex);
        }
        try {
            return doInvoke(invocation, stub);
        }
        catch (RemoteConnectFailureException ex) {
            return handleRemoteConnectFailure(invocation, ex);
        }
        catch (RemoteException ex) {
            if (isConnectFailure(ex)) {
                return handleRemoteConnectFailure(invocation, ex);
            }
            else {
                throw ex;
            }
        }
        catch (SystemException ex) {
            // CORBA system exception: the RMI-IIOP case.
            if (isConnectFailure(ex)) {
                return handleRemoteConnectFailure(invocation, ex);
            }
            else {
                throw ex;
            }
        }
    }

    /**
     * Determine whether the given RMI exception indicates a connect failure.
     * <p>The default implementation delegates to
     * {@link RmiClientInterceptorUtils#isConnectFailure}.
     * @param ex the RMI exception to check
     * @return whether the exception should be treated as connect failure
     */
    protected boolean isConnectFailure(RemoteException ex) {
        return RmiClientInterceptorUtils.isConnectFailure(ex);
    }

    /**
     * Determine whether the given CORBA exception indicates a connect failure.
     * <p>The default implementation checks for CORBA's
     * {@link org.omg.CORBA.OBJECT_NOT_EXIST} exception.
     * @param ex the RMI exception to check
     * @return whether the exception should be treated as connect failure
     */
    protected boolean isConnectFailure(SystemException ex) {
        return (ex instanceof OBJECT_NOT_EXIST);
    }

    /**
     * Refresh the stub and retry the remote invocation if necessary.
     * <p>If not configured to refresh on connect failure, this method
     * simply rethrows the original exception.
     * @param invocation the invocation that failed
     * @param ex the exception raised on remote invocation
     * @return the result value of the new invocation, if succeeded
     * @throws Throwable an exception raised by the new invocation, if failed too.
     */
    private Object handleRemoteConnectFailure(MethodInvocation invocation, Exception ex) throws Throwable {
        if (this.refreshStubOnConnectFailure) {
            // Log the full stack trace only at debug level; warn level gets a one-liner.
            if (logger.isDebugEnabled()) {
                logger.debug("Could not connect to RMI service [" + getJndiName() + "] - retrying", ex);
            }
            else if (logger.isWarnEnabled()) {
                logger.warn("Could not connect to RMI service [" + getJndiName() + "] - retrying");
            }
            return refreshAndRetry(invocation);
        }
        else {
            throw ex;
        }
    }

    /**
     * Refresh the RMI stub and retry the given invocation.
     * Called by invoke on connect failure.
     * @param invocation the AOP method invocation
     * @return the invocation result, if any
     * @throws Throwable in case of invocation failure
     * @see #invoke
     */
    protected Object refreshAndRetry(MethodInvocation invocation) throws Throwable {
        Remote freshStub = null;
        synchronized (this.stubMonitor) {
            // Drop the stale stub before looking up a fresh one.
            this.cachedStub = null;
            freshStub = lookupStub();
            if (this.cacheStub) {
                this.cachedStub = freshStub;
            }
        }
        return doInvoke(invocation, freshStub);
    }

    /**
     * Perform the given invocation on the given RMI stub.
     * @param invocation the AOP method invocation
     * @param stub the RMI stub to invoke
     * @return the invocation result, if any
     * @throws Throwable in case of invocation failure
     */
    protected Object doInvoke(MethodInvocation invocation, Remote stub) throws Throwable {
        if (stub instanceof RmiInvocationHandler) {
            // RMI invoker
            try {
                return doInvoke(invocation, (RmiInvocationHandler) stub);
            }
            catch (RemoteException ex) {
                throw convertRmiAccessException(ex, invocation.getMethod());
            }
            catch (SystemException ex) {
                throw convertCorbaAccessException(ex, invocation.getMethod());
            }
            catch (InvocationTargetException ex) {
                // Unwrap the service method's own exception.
                throw ex.getTargetException();
            }
            catch (Throwable ex) {
                throw new RemoteInvocationFailureException("Invocation of method [" + invocation.getMethod() +
                        "] failed in RMI service [" + getJndiName() + "]", ex);
            }
        }
        else {
            // traditional RMI stub
            try {
                return RmiClientInterceptorUtils.doInvoke(invocation, stub);
            }
            catch (InvocationTargetException ex) {
                Throwable targetEx = ex.getTargetException();
                if (targetEx instanceof RemoteException) {
                    throw convertRmiAccessException((RemoteException) targetEx, invocation.getMethod());
                }
                else if (targetEx instanceof SystemException) {
                    throw convertCorbaAccessException((SystemException) targetEx, invocation.getMethod());
                }
                else {
                    throw targetEx;
                }
            }
        }
    }

    /**
     * Apply the given AOP method invocation to the given {@link RmiInvocationHandler}.
     * <p>The default implementation delegates to {@link #createRemoteInvocation}.
     * @param methodInvocation the current AOP method invocation
     * @param invocationHandler the RmiInvocationHandler to apply the invocation to
     * @return the invocation result
     * @throws RemoteException in case of communication errors
     * @throws NoSuchMethodException if the method name could not be resolved
     * @throws IllegalAccessException if the method could not be accessed
     * @throws InvocationTargetException if the method invocation resulted in an exception
     * @see org.springframework.remoting.support.RemoteInvocation
     */
    protected Object doInvoke(MethodInvocation methodInvocation, RmiInvocationHandler invocationHandler)
            throws RemoteException, NoSuchMethodException, IllegalAccessException, InvocationTargetException {

        // Answer toString() locally instead of shipping it over the wire.
        if (AopUtils.isToStringMethod(methodInvocation.getMethod())) {
            return "RMI invoker proxy for service URL [" + getJndiName() + "]";
        }
        return invocationHandler.invoke(createRemoteInvocation(methodInvocation));
    }

    /**
     * Create a new RemoteInvocation object for the given AOP method invocation.
     * <p>The default implementation delegates to the configured
     * {@link #setRemoteInvocationFactory RemoteInvocationFactory}.
     * This can be overridden in subclasses in order to provide custom RemoteInvocation
     * subclasses, containing additional invocation parameters (e.g. user credentials).
     * <p>Note that it is preferable to build a custom RemoteInvocationFactory
     * as a reusable strategy, instead of overriding this method.
     * @param methodInvocation the current AOP method invocation
     * @return the RemoteInvocation object
     * @see RemoteInvocationFactory#createRemoteInvocation
     */
    protected RemoteInvocation createRemoteInvocation(MethodInvocation methodInvocation) {
        return getRemoteInvocationFactory().createRemoteInvocation(methodInvocation);
    }

    /**
     * Convert the given RMI RemoteException that happened during remote access
     * to Spring's RemoteAccessException if the method signature does not declare
     * RemoteException. Else, return the original RemoteException.
     * @param method the invoked method
     * @param ex the RemoteException that happened
     * @return the exception to be thrown to the caller
     */
    private Exception convertRmiAccessException(RemoteException ex, Method method) {
        return RmiClientInterceptorUtils.convertRmiAccessException(method, ex, isConnectFailure(ex), getJndiName());
    }

    /**
     * Convert the given CORBA SystemException that happened during remote access
     * to Spring's RemoteAccessException if the method signature does not declare
     * RemoteException. Else, return the SystemException wrapped in a RemoteException.
     * @param method the invoked method
     * @param ex the RemoteException that happened
     * @return the exception to be thrown to the caller
     */
    private Exception convertCorbaAccessException(SystemException ex, Method method) {
        if (ReflectionUtils.declaresException(method, RemoteException.class)) {
            // A traditional RMI service: wrap CORBA exceptions in standard RemoteExceptions.
            return new RemoteException("Failed to access CORBA service [" + getJndiName() + "]", ex);
        }
        else {
            if (isConnectFailure(ex)) {
                return new RemoteConnectFailureException("Could not connect to CORBA service [" + getJndiName() + "]", ex);
            }
            else {
                return new RemoteAccessException("Could not access CORBA service [" + getJndiName() + "]", ex);
            }
        }
    }

}
| |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.edgent.connectors.command;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.StringTokenizer;
import java.util.concurrent.TimeUnit;
import org.apache.edgent.connectors.command.runtime.CommandReader;
import org.apache.edgent.connectors.command.runtime.CommandWriter;
import org.apache.edgent.function.Consumer;
import org.apache.edgent.function.Supplier;
import org.apache.edgent.topology.TSink;
import org.apache.edgent.topology.TStream;
import org.apache.edgent.topology.Topology;
/**
* Connector for creating a TStream from a Command's / OS Process's output
* and sinking a TStream to a Command's / OS Process's input.
* <P>
* e.g., run a network monitor command (like Tiger Shark) and ingest its output.
*/
public class CommandStreams {

  // Static-utility class: no instances.
  private CommandStreams() {}

  /**
   * Tokenize the specified {@code cmdString} in the exact same manner as
   * done by {@link Runtime#exec(String)}.
   * <P>
   * This function provides a convenience for creating a {@link ProcessBuilder}
   * for use by the other CommandStreams methods.
   * </P>
   * <P>
   * Sample use:
   * <pre>{@code
   * ProcessBuilder cmd = new ProcessBuilder(tokenize("sh someShellCmd.sh and args"));
   * TStream<String> stream = CommandStreams.generate(topology, cmd);
   * }</pre>
   *
   * @param cmdString a command string
   * @return the tokens
   */
  public static List<String> tokenize(String cmdString) {
    List<String> command = new ArrayList<>();
    // StringTokenizer splits on whitespace, matching Runtime.exec(String);
    // note this does NOT honor shell quoting.
    StringTokenizer tok = new StringTokenizer(cmdString);
    while (tok.hasMoreTokens())
      command.add(tok.nextToken());
    return command;
  }

  /**
   * Create an endless {@code TStream<String>} from a long running command's output.
   * <P>
   * The supplied {@code cmd} is used to start the command.
   * A tuple is created for each UTF8 line read from the command's
   * {@link Process#getInputStream() output}.
   * The tuples contain output from stderr if cmd is configured to
   * {@link ProcessBuilder#redirectErrorStream(boolean) redirect stderr to stdout}.
   * The command is restarted if a read from the command's output stream
   * returns EOF or an error.
   * </P>
   * <P>
   * This is a convenience function equivalent to
   * {@code topology.generate(endlessCommandReader(cmd))}.
   * </P>
   * <P>
   * Sample use: create a stream of tuples for the output from a
   * continuously running and restartable command:
   * <pre>{@code
   * ProcessBuilder cmd = new ProcessBuilder("myCommand");
   * TStream<String> cmdOutput = CommandStreams.generate(topology, cmd);
   * cmdOutput.print();
   * }</pre>
   *
   * @param topology the topology to add the source stream to
   * @param cmd the {@link ProcessBuilder} to start the command
   * @return the source {@code TStream<String>}
   *
   * @see #endlessCommandReader(ProcessBuilder)
   * @see #tokenize(String)
   */
  public static TStream<String> generate(Topology topology, ProcessBuilder cmd) {
    return topology.generate(endlessCommandReader(cmd));
  }

  /**
   * Create a {@code TStream<String>} from a periodically run command's output.
   * <P>
   * The supplied {@code cmd} is used to start the command
   * at the specified {@code period}.
   * The command's UTF8 {@link Process#getInputStream() output} is read until EOF
   * and a {@code List<String>} tuple is created containing the collected output.
   * The tuples contain output from stderr if the cmd is configured to
   * {@link ProcessBuilder#redirectErrorStream(boolean) redirect stderr to stdout}.
   * </P>
   * <P>
   * This is a convenience function equivalent to
   * {@code topology.poll(commandReaderList(cmd), period, units)}.
   * </P>
   * <P>
   * Sample use: create a stream of tuples containing the output
   * from a periodically run command:
   * <pre>{@code
   * ProcessBuilder cmd = new ProcessBuilder("date");
   * TStream<List<String>> cmdOutput =
   *      CommandStreams.periodicSource(topology, cmd, 2, TimeUnit.SECONDS);
   * cmdOutput.print();
   * }</pre>
   *
   * @param topology the topology to add the source stream to
   * @param cmd the {@link ProcessBuilder} to start the command
   * @param period the period to run the command and collect its output
   * @param units TimeUnit for {@code period}
   * @return the source {@code TStream<List<String>>}
   *
   * @see #commandReaderList(ProcessBuilder)
   * @see #tokenize(String)
   */
  public static TStream<List<String>> periodicSource(Topology topology,
      ProcessBuilder cmd, long period, TimeUnit units) {
    return topology.poll(commandReaderList(cmd), period, units);
  }

  /**
   * Sink a {@code TStream<String>} to a command's input.
   * <P>
   * The supplied {@code cmd} is used to start the command.
   * Each tuple is written as UTF8 and flushed to the command's {@link Process#getOutputStream() input}.
   * The command is restarted if a write encounters an error.
   * </P>
   * <P>
   * While each write is followed by a flush() that only helps to
   * reduce the time it takes to notice that cmd has failed and restart it.
   * Supposedly "successfully written and flushed" values are not guaranteed to
   * have been received by a cmd across restarts.
   * </P>
   * <P>
   * This is a convenience function equivalent to
   * {@code stream.sink(commandWriter(cmd))}
   * </P>
   * <P>
   * Sample use: write a stream of tuples to the input of a command:
   * <pre>{@code
   * TStream<String> stream = topology.strings("one", "two", "three");
   * ProcessBuilder cmd = new ProcessBuilder("cat").redirectOutput(new File("/dev/stdout"));
   * CommandStreams.sink(stream, cmd);
   * }</pre>
   *
   * @param stream the stream to sink
   * @param cmd the {@link ProcessBuilder} to start the command
   * @return a {@link TSink}
   *
   * @see #commandWriter(ProcessBuilder)
   * @see #tokenize(String)
   */
  public static TSink<String> sink(TStream<String> stream, ProcessBuilder cmd) {
    return stream.sink(commandWriter(cmd));
  }

  /**
   * Create an endless {@code Supplier<String>} for ingesting a long running command's output.
   * <P>
   * This method is particularly helpful in creating a sensor or source connector
   * class that hides the fact that it uses a command, enabling it to be used
   * like any other sensor/connector.
   * </P>
   * For example:
   * <pre><code>
   * // ingest the sensor data
   * TStream&lt;MySensorData&gt; stream = topology.generate(new MySensor());
   *
   * // MySensor class
   * class MySensor implements Supplier&lt;MySensorData&gt; {
   *   private String[] cmd = new String[] {"mySensorCmd", "arg1"};
   *   private Supplier&lt;String&gt; commandReader =
   *         CommandStreams.endlessCommandReader(new ProcessBuilder(cmd));
   *
   *   // implement Supplier&lt;MySensorData&gt;.get()
   *   public MySensorData get() {
   *     // get the next line from the cmd and create a MySensorData tuple from it
   *     return createMySensorData(commandReader.get());
   *   }
   * }
   * </code></pre>
   * <P>
   * The supplied {@code cmd} is used to start the command.
   * A call to {@link Supplier#get()} reads the next UTF8 line from the command's
   * {@link Process#getInputStream() output}.
   * The returned strings contain output from stderr if the cmd is configured to
   * {@link ProcessBuilder#redirectErrorStream(boolean) redirect stderr to stdout}.
   * The command is restarted if a read from the command's output stream
   * returns EOF or an error.
   * </P>
   *
   * @param cmd the {@link ProcessBuilder} to start the command
   * @return the {@code Supplier<String>}
   *
   * @see #generate(Topology, ProcessBuilder)
   * @see #tokenize(String)
   */
  public static Supplier<String> endlessCommandReader(ProcessBuilder cmd) {
    return new Supplier<String>() {
      private static final long serialVersionUID = 1L;
      // CommandReader(cmd, true): the 'true' requests restart-on-EOF/error
      // (presumably; semantics live in the runtime class - confirm there).
      Supplier<Iterable<String>> reader = new CommandReader(cmd, true);
      Iterator<String> iter = null;
      @Override
      public String get() {
        // Lazily start the command on first use.
        if (iter == null) {
          iter = reader.get().iterator();
        }
        if (iter.hasNext()) {
          return iter.next();
        }
        else {
          // presumably a shutdown condition
          return null;
        }
      }
    };
  }

  /**
   * Create a {@code Supplier<List<String>>} to ingest a command's output.
   * <P>
   * This method is particularly helpful in creating a sensor or source connector
   * class that hides the fact that it uses a command, enabling it to be used
   * like any other sensor/connector.
   * </P>
   * For example:
   * <pre><code>
   * // ingest the sensor data
   * TStream&lt;MySensorData&gt; stream = topology.periodicSource(new MySensor());
   *
   * // MySensor class
   * class MySensor implements Supplier&lt;MySensorData&gt; {
   *   private String[] cmd = new String[] {"mySensorCmd", "arg1"};
   *   private Supplier&lt;List&lt;String&gt;&gt; commandReader =
   *         CommandStreams.commandReaderList(new ProcessBuilder(cmd));
   *
   *   // implement Supplier&lt;MySensorData&gt;.get()
   *   public MySensorData get() {
   *     // get the cmd output and create a MySensorData tuple from it
   *     return createMySensorData(commandReader.get());
   *   }
   * }
   * </code></pre>
   * <P>
   * The supplied {@code cmd} is used to start the command.
   * A call to {@link Supplier#get()} reads the command's UTF8
   * {@link Process#getInputStream() input stream} until an EOF or error
   * and returns a {@code List<String>} of the collected input.
   * The tuples contain output from stderr if the cmd is configured to
   * {@link ProcessBuilder#redirectErrorStream(boolean) redirect stderr to stdout}.
   * </P>
   *
   * @param cmd the {@link ProcessBuilder} to start the command
   * @return the {@code Supplier<List<String>>} for the command
   *
   * @see #periodicSource(Topology, ProcessBuilder, long, TimeUnit)
   * @see #tokenize(String)
   */
  public static Supplier<List<String>> commandReaderList(ProcessBuilder cmd) {
    return new Supplier<List<String>>() {
      private static final long serialVersionUID = 1L;
      @Override
      public List<String> get() {
        // try-with-resources: the reader (and its process) is closed per call.
        try (CommandReader supplier
                = new CommandReader(cmd, false))
        {
          Iterator<String> iter = supplier.get().iterator();
          List<String> list = new ArrayList<>();
          while (iter.hasNext())
            list.add(iter.next());
          return list;
        }
      }
    };
  }

  /**
   * Create a {@code Consumer<String>} to write UTF8 string data to a command's input.
   * <P>
   * This method is particularly helpful in creating a sink connector
   * that hides the fact that it uses a command, enabling it to be used
   * like a native connector.
   * </P>
   * For example:
   * <pre><code>
   * // sink a stream to my connector
   * TStream&lt;MySensorData&gt; stream = ...;
   * stream.sink(new MySinkConnector());
   *
   * // MySinkConnector class
   * class MySinkConnector implements Consumer&lt;MySensorData&gt; {
   *   private String[] cmd = new String[] {"mySinkCmd", "arg1"};
   *   private Consumer&lt;String&gt; commandWriter =
   *         CommandStreams.commandWriter(new ProcessBuilder(cmd));
   *
   *   // implement Consumer&lt;MySensorData&gt;.accept()
   *   public void accept(MySensorData data) {
   *     // convert the data to a string and write it to the cmd
   *     commandWriter.accept(convertMySensorData(data));
   *   }
   * }
   * </code></pre>
   * <P>
   * The supplied {@link ProcessBuilder cmd} is used to start the command.
   * Each call to {@link Consumer#accept(Object) accept(String)} writes a
   * UTF8 string to the command's {@link Process#getOutputStream() input}.
   * Each write is followed by a flush.
   * The command is restarted if a write encounters an error.
   * </P>
   * <P>
   * While each write is followed by a flush() that only helps to
   * reduce the time it takes to notice that cmd has failed and restart it.
   * Supposedly "successfully written and flushed" values are not guaranteed to
   * have been received by a cmd across restarts.
   * </P>
   *
   * @param cmd the {@link ProcessBuilder} to start the command
   * @return the {@code Consumer<String>} for the command
   *
   * @see #sink(TStream, ProcessBuilder)
   * @see #tokenize(String)
   */
  public static Consumer<String> commandWriter(ProcessBuilder cmd) {
    return new CommandWriter(cmd, true);
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.testing;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.MoreCollectors;
import com.google.errorprone.annotations.CanIgnoreReturnValue;
import io.airlift.units.Duration;
import io.trino.Session;
import io.trino.cost.CostCalculator;
import io.trino.cost.CostCalculatorUsingExchanges;
import io.trino.cost.CostCalculatorWithEstimatedExchanges;
import io.trino.cost.CostComparator;
import io.trino.cost.TaskCountEstimator;
import io.trino.execution.QueryManagerConfig;
import io.trino.execution.TaskManagerConfig;
import io.trino.execution.warnings.WarningCollector;
import io.trino.metadata.Metadata;
import io.trino.operator.OperatorStats;
import io.trino.spi.QueryId;
import io.trino.spi.type.Type;
import io.trino.spi.type.TypeOperators;
import io.trino.sql.analyzer.FeaturesConfig;
import io.trino.sql.analyzer.FeaturesConfig.JoinDistributionType;
import io.trino.sql.analyzer.QueryExplainer;
import io.trino.sql.parser.SqlParser;
import io.trino.sql.planner.Plan;
import io.trino.sql.planner.PlanFragmenter;
import io.trino.sql.planner.PlanOptimizers;
import io.trino.sql.planner.TypeAnalyzer;
import io.trino.sql.planner.optimizations.PlanNodeSearcher;
import io.trino.sql.planner.optimizations.PlanOptimizer;
import io.trino.sql.planner.plan.FilterNode;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.sql.planner.plan.ProjectNode;
import io.trino.sql.planner.plan.TableScanNode;
import io.trino.sql.query.QueryAssertions.QueryAssert;
import io.trino.sql.tree.ExplainType;
import io.trino.testing.TestingAccessControlManager.TestingPrivilege;
import io.trino.util.AutoCloseableCloser;
import org.assertj.core.api.AssertProvider;
import org.intellij.lang.annotations.Language;
import org.testng.SkipException;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.function.Consumer;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.SystemSessionProperties.JOIN_DISTRIBUTION_TYPE;
import static io.trino.SystemSessionProperties.JOIN_REORDERING_STRATEGY;
import static io.trino.sql.ParsingUtil.createParsingOptions;
import static io.trino.sql.SqlFormatter.formatSql;
import static io.trino.transaction.TransactionBuilder.transaction;
import static java.util.Collections.emptyList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.testng.Assert.assertEquals;
public abstract class AbstractTestQueryFramework
{
private QueryRunner queryRunner;
private H2QueryRunner h2QueryRunner;
private SqlParser sqlParser;
private final AutoCloseableCloser afterClassCloser = AutoCloseableCloser.create();
private io.trino.sql.query.QueryAssertions queryAssertions;
@BeforeClass
public void init()
        throws Exception
{
    // Both runners are registered with afterClassCloser so close() can release
    // them together (presumably in LIFO order - confirm AutoCloseableCloser).
    queryRunner = afterClassCloser.register(createQueryRunner());
    h2QueryRunner = afterClassCloser.register(new H2QueryRunner());
    sqlParser = new SqlParser();
    queryAssertions = new io.trino.sql.query.QueryAssertions(queryRunner);
}
/**
 * Creates the {@link QueryRunner} exercised by this test class.
 * Called once from {@link #init()}; the returned runner is registered
 * for closing after the class completes.
 *
 * @return the query runner under test
 * @throws Exception if the runner cannot be created
 */
protected abstract QueryRunner createQueryRunner()
        throws Exception;
@AfterClass(alwaysRun = true)
public void close()
        throws Exception
{
    // Close registered resources first, then null out the fields so a retained
    // test-class instance does not keep the runners reachable.
    afterClassCloser.close();
    queryRunner = null;
    h2QueryRunner = null;
    sqlParser = null;
    queryAssertions = null;
}
@Test
public void ensureTestNamingConvention()
{
    // Enforce a naming convention to make code navigation easier: subclasses of
    // this generic framework must not masquerade as connector test suites.
    String testClassName = getClass().getName();
    assertThat(testClassName).doesNotEndWith("ConnectorTest");
    assertThat(testClassName).doesNotEndWith("ConnectorSmokeTest");
}
protected Session getSession()
{
return queryRunner.getDefaultSession();
}
protected final int getNodeCount()
{
return queryRunner.getNodeCount();
}
protected MaterializedResult computeActual(@Language("SQL") String sql)
{
return computeActual(getSession(), sql);
}
protected MaterializedResult computeActual(Session session, @Language("SQL") String sql)
{
return queryRunner.execute(session, sql).toTestTypes();
}
protected Object computeScalar(@Language("SQL") String sql)
{
return computeActual(sql).getOnlyValue();
}
protected AssertProvider<QueryAssert> query(@Language("SQL") String sql)
{
return query(getSession(), sql);
}
protected AssertProvider<QueryAssert> query(Session session, @Language("SQL") String sql)
{
return queryAssertions.query(session, sql);
}
protected void assertQuery(@Language("SQL") String sql)
{
assertQuery(getSession(), sql);
}
protected void assertQuery(Session session, @Language("SQL") String sql)
{
QueryAssertions.assertQuery(queryRunner, session, sql, h2QueryRunner, sql, false, false);
}
protected void assertQuery(@Language("SQL") String actual, @Language("SQL") String expected)
{
QueryAssertions.assertQuery(queryRunner, getSession(), actual, h2QueryRunner, expected, false, false);
}
protected void assertQuery(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
{
QueryAssertions.assertQuery(queryRunner, session, actual, h2QueryRunner, expected, false, false);
}
protected void assertQuery(Session session, @Language("SQL") String actual, @Language("SQL") String expected, Consumer<Plan> planAssertion)
{
checkArgument(queryRunner instanceof DistributedQueryRunner, "pattern assertion is only supported for DistributedQueryRunner");
QueryAssertions.assertQuery(queryRunner, session, actual, h2QueryRunner, expected, false, false, planAssertion);
}
protected void assertQueryEventually(Session session, @Language("SQL") String actual, @Language("SQL") String expected, Duration timeout)
{
QueryAssertions.assertQueryEventually(queryRunner, session, actual, h2QueryRunner, expected, false, false, Optional.empty(), timeout);
}
protected void assertQueryOrdered(@Language("SQL") String sql)
{
assertQueryOrdered(getSession(), sql);
}
protected void assertQueryOrdered(Session session, @Language("SQL") String sql)
{
assertQueryOrdered(session, sql, sql);
}
protected void assertQueryOrdered(@Language("SQL") String actual, @Language("SQL") String expected)
{
assertQueryOrdered(getSession(), actual, expected);
}
protected void assertQueryOrdered(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
{
QueryAssertions.assertQuery(queryRunner, session, actual, h2QueryRunner, expected, true, false);
}
protected void assertUpdate(@Language("SQL") String actual, @Language("SQL") String expected)
{
assertUpdate(getSession(), actual, expected);
}
protected void assertUpdate(Session session, @Language("SQL") String actual, @Language("SQL") String expected)
{
QueryAssertions.assertQuery(queryRunner, session, actual, h2QueryRunner, expected, false, true);
}
protected void assertUpdate(@Language("SQL") String sql)
{
assertUpdate(getSession(), sql);
}
protected void assertUpdate(Session session, @Language("SQL") String sql)
{
QueryAssertions.assertUpdate(queryRunner, session, sql, OptionalLong.empty(), Optional.empty());
}
protected void assertUpdate(@Language("SQL") String sql, long count)
{
assertUpdate(getSession(), sql, count);
}
protected void assertUpdate(Session session, @Language("SQL") String sql, long count)
{
QueryAssertions.assertUpdate(queryRunner, session, sql, OptionalLong.of(count), Optional.empty());
}
protected void assertUpdate(Session session, @Language("SQL") String sql, long count, Consumer<Plan> planAssertion)
{
QueryAssertions.assertUpdate(queryRunner, session, sql, OptionalLong.of(count), Optional.of(planAssertion));
}
protected void assertQuerySucceeds(@Language("SQL") String sql)
{
assertQuerySucceeds(getSession(), sql);
}
protected void assertQuerySucceeds(Session session, @Language("SQL") String sql)
{
QueryAssertions.assertQuerySucceeds(queryRunner, session, sql);
}
protected void assertQueryFailsEventually(@Language("SQL") String sql, @Language("RegExp") String expectedMessageRegExp, Duration timeout)
{
QueryAssertions.assertQueryFailsEventually(queryRunner, getSession(), sql, expectedMessageRegExp, timeout);
}
protected void assertQueryFails(@Language("SQL") String sql, @Language("RegExp") String expectedMessageRegExp)
{
QueryAssertions.assertQueryFails(queryRunner, getSession(), sql, expectedMessageRegExp);
}
protected void assertQueryFails(Session session, @Language("SQL") String sql, @Language("RegExp") String expectedMessageRegExp)
{
QueryAssertions.assertQueryFails(queryRunner, session, sql, expectedMessageRegExp);
}
protected void assertQueryReturnsEmptyResult(@Language("SQL") String sql)
{
assertThat(query(sql))
.returnsEmptyResult();
}
protected void assertQueryReturnsEmptyResult(Session session, @Language("SQL") String sql)
{
assertThat(query(session, sql))
.returnsEmptyResult();
}
protected void assertAccessAllowed(@Language("SQL") String sql, TestingPrivilege... deniedPrivileges)
{
assertAccessAllowed(getSession(), sql, deniedPrivileges);
}
protected void assertAccessAllowed(Session session, @Language("SQL") String sql, TestingPrivilege... deniedPrivileges)
{
executeExclusively(session, sql, deniedPrivileges);
}
private void executeExclusively(Session session, @Language("SQL") String sql, TestingPrivilege[] deniedPrivileges)
{
executeExclusively(() -> {
try {
queryRunner.getAccessControl().deny(deniedPrivileges);
queryRunner.execute(session, sql);
}
finally {
queryRunner.getAccessControl().reset();
}
});
}
protected void assertAccessDenied(@Language("SQL") String sql, @Language("RegExp") String exceptionsMessageRegExp, TestingPrivilege... deniedPrivileges)
{
assertAccessDenied(getSession(), sql, exceptionsMessageRegExp, deniedPrivileges);
}
protected void assertAccessDenied(
Session session,
@Language("SQL") String sql,
@Language("RegExp") String exceptionsMessageRegExp,
TestingPrivilege... deniedPrivileges)
{
assertException(session, sql, ".*Access Denied: " + exceptionsMessageRegExp, deniedPrivileges);
}
private void assertException(Session session, @Language("SQL") String sql, @Language("RegExp") String exceptionsMessageRegExp, TestingPrivilege[] deniedPrivileges)
{
assertThatThrownBy(() -> executeExclusively(session, sql, deniedPrivileges))
.as("Query: " + sql)
.hasMessageMatching(exceptionsMessageRegExp);
}
protected void assertTableColumnNames(String tableName, String... columnNames)
{
MaterializedResult result = computeActual("DESCRIBE " + tableName);
List<String> expected = ImmutableList.copyOf(columnNames);
List<String> actual = result.getMaterializedRows().stream()
.map(row -> (String) row.getField(0))
.collect(toImmutableList());
assertEquals(actual, expected);
}
protected void assertExplain(@Language("SQL") String query, @Language("RegExp") String... expectedExplainRegExps)
{
assertExplain(getSession(), query, expectedExplainRegExps);
}
protected void assertExplain(Session session, @Language("SQL") String query, @Language("RegExp") String... expectedExplainRegExps)
{
assertExplainAnalyze(false, session, query, expectedExplainRegExps);
}
protected void assertExplainAnalyze(@Language("SQL") String query, @Language("RegExp") String... expectedExplainRegExps)
{
assertExplainAnalyze(getSession(), query, expectedExplainRegExps);
}
protected void assertExplainAnalyze(Session session, @Language("SQL") String query, @Language("RegExp") String... expectedExplainRegExps)
{
assertExplainAnalyze(true, session, query, expectedExplainRegExps);
}
private void assertExplainAnalyze(
boolean analyze,
Session session,
@Language("SQL") String query,
@Language("RegExp") String... expectedExplainRegExps)
{
String value = (String) computeActual(session, query).getOnlyValue();
if (analyze) {
// TODO: check that rendered plan is as expected, once stats are collected in a consistent way
// assertTrue(value.contains("Cost: "), format("Expected output to contain \"Cost: \", but it is %s", value));
assertThat(value).containsPattern("CPU:.*, Input:.*, Output");
}
for (String expectedExplainRegExp : expectedExplainRegExps) {
assertThat(value).containsPattern(expectedExplainRegExp);
}
}
protected MaterializedResult computeExpected(@Language("SQL") String sql, List<? extends Type> resultTypes)
{
return h2QueryRunner.execute(getSession(), sql, resultTypes);
}
protected void executeExclusively(Runnable executionBlock)
{
queryRunner.getExclusiveLock().lock();
try {
executionBlock.run();
}
finally {
queryRunner.getExclusiveLock().unlock();
}
}
protected String formatSqlText(String sql)
{
return formatSql(sqlParser.createStatement(sql, createParsingOptions(getSession())));
}
//TODO: should WarningCollector be added?
protected String getExplainPlan(String query, ExplainType.Type planType)
{
QueryExplainer explainer = getQueryExplainer();
return transaction(queryRunner.getTransactionManager(), queryRunner.getAccessControl())
.singleStatement()
.execute(getSession(), session -> {
return explainer.getPlan(session, sqlParser.createStatement(query, createParsingOptions(session)), planType, emptyList(), WarningCollector.NOOP);
});
}
protected String getGraphvizExplainPlan(String query, ExplainType.Type planType)
{
QueryExplainer explainer = getQueryExplainer();
return transaction(queryRunner.getTransactionManager(), queryRunner.getAccessControl())
.singleStatement()
.execute(queryRunner.getDefaultSession(), session -> {
return explainer.getGraphvizPlan(session, sqlParser.createStatement(query, createParsingOptions(session)), planType, emptyList(), WarningCollector.NOOP);
});
}
private QueryExplainer getQueryExplainer()
{
Metadata metadata = queryRunner.getMetadata();
FeaturesConfig featuresConfig = new FeaturesConfig().setOptimizeHashGeneration(true);
boolean forceSingleNode = queryRunner.getNodeCount() == 1;
TaskCountEstimator taskCountEstimator = new TaskCountEstimator(queryRunner::getNodeCount);
CostCalculator costCalculator = new CostCalculatorUsingExchanges(taskCountEstimator);
TypeOperators typeOperators = new TypeOperators();
List<PlanOptimizer> optimizers = new PlanOptimizers(
metadata,
typeOperators,
new TypeAnalyzer(sqlParser, metadata),
new TaskManagerConfig(),
forceSingleNode,
queryRunner.getSplitManager(),
queryRunner.getPageSourceManager(),
queryRunner.getStatsCalculator(),
costCalculator,
new CostCalculatorWithEstimatedExchanges(costCalculator, taskCountEstimator),
new CostComparator(featuresConfig),
taskCountEstimator,
queryRunner.getNodePartitioningManager()).get();
return new QueryExplainer(
optimizers,
new PlanFragmenter(metadata, queryRunner.getNodePartitioningManager(), new QueryManagerConfig()),
metadata,
typeOperators,
queryRunner.getGroupProvider(),
queryRunner.getAccessControl(),
sqlParser,
queryRunner.getStatsCalculator(),
costCalculator,
ImmutableMap.of());
}
protected static void skipTestUnless(boolean requirement)
{
if (!requirement) {
throw new SkipException("requirement not met");
}
}
protected final QueryRunner getQueryRunner()
{
checkState(queryRunner != null, "queryRunner not set");
return queryRunner;
}
protected final DistributedQueryRunner getDistributedQueryRunner()
{
checkState(queryRunner != null, "queryRunner not set");
checkState(queryRunner instanceof DistributedQueryRunner, "queryRunner is not a DistributedQueryRunner");
return (DistributedQueryRunner) queryRunner;
}
protected Session noJoinReordering()
{
return noJoinReordering(JoinDistributionType.PARTITIONED);
}
protected Session noJoinReordering(JoinDistributionType distributionType)
{
return Session.builder(getSession())
.setSystemProperty(JOIN_REORDERING_STRATEGY, FeaturesConfig.JoinReorderingStrategy.NONE.name())
.setSystemProperty(JOIN_DISTRIBUTION_TYPE, distributionType.name())
.build();
}
protected OperatorStats searchScanFilterAndProjectOperatorStats(QueryId queryId, String tableName)
{
Plan plan = getDistributedQueryRunner().getQueryPlan(queryId);
PlanNodeId nodeId = PlanNodeSearcher.searchFrom(plan.getRoot())
.where(node -> {
if (!(node instanceof ProjectNode)) {
return false;
}
ProjectNode projectNode = (ProjectNode) node;
if (!(projectNode.getSource() instanceof FilterNode)) {
return false;
}
FilterNode filterNode = (FilterNode) projectNode.getSource();
if (!(filterNode.getSource() instanceof TableScanNode)) {
return false;
}
TableScanNode tableScanNode = (TableScanNode) filterNode.getSource();
return tableName.equals(tableScanNode.getTable().getConnectorHandle().toString());
})
.findOnlyElement()
.getId();
return getDistributedQueryRunner().getCoordinator()
.getQueryManager()
.getFullQueryInfo(queryId)
.getQueryStats()
.getOperatorSummaries()
.stream()
.filter(summary -> nodeId.equals(summary.getPlanNodeId()) && summary.getOperatorType().equals("ScanFilterAndProjectOperator"))
.collect(MoreCollectors.onlyElement());
}
@CanIgnoreReturnValue
protected final <T extends AutoCloseable> T closeAfterClass(T resource)
{
return afterClassCloser.register(resource);
}
}
| |
/*
* MIT License
*
* Copyright (c) 2016 BotMill.io
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package co.aurasphere.botmill.fb.test.autoreply.template;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import co.aurasphere.botmill.core.BotBeanState;
import co.aurasphere.botmill.core.annotation.Bot;
import co.aurasphere.botmill.fb.FbBot;
import co.aurasphere.botmill.fb.api.MessengerProfileApi;
import co.aurasphere.botmill.fb.api.UploadApi;
import co.aurasphere.botmill.fb.autoreply.AutoReply;
import co.aurasphere.botmill.fb.autoreply.MessageAutoReply;
import co.aurasphere.botmill.fb.event.FbBotMillEventType;
import co.aurasphere.botmill.fb.model.annotation.FbBotMillController;
import co.aurasphere.botmill.fb.model.annotation.FbBotMillInit;
import co.aurasphere.botmill.fb.model.api.upload.UploadAttachmentResponse;
import co.aurasphere.botmill.fb.model.base.AttachmentType;
import co.aurasphere.botmill.fb.model.incoming.MessageEnvelope;
import co.aurasphere.botmill.fb.model.outcoming.FbBotMillResponse;
import co.aurasphere.botmill.fb.model.outcoming.action.TypingAction;
import co.aurasphere.botmill.fb.model.outcoming.factory.ButtonFactory;
import co.aurasphere.botmill.fb.model.outcoming.factory.ReplyFactory;
import co.aurasphere.botmill.fb.model.outcoming.template.button.Button;
import co.aurasphere.botmill.fb.model.outcoming.template.button.PaymentType;
import co.aurasphere.botmill.fb.model.outcoming.template.button.RequestedUserInfo;
/**
 * Annotation-driven test bot exercising the FbBotMill template replies
 * (class {@code AnnotatedTemplatedBehaviourTest}).
 */
@Bot(state = BotBeanState.PROTOTYPE)
public class AnnotatedTemplatedBehaviourTest extends FbBot {

    /**
     * Initializes the bot's Messenger profile: registers the get-started
     * button payload and the greeting message.
     */
    @FbBotMillInit
    public void initialize() {
        // A list of postback/URL buttons used to be built here but was never
        // used by any call, so the dead code was removed.
        // Sets the thread settings.
        MessengerProfileApi.setGetStartedButton("get_started");
        MessengerProfileApi.setGreetingMessage("hello");
    }

    /**
     * Handles the get-started postback: creates a session for the sender and
     * greets them.
     *
     * @param envelope the incoming message envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.POSTBACK, postback = "get_started")
    public void getStarted(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                botMillSession().buildSession(envelope.getRecipient().getId());
                return ReplyFactory.addTextMessageOnly("Hi!").build(envelope);
            }
        });
    }

    /**
     * Acknowledges any image attachment sent by the user.
     *
     * @param envelope the incoming message envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.IMAGE)
    public void getImage(MessageEnvelope envelope) {
        reply(new MessageAutoReply("image received"));
    }

    /**
     * Catch all post back.
     */
    // This method catches all the postback, saves it to a variable and execute
    // the response.
    @FbBotMillController(eventType = FbBotMillEventType.POSTBACK_PATTERN, postbackPattern = ".*.")
    public void catchAllPostBack(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                // Stores the postback text in the session; intentionally
                // produces no outgoing message (null response).
                botMillSession().addKeyValuePair("postback_option_value", envelope.getMessage().getText());
                return null;
            }
        });
    }

    /**
     * Catch text and reply.
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "Hi!", caseSensitive = true)
    public void catchTextAndReply(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                String greetingMessage = "Hey There! Ahoy ";
                return ReplyFactory.addTextMessageOnly(greetingMessage).build(envelope);
            }
        });
    }

    /**
     * Uploads an image attachment and replies with a plain greeting.
     * <p>
     * The upload call is kept for its server-side effect; its returned
     * attachment ID was previously captured in an unused local, which has
     * been removed (see {@link #replyWithImageAttachment(MessageEnvelope)}
     * for the variant that actually reuses the ID).
     *
     * @param envelope the incoming message envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "Hi with Image!", caseSensitive = true)
    public void catchTextAndReplyWithImage(MessageEnvelope envelope) {
        UploadApi.uploadAttachment(
                AttachmentType.IMAGE,
                "http://vignette2.wikia.nocookie.net/nickelodeon/images/2/27/Spongebob_PNG.png/revision/latest?cb=20120702055752");
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                String greetingMessage = "Hey There! ";
                return ReplyFactory.addTextMessageOnly(greetingMessage).build(envelope);
            }
        });
    }

    /**
     * Uploads an image and replies with a reusable attachment referencing the
     * uploaded attachment ID.
     *
     * @param envelope the incoming message envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "reuse image", caseSensitive = true)
    public void replyWithImageAttachment(MessageEnvelope envelope) {
        UploadAttachmentResponse response = UploadApi
                .uploadAttachment(
                        AttachmentType.IMAGE,
                        "http://vignette2.wikia.nocookie.net/nickelodeon/images/2/27/Spongebob_PNG.png/revision/latest?cb=20120702055752");
        final String attachmentId = response.getAttachmentId();
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.getReusableImageAttachment(attachmentId).build(envelope);
            }
        });
    }

    /**
     * Uploads a video and replies with a reusable attachment referencing the
     * uploaded attachment ID.
     *
     * @param envelope the incoming message envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "reuse video", caseSensitive = true)
    public void replyWithVideoAttachment(MessageEnvelope envelope) {
        UploadAttachmentResponse response = UploadApi
                .uploadAttachment(
                        AttachmentType.VIDEO,
                        "http://www.engr.colostate.edu/me/facil/dynamics/files/drop.avi");
        final String attachmentId = response.getAttachmentId();
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.getReusableVideoAttachment(attachmentId).build(envelope);
            }
        });
    }

    /**
     * Reply text.
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "text message", caseSensitive = true)
    public void replyText(MessageEnvelope envelope) {
        reply(new MessageAutoReply("simple text message"));
    }

    /**
     * Initial greeting: queues a typing indicator followed by two text
     * replies, then flushes them in order with executeReplies().
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE_PATTERN, pattern = "(?i:hi)|(?i:hello)|(?i:hey)|(?i:good day)|(?i:home)")
    public void initialGreeting(MessageEnvelope envelope) {
        addReply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addTypingAction(TypingAction.TYPING_ON).build(envelope);
            }
        });
        addReply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                String greetingMessage = "Hey There! ";
                return ReplyFactory.addTextMessageOnly(greetingMessage).build(envelope);
            }
        });
        addReply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                String greetingMessage = "Hey There Again! ";
                return ReplyFactory.addTextMessageOnly(greetingMessage).build(envelope);
            }
        });
        executeReplies();
    }

    /**
     * Reply with button template.
     *
     * @param envelope
     *            the envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "button template", caseSensitive = false)
    public void replyWithButtonTemplate(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addButtonTemplate("Test button template")
                        .addPostbackButton("postback button", "postback button payload")
                        .addPhoneNumberButton("phone number button", "+393541247844")
                        .addUrlButton("web url button", "https://github.com/BotMill/fb-botmill").build(envelope);
            }
        });
    }

    /**
     * Reply with list template.
     *
     * @param envelope
     *            the envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "list template", caseSensitive = false)
    public void replyWithLisTemplate(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addListTemplate().addElement("Classic T-Shirt Collection")
                        .setSubtitle("See all our colors")
                        .addButton(ButtonFactory.createUrlButton("View",
                                "https://peterssendreceiveapp.ngrok.io/collection"))
                        .setImageUrl("https://peterssendreceiveapp.ngrok.io/img/collection.png")
                        .setDefaultAction(ButtonFactory
                                .createDefaultActionButton("https://peterssendreceiveapp.ngrok.io/shop_collection"))
                        .endElement().addElement("Classic White T-Shirt").setSubtitle("100% Cotton, 200% Comfortable")
                        .addButton(ButtonFactory.createUrlButton("Shop Now",
                                "https://peterssendreceiveapp.ngrok.io/shop?item=100"))
                        .setImageUrl("https://peterssendreceiveapp.ngrok.io/img/white-t-shirt.png")
                        .setDefaultAction(ButtonFactory
                                .createDefaultActionButton("https://peterssendreceiveapp.ngrok.io/view?item=100"))
                        .endElement().addElement("Classic Blue T-Shirt").setSubtitle("100% Cotton, 200% Comfortable")
                        .addButton(ButtonFactory.createUrlButton("Shop Now",
                                "https://peterssendreceiveapp.ngrok.io/shop?item=101"))
                        .setImageUrl("https://peterssendreceiveapp.ngrok.io/img/blue-t-shirt.png")
                        .setDefaultAction(ButtonFactory
                                .createDefaultActionButton("https://peterssendreceiveapp.ngrok.io/view?item=101"))
                        .endElement().addElement("Classic Black T-Shirt").setSubtitle("100% Cotton, 200% Comfortable")
                        .addButton(ButtonFactory.createUrlButton("Shop Now",
                                "https://peterssendreceiveapp.ngrok.io/shop?item=102"))
                        .setImageUrl("https://peterssendreceiveapp.ngrok.io/img/black-t-shirt.png")
                        .setDefaultAction(ButtonFactory
                                .createDefaultActionButton("https://peterssendreceiveapp.ngrok.io/view?item=102"))
                        .endElement().addButton(ButtonFactory.createPostbackButton("View more", "view"))
                        .build(envelope);
            }
        });
    }

    /**
     * Reply with quick replies.
     * <p>
     * NOTE(review): all three quick replies reuse the payload
     * "Payload for quick reply 1" — this looks like a copy-paste slip; confirm
     * whether replies 2 and 3 should carry their own payloads.
     *
     * @param envelope
     *            the envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "quick replies", caseSensitive = false)
    public void replywithQuickReplies(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addTextMessageOnly("Text message with quick replies")
                        .addQuickReply("Quick reply 1", "Payload for quick reply 1").build(envelope);
            }
        }, new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addTextMessageOnly("Text message with quick replies")
                        .addQuickReply("Quick reply 2", "Payload for quick reply 1").build(envelope);
            }
        }, new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addTextMessageOnly("Text message with quick replies")
                        .addQuickReply("Quick reply 3", "Payload for quick reply 1").build(envelope);
            }
        });
    }

    /**
     * Reply with receipt template.
     *
     * @param envelope
     *            the envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "receipt template", caseSensitive = true)
    public void replyWithReceiptTemplate(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addReceiptTemplate("Donato Rimenti", "15", "EUR", "Visa 1234")
                        .setSummary(new BigDecimal(201), new BigDecimal(10), new BigDecimal(13), new BigDecimal(240))
                        .setMerchantName("Aurasphere co").addElement("Element 1").setCurrency("EUR").setQuantity(29)
                        .setPrice(new BigDecimal(200)).setSubtitle("Element 1 subtitle").endElement()
                        .setTimestamp("1243").build(envelope);
            }
        });
    }

    /**
     * Replies with a generic template containing a (test-mode) Buy Button.
     *
     * @param envelope the incoming message envelope
     */
    @FbBotMillController(eventType = FbBotMillEventType.MESSAGE, text = "buy button", caseSensitive = true)
    public void replyWithBuyButton(MessageEnvelope envelope) {
        reply(new AutoReply() {
            @Override
            public FbBotMillResponse createResponse(MessageEnvelope envelope) {
                return ReplyFactory.addGenericTemplate().addElement("A simple Button Template with a Buy Button")
                        .addButton(ButtonFactory.createBuyButton("buy_button_payload")
                                .setPaymentSummary("USD", PaymentType.FIXED_AMOUNT, "BotMill.io")
                                .addPriceLabel("A price label", "2").setTestPayment(true)
                                .addRequestedUserInfo(RequestedUserInfo.CONTACT_PHONE).build())
                        .endElement().build(envelope);
            }
        });
    }
}
| |
/*
* Copyright (C) 2010 Google Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.appengine.datanucleus.jdo;
import com.google.appengine.api.datastore.Entity;
import com.google.appengine.api.datastore.EntityNotFoundException;
import com.google.appengine.api.datastore.KeyFactory;
import com.google.appengine.datanucleus.test.jdo.UnidirectionalOneToManySubclassesJDO.SubChild;
import com.google.appengine.datanucleus.test.jdo.UnidirectionalOneToManySubclassesJDO.SubParentWithSubChild;
import com.google.appengine.datanucleus.test.jdo.UnidirectionalOneToManySubclassesJDO.SubParentWithSuperChild;
import com.google.appengine.datanucleus.test.jdo.UnidirectionalOneToManySubclassesJDO.SuperChild;
import com.google.appengine.datanucleus.test.jdo.UnidirectionalOneToManySubclassesJDO.SuperParentWithSubChild;
import com.google.appengine.datanucleus.test.jdo.UnidirectionalOneToManySubclassesJDO.SuperParentWithSuperChild;
import java.util.Collections;
/**
* @author Max Ross <max.ross@gmail.com>
*/
public class JDOUnidirectionalOneToManySubclassTest extends JDOTestCase {
public void testSubParentWithSubChild() throws EntityNotFoundException {
    // Persist a sub-parent owning a single sub-child.
    SubChild child = new SubChild();
    child.setAString("a string");
    child.setBString("b string");
    SubParentWithSubChild parent = new SubParentWithSubChild();
    parent.setSuperParentString("super parent string");
    parent.setSubParentString("sub parent string");
    parent.getSuperParentSubChildren().add(child);
    beginTxn();
    pm.makePersistent(parent);
    commitTxn();

    // Verify the raw datastore entities written by the commit.
    Entity parentEntity = ds.get(KeyFactory.createKey(kindForClass(parent.getClass()), parent.getId()));
    Entity childEntity = ds.get(child.getId());
    assertEquals(3, parentEntity.getProperties().size());
    assertEquals("super parent string", parentEntity.getProperty("superParentString"));
    assertEquals("sub parent string", parentEntity.getProperty("subParentString"));
    assertEquals(Collections.singletonList(childEntity.getKey()), parentEntity.getProperty("subChildren"));
    assertEquals(2, childEntity.getProperties().size());
    assertEquals("a string", childEntity.getProperty("aString"));
    assertEquals("b string", childEntity.getProperty("bString"));

    // Reload both objects through JDO and verify their fields.
    beginTxn();
    parent = pm.getObjectById(parent.getClass(), parent.getId());
    assertEquals("super parent string", parent.getSuperParentString());
    assertEquals("sub parent string", parent.getSubParentString());
    assertEquals(1, parent.getSuperParentSubChildren().size());
    assertEquals(child.getId(), parent.getSuperParentSubChildren().get(0).getId());
    commitTxn();
    beginTxn();
    child = pm.getObjectById(child.getClass(), child.getId());
    assertEquals("a string", child.getAString());
    assertEquals("b string", child.getBString());
    commitTxn();

    // Deleting the parent must cascade to the child.
    beginTxn();
    pm.deletePersistent(parent);
    commitTxn();
    assertEquals(0, countForClass(parent.getClass()));
    assertEquals(0, countForClass(child.getClass()));
}
public void testSubParentWithSuperChild() throws EntityNotFoundException {
    // Persist a sub-parent owning a single super-class child.
    SuperChild child = new SuperChild();
    child.setAString("a string");
    SubParentWithSuperChild parent = new SubParentWithSuperChild();
    parent.setSuperParentString("super parent string");
    parent.setSubParentString("sub parent string");
    parent.getSuperParentSuperChildren().add(child);
    beginTxn();
    pm.makePersistent(parent);
    commitTxn();

    // Verify the raw datastore entities written by the commit.
    Entity parentEntity = ds.get(KeyFactory.createKey(kindForClass(parent.getClass()), parent.getId()));
    Entity childEntity = ds.get(child.getId());
    assertEquals(3, parentEntity.getProperties().size());
    assertEquals("super parent string", parentEntity.getProperty("superParentString"));
    assertEquals("sub parent string", parentEntity.getProperty("subParentString"));
    assertEquals(Collections.singletonList(childEntity.getKey()), parentEntity.getProperty("superChildren"));
    assertEquals(1, childEntity.getProperties().size());
    assertEquals("a string", childEntity.getProperty("aString"));

    // Reload both objects through JDO and verify their fields.
    beginTxn();
    parent = pm.getObjectById(parent.getClass(), parent.getId());
    assertEquals("super parent string", parent.getSuperParentString());
    assertEquals("sub parent string", parent.getSubParentString());
    assertEquals(1, parent.getSuperParentSuperChildren().size());
    assertEquals(child.getId(), parent.getSuperParentSuperChildren().get(0).getId());
    commitTxn();
    beginTxn();
    child = pm.getObjectById(child.getClass(), child.getId());
    assertEquals("a string", child.getAString());
    commitTxn();

    // Deleting the parent must cascade to the child.
    beginTxn();
    pm.deletePersistent(parent);
    commitTxn();
    assertEquals(0, countForClass(parent.getClass()));
    assertEquals(0, countForClass(child.getClass()));
}
public void testSuperParentWithSuperChild() throws EntityNotFoundException {
    // Persist a super-parent owning a single super-class child.
    SuperChild child = new SuperChild();
    child.setAString("a string");
    SuperParentWithSuperChild parent = new SuperParentWithSuperChild();
    parent.setSuperParentString("super parent string");
    parent.getSuperParentSuperChildren().add(child);
    beginTxn();
    pm.makePersistent(parent);
    commitTxn();

    // Verify the raw datastore entities written by the commit.
    Entity parentEntity = ds.get(KeyFactory.createKey(kindForClass(parent.getClass()), parent.getId()));
    Entity childEntity = ds.get(child.getId());
    assertEquals(2, parentEntity.getProperties().size());
    assertEquals("super parent string", parentEntity.getProperty("superParentString"));
    assertEquals(Collections.singletonList(childEntity.getKey()), parentEntity.getProperty("superChildren"));
    assertEquals(1, childEntity.getProperties().size());
    assertEquals("a string", childEntity.getProperty("aString"));

    // Reload both objects through JDO and verify their fields.
    beginTxn();
    parent = pm.getObjectById(parent.getClass(), parent.getId());
    assertEquals("super parent string", parent.getSuperParentString());
    assertEquals(1, parent.getSuperParentSuperChildren().size());
    assertEquals(child.getId(), parent.getSuperParentSuperChildren().get(0).getId());
    commitTxn();
    beginTxn();
    child = pm.getObjectById(child.getClass(), child.getId());
    assertEquals("a string", child.getAString());
    commitTxn();

    // Deleting the parent must cascade to the child.
    beginTxn();
    pm.deletePersistent(parent);
    commitTxn();
    assertEquals(0, countForClass(parent.getClass()));
    assertEquals(0, countForClass(child.getClass()));
}
/**
 * Same lifecycle as {@code testSuperParentWithSuperChild} but with a SubChild
 * (a subclass adding {@code bString}), verifying that subclass properties are
 * stored on the child entity and round-trip through JDO.
 *
 * @throws EntityNotFoundException if a persisted entity cannot be fetched directly
 */
public void testSuperParentWithSubChild() throws EntityNotFoundException {
    // insertion: persisting the parent is expected to persist the child by reachability
    SuperParentWithSubChild parent = new SuperParentWithSubChild();
    parent.setSuperParentString("super parent string");
    SubChild subChild = new SubChild();
    subChild.setAString("a string");
    subChild.setBString("b string");
    parent.getSuperParentSubChildren().add(subChild);
    beginTxn();
    pm.makePersistent(parent);
    commitTxn();
    // verify the raw datastore state, bypassing JDO
    Entity parentEntity =
        ds.get(KeyFactory.createKey(kindForClass(parent.getClass()), parent.getId()));
    Entity superParentSubChildEntity = ds.get(subChild.getId());
    assertEquals(2, parentEntity.getProperties().size());
    assertEquals("super parent string", parentEntity.getProperty("superParentString"));
    assertEquals(Collections.singletonList(superParentSubChildEntity.getKey()),
        parentEntity.getProperty("subChildren"));
    // both the inherited and the subclass-declared property live on the child entity
    assertEquals(2, superParentSubChildEntity.getProperties().size());
    assertEquals("a string", superParentSubChildEntity.getProperty("aString"));
    assertEquals("b string", superParentSubChildEntity.getProperty("bString"));
    // lookup: re-read both objects through JDO and verify the mapped relationship
    beginTxn();
    parent = pm.getObjectById(parent.getClass(), parent.getId());
    assertEquals("super parent string", parent.getSuperParentString());
    assertEquals(1, parent.getSuperParentSubChildren().size());
    assertEquals(subChild.getId(), parent.getSuperParentSubChildren().get(0).getId());
    commitTxn();
    beginTxn();
    subChild = pm.getObjectById(subChild.getClass(), subChild.getId());
    assertEquals("a string", subChild.getAString());
    assertEquals("b string", subChild.getBString());
    commitTxn();
    // cascade delete: removing the parent must also remove the owned child
    beginTxn();
    pm.deletePersistent(parent);
    commitTxn();
    assertEquals(0, countForClass(parent.getClass()));
    assertEquals(0, countForClass(subChild.getClass()));
}
/**
 * Adding a SubChild to a collection whose declared element type is SuperChild
 * must be rejected at makePersistent time with UnsupportedOperationException.
 */
public void testWrongChildType() {
    SuperParentWithSuperChild parent = new SuperParentWithSuperChild();
    parent.setSuperParentString("a string");
    SubChild child = new SubChild();
    parent.getSuperParentSuperChildren().add(child);
    beginTxn();
    try {
        pm.makePersistent(parent);
        fail("expected exception");
    } catch (UnsupportedOperationException uoe) {
        // good: wrong child type rejected at persist time
    }
    rollbackTxn();
}
/**
 * Adding a child of the wrong declared type to an already-persisted parent
 * must be rejected when the change is flushed (here, on {@code pm.close()})
 * with UnsupportedOperationException.
 *
 * @throws InstantiationException retained for signature compatibility; no longer thrown
 * @throws IllegalAccessException retained for signature compatibility; no longer thrown
 */
public void testWrongChildType_Update() throws InstantiationException, IllegalAccessException {
    // need a non-txn datasource so we can access multiple entity groups
    switchDatasource(PersistenceManagerFactoryName.nontransactional);
    // Direct construction instead of the deprecated SubChild.class.newInstance(),
    // which bypasses compile-time exception checking and is deprecated since Java 9.
    SubChild child = new SubChild();
    SuperParentWithSuperChild parent = new SuperParentWithSuperChild();
    parent.setSuperParentString("a string");
    beginTxn();
    pm.makePersistent(parent);
    commitTxn();
    parent = pm.getObjectById(parent.getClass(), parent.getId());
    try {
        parent.getSuperParentSuperChildren().add(child);
        pm.close();
        fail("expected exception");
    } catch (UnsupportedOperationException uoe) {
        // good: wrong child type rejected on flush
    }
}
}
| |
/**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.equity.variance.pricing;
import static com.opengamma.analytics.financial.model.volatility.smile.fitting.interpolation.SurfaceArrayUtils.getLowerBoundIndex;
import org.apache.commons.lang.ObjectUtils;
import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.util.ArgumentChecker;
/**
 * Bundles the three curves needed to price equity derivatives in the presence of
 * affine dividends (cash amount alpha_i plus proportional amount beta_i paid at
 * times tau_i): the forward curve F(t), the growth-factor curve R(t), and the
 * growth-factor-discounted cash-dividend curve D(t).
 */
public class EquityDividendsCurvesBundle {
  /** The forward curve function */
  private final Function1D<Double, Double> _f;
  /** The growth factor curve function */
  private final Function1D<Double, Double> _r;
  /** The growth factor discounted cash dividends curve function */
  private final Function1D<Double, Double> _d;
  /**
   * @param spot The spot, greater than zero
   * @param discountCurve The discount curve, not null
   * @param dividends The dividends, not null
   */
  public EquityDividendsCurvesBundle(final double spot, final YieldAndDiscountCurve discountCurve, final AffineDividends dividends) {
    ArgumentChecker.isTrue(spot > 0, "Negative spot. S_0 = {}", spot);
    ArgumentChecker.notNull(discountCurve, "null discount curve");
    ArgumentChecker.notNull(dividends, "null dividends");
    // build order matters: the forward and discounted-dividend curves are defined
    // in terms of the growth-factor curve
    _r = getGrowthFactorCurve(discountCurve, dividends);
    _d = getDiscountedCashDividendsCurve(_r, dividends);
    _f = getForwardCurve(spot, _r, dividends);
  }
  /**
   * Gets the forward curve.
   * @return the forward curve
   */
  public Function1D<Double, Double> getF() {
    return _f;
  }
  /**
   * Gets the growth factor curve.
   * @return the growth factor curve
   */
  public Function1D<Double, Double> getR() {
    return _r;
  }
  /**
   * Gets the growth factor discounted cash dividends curve.
   * @return the growth factor discounted cash dividends curve
   */
  public Function1D<Double, Double> getD() {
    return _d;
  }
  /**
   * Gets the forward value at a time.
   * @param t time
   * @return the forward value
   */
  public double getF(final double t) {
    return _f.evaluate(t);
  }
  /**
   * Gets the growth factor value at a time.
   * @param t time
   * @return the growth factor value
   */
  public double getR(final double t) {
    return _r.evaluate(t);
  }
  /**
   * Gets the growth factor discounted cash dividends value at a time.
   * @param t time
   * @return the growth factor discounted cash dividends value
   */
  public double getD(final double t) {
    return _d.evaluate(t);
  }
  // NOTE(review): _f/_r/_d are anonymous Function1D instances created per
  // construction; unless Function1D defines value equality, hashCode/equals
  // here effectively give identity semantics, so two bundles built from
  // identical inputs will not compare equal - confirm this is intended.
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + _d.hashCode();
    result = prime * result + _f.hashCode();
    result = prime * result + _r.hashCode();
    return result;
  }
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final EquityDividendsCurvesBundle other = (EquityDividendsCurvesBundle) obj;
    if (!ObjectUtils.equals(_d, other._d)) {
      return false;
    }
    if (!ObjectUtils.equals(_f, other._f)) {
      return false;
    }
    if (!ObjectUtils.equals(_r, other._r)) {
      return false;
    }
    return true;
  }
  /**
   * Builds the forward curve F(t) = R(t) * (spot - sum of alpha_i / R(tau_i) over
   * dividends with tau_i up to t). With no dividends this reduces to spot * R(t).
   */
  private static Function1D<Double, Double> getForwardCurve(final double spot, final Function1D<Double, Double> growthFactorCurve,
      final AffineDividends dividends) {
    if (dividends.getNumberOfDividends() == 0) {
      return new Function1D<Double, Double>() {
        @Override
        public Double evaluate(final Double t) {
          return spot * growthFactorCurve.evaluate(t);
        }
      };
    }
    // accum[i] = running (prefix) sum of alpha_j / R(tau_j) for j <= i, so the
    // per-dividend terms are computed once up front rather than on every evaluate
    final int n = dividends.getNumberOfDividends();
    final double[] accum = new double[n];
    double sum = dividends.getAlpha(0) / growthFactorCurve.evaluate(dividends.getTau(0));
    accum[0] = sum;
    for (int i = 1; i < n; i++) {
      sum += dividends.getAlpha(i) / growthFactorCurve.evaluate(dividends.getTau(i));
      accum[i] = sum;
    }
    return new Function1D<Double, Double>() {
      @Override
      public Double evaluate(final Double t) {
        if (t < dividends.getTau(0)) {
          // before the first dividend, no cash has been stripped out
          return spot * growthFactorCurve.evaluate(t);
        }
        // index of the last dividend time at or below t (per getLowerBoundIndex)
        final int index = getLowerBoundIndex(dividends.getTau(), t);
        final double total = accum[index];
        return growthFactorCurve.evaluate(t) * (spot - total);
      }
    };
  }
  /**
   * Builds D(t) = R(t) * sum of alpha_i / R(tau_i) over dividends strictly after t,
   * i.e. the growth-factor-discounted value of the remaining cash dividends.
   * Zero when there are no dividends or all have already been paid.
   */
  private static Function1D<Double, Double> getDiscountedCashDividendsCurve(final Function1D<Double, Double> growthFactorCurve,
      final AffineDividends dividends) {
    if (dividends.getNumberOfDividends() == 0) {
      return new Function1D<Double, Double>() {
        @Override
        public Double evaluate(final Double t) {
          return 0.0;
        }
      };
    }
    // accum[i] = suffix sum of alpha_j / R(tau_j) for j >= i (built back to front)
    final int n = dividends.getNumberOfDividends();
    final double[] accum = new double[n];
    double sum = dividends.getAlpha(n - 1) / growthFactorCurve.evaluate(dividends.getTau(n - 1));
    accum[n - 1] = sum;
    for (int i = n - 2; i >= 0; i--) {
      sum += dividends.getAlpha(i) / growthFactorCurve.evaluate(dividends.getTau(i));
      accum[i] = sum;
    }
    return new Function1D<Double, Double>() {
      @Override
      public Double evaluate(final Double t) {
        if (t >= dividends.getTau(n - 1)) {
          // all dividends paid - nothing left to discount
          return 0.0;
        }
        if (t < dividends.getTau(0)) {
          // all dividends still pending
          return growthFactorCurve.evaluate(t) * accum[0];
        }
        // +1 skips past the last dividend at or before t, leaving only future ones
        final int index = getLowerBoundIndex(dividends.getTau(), t) + 1;
        final double total = accum[index];
        return growthFactorCurve.evaluate(t) * total;
      }
    };
  }
  /**
   * Builds the growth-factor curve R(t) = prod of (1 - beta_i) over dividends with
   * tau_i up to t, divided by the discount factor P(0,t). With no dividends this is
   * simply 1 / P(0,t).
   */
  private static Function1D<Double, Double> getGrowthFactorCurve(final YieldAndDiscountCurve discCurve, final AffineDividends dividends) {
    if (dividends.getNumberOfDividends() == 0) {
      return new Function1D<Double, Double>() {
        @Override
        public Double evaluate(final Double t) {
          return 1.0 / discCurve.getDiscountFactor(t);
        }
      };
    }
    // accum[i] = running product of (1 - beta_j) for j <= i
    final int n = dividends.getNumberOfDividends();
    final double[] accum = new double[n];
    double prod = 1 - dividends.getBeta(0);
    accum[0] = prod;
    for (int i = 1; i < n; i++) {
      prod *= 1 - dividends.getBeta(i);
      accum[i] = prod;
    }
    return new Function1D<Double, Double>() {
      @Override
      public Double evaluate(final Double t) {
        if (t < dividends.getTau(0)) {
          // no proportional dividends yet applied
          return 1.0 / discCurve.getDiscountFactor(t);
        }
        final int index = getLowerBoundIndex(dividends.getTau(), t);
        final double total = accum[index];
        return total / discCurve.getDiscountFactor(t);
      }
    };
  }
}
| |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.transition;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import android.content.Context;
import android.graphics.Color;
import android.view.Gravity;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import androidx.test.filters.LargeTest;
import androidx.test.platform.app.InstrumentationRegistry;
import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;
import org.junit.Before;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
@LargeTest
public class ExplodeTest extends BaseTransitionTest {
    // Four colored squares, one in each corner of a full-screen frame, so the
    // Explode transition moves each one outward toward its own corner.
    private TranslationView mRedSquare;     // top-left
    private TranslationView mGreenSquare;   // top-right
    private TranslationView mBlueSquare;    // bottom-right
    private TranslationView mYellowSquare;  // bottom-left
    @Override
    Transition createTransition() {
        return new Explode();
    }
    /**
     * Lays out the four corner squares inside a match-parent FrameLayout on the
     * UI thread before each test.
     */
    @Before
    public void prepareViews() throws Throwable {
        final Context context = rule.getActivity();
        mRedSquare = new TranslationView(context);
        mGreenSquare = new TranslationView(context);
        mBlueSquare = new TranslationView(context);
        mYellowSquare = new TranslationView(context);
        rule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                final FrameLayout frame = new FrameLayout(rule.getActivity());
                mRedSquare.setBackgroundColor(Color.RED);
                frame.addView(mRedSquare,
                        new FrameLayout.LayoutParams(100, 100, Gravity.LEFT | Gravity.TOP));
                mGreenSquare.setBackgroundColor(Color.GREEN);
                frame.addView(mGreenSquare,
                        new FrameLayout.LayoutParams(100, 100, Gravity.RIGHT | Gravity.TOP));
                mBlueSquare.setBackgroundColor(Color.BLUE);
                frame.addView(mBlueSquare,
                        new FrameLayout.LayoutParams(100, 100, Gravity.RIGHT | Gravity.BOTTOM));
                mYellowSquare.setBackgroundColor(Color.YELLOW);
                frame.addView(mYellowSquare,
                        new FrameLayout.LayoutParams(100, 100, Gravity.LEFT | Gravity.BOTTOM));
                mRoot.addView(frame,
                        LinearLayout.LayoutParams.MATCH_PARENT,
                        LinearLayout.LayoutParams.MATCH_PARENT);
            }
        });
    }
    /**
     * Hides all four squares under an Explode transition and verifies that each
     * square animates outward toward its corner (monotonic translation history),
     * stays visible during the animation, and ends invisible with translations
     * reset to zero.
     */
    @Test
    public void testExplode() throws Throwable {
        rule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                TransitionManager.beginDelayedTransition(mRoot, mTransition);
                mRedSquare.setVisibility(View.INVISIBLE);
                mGreenSquare.setVisibility(View.INVISIBLE);
                mBlueSquare.setVisibility(View.INVISIBLE);
                mYellowSquare.setVisibility(View.INVISIBLE);
            }
        });
        waitForStart();
        // while the transition runs, the views must still be drawn
        assertThat(mRedSquare, hasVisibility(View.VISIBLE));
        assertThat(mGreenSquare, hasVisibility(View.VISIBLE));
        assertThat(mBlueSquare, hasVisibility(View.VISIBLE));
        assertThat(mYellowSquare, hasVisibility(View.VISIBLE));
        waitForEnd();
        // outward motion: sign of each axis depends on which corner the view sits in
        assertThat(mRedSquare.getHistoricalTranslationX(), is(decreasing()));
        assertThat(mRedSquare.getHistoricalTranslationY(), is(decreasing()));
        assertThat(mGreenSquare.getHistoricalTranslationX(), is(increasing()));
        assertThat(mGreenSquare.getHistoricalTranslationY(), is(decreasing()));
        assertThat(mBlueSquare.getHistoricalTranslationX(), is(increasing()));
        assertThat(mBlueSquare.getHistoricalTranslationY(), is(increasing()));
        assertThat(mYellowSquare.getHistoricalTranslationX(), is(decreasing()));
        assertThat(mYellowSquare.getHistoricalTranslationY(), is(increasing()));
        // after the transition: target visibility applied, translations restored
        assertThat(mRedSquare, allOf(hasVisibility(View.INVISIBLE), hasNoTranslations()));
        assertThat(mGreenSquare, allOf(hasVisibility(View.INVISIBLE), hasNoTranslations()));
        assertThat(mBlueSquare, allOf(hasVisibility(View.INVISIBLE), hasNoTranslations()));
        assertThat(mYellowSquare, allOf(hasVisibility(View.INVISIBLE), hasNoTranslations()));
    }
    /**
     * The reverse of {@link #testExplode}: starts with the squares hidden, then
     * shows them under the transition and verifies inward (imploding) motion and
     * the final visible, untranslated state.
     */
    @Test
    public void testImplode() throws Throwable {
        rule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mRedSquare.setVisibility(View.INVISIBLE);
                mGreenSquare.setVisibility(View.INVISIBLE);
                mBlueSquare.setVisibility(View.INVISIBLE);
                mYellowSquare.setVisibility(View.INVISIBLE);
            }
        });
        // make sure the hide above is committed before starting the transition
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
        rule.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                TransitionManager.beginDelayedTransition(mRoot, mTransition);
                mRedSquare.setVisibility(View.VISIBLE);
                mGreenSquare.setVisibility(View.VISIBLE);
                mBlueSquare.setVisibility(View.VISIBLE);
                mYellowSquare.setVisibility(View.VISIBLE);
            }
        });
        waitForStart();
        assertThat(mRedSquare, hasVisibility(View.VISIBLE));
        assertThat(mGreenSquare, hasVisibility(View.VISIBLE));
        assertThat(mBlueSquare, hasVisibility(View.VISIBLE));
        assertThat(mYellowSquare, hasVisibility(View.VISIBLE));
        waitForEnd();
        // inward motion: translation signs are the mirror of the explode case
        assertThat(mRedSquare.getHistoricalTranslationX(), is(increasing()));
        assertThat(mRedSquare.getHistoricalTranslationY(), is(increasing()));
        assertThat(mGreenSquare.getHistoricalTranslationX(), is(decreasing()));
        assertThat(mGreenSquare.getHistoricalTranslationY(), is(increasing()));
        assertThat(mBlueSquare.getHistoricalTranslationX(), is(decreasing()));
        assertThat(mBlueSquare.getHistoricalTranslationY(), is(decreasing()));
        assertThat(mYellowSquare.getHistoricalTranslationX(), is(increasing()));
        assertThat(mYellowSquare.getHistoricalTranslationY(), is(decreasing()));
        assertThat(mRedSquare, allOf(hasVisibility(View.VISIBLE), hasNoTranslations()));
        assertThat(mGreenSquare, allOf(hasVisibility(View.VISIBLE), hasNoTranslations()));
        assertThat(mBlueSquare, allOf(hasVisibility(View.VISIBLE), hasNoTranslations()));
        assertThat(mYellowSquare, allOf(hasVisibility(View.VISIBLE), hasNoTranslations()));
    }
    /** Sanity checks for the increasing()/decreasing() matchers themselves. */
    @Test
    public void precondition() {
        assertThat(Arrays.asList(1f, 2f, 3f), is(increasing()));
        assertThat(Arrays.asList(3f, 2f, 1f), is(decreasing()));
        assertThat(Arrays.asList(1f, 9f, 3f), is(not(increasing())));
        assertThat(Arrays.asList(1f, 9f, 3f), is(not(decreasing())));
    }
    /** Matches a view whose current visibility equals {@code visibility}. */
    private Matcher<View> hasVisibility(final int visibility) {
        return new TypeSafeMatcher<View>() {
            @Override
            public void describeTo(Description description) {
                description.appendText("has visibility " + visibility);
            }
            @Override
            protected boolean matchesSafely(View item) {
                return item.getVisibility() == visibility;
            }
        };
    }
    /** Matches a view whose translationX and translationY are both exactly zero. */
    private Matcher<View> hasNoTranslations() {
        return new TypeSafeMatcher<View>() {
            @Override
            public void describeTo(Description description) {
                description.appendText("has no translationX or Y");
            }
            @Override
            protected boolean matchesSafely(View item) {
                return item.getTranslationX() == 0f && item.getTranslationY() == 0f;
            }
        };
    }
    private ListOrderingMatcher increasing() {
        return new ListOrderingMatcher(false);
    }
    private ListOrderingMatcher decreasing() {
        return new ListOrderingMatcher(true);
    }
    /**
     * Matches a list that is monotonically non-increasing (desc == true) or
     * non-decreasing (desc == false); adjacent equal values are accepted.
     */
    private static class ListOrderingMatcher extends TypeSafeMatcher<List<Float>> {
        private final boolean mDesc;
        ListOrderingMatcher(boolean desc) {
            mDesc = desc;
        }
        @Override
        public void describeTo(Description description) {
            description.appendText("List is ordered");
        }
        @Override
        protected boolean matchesSafely(List<Float> item) {
            for (int i = 0, max = item.size() - 1; i < max; i++) {
                if (mDesc) {
                    if (item.get(i) < item.get(i + 1)) {
                        return false;
                    }
                } else {
                    if (item.get(i) > item.get(i + 1)) {
                        return false;
                    }
                }
            }
            return true;
        }
    }
}
| |
//
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license.
//
// Project Oxford: http://ProjectOxford.ai
//
// ProjectOxford SDK Github:
// https://github.com/Microsoft/ProjectOxfordSDK-Windows
//
// Copyright (c) Microsoft Corporation
// All rights reserved.
//
// MIT License:
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED ""AS IS"", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
package com.microsoft.AzureIntelligentServicesExample;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import com.microsoft.projectoxford.speechrecognition.Contract;
import com.microsoft.projectoxford.speechrecognition.DataRecognitionClient;
import com.microsoft.projectoxford.speechrecognition.DataRecognitionClientWithIntent;
import com.microsoft.projectoxford.speechrecognition.ISpeechRecognitionServerEvents;
import com.microsoft.projectoxford.speechrecognition.MicrophoneRecognitionClient;
import com.microsoft.projectoxford.speechrecognition.MicrophoneRecognitionClientWithIntent;
import com.microsoft.projectoxford.speechrecognition.RecognitionResult;
import com.microsoft.projectoxford.speechrecognition.RecognitionStatus;
import com.microsoft.projectoxford.speechrecognition.SpeechRecognitionMode;
import com.microsoft.projectoxford.speechrecognition.SpeechRecognitionServiceFactory;
import java.io.IOException;
import java.io.InputStream;
import java.util.concurrent.TimeUnit;
/**
 * Sample activity demonstrating the Project Oxford speech recognition SDK:
 * microphone- or file-based recognition, optionally with LUIS intent parsing.
 * Results and status are appended to the on-screen EditText; recognition is
 * driven by the callbacks of ISpeechRecognitionServerEvents.
 */
public class MainActivity extends Activity implements ISpeechRecognitionServerEvents
{
    // max seconds to wait for a data-recognition result (set in onCreate from the mode)
    int m_waitSeconds = 0;
    DataRecognitionClient m_dataClient = null;
    MicrophoneRecognitionClient m_micClient = null;
    boolean m_isMicrophoneReco;          // true = mic input, false = audio-file input
    SpeechRecognitionMode m_recoMode;    // ShortPhrase or LongDictation
    boolean m_isIntent;                  // true = also request LUIS intent parsing
    FinalResponseStatus isReceivedResponse = FinalResponseStatus.NotReceived;
    /** Tracks whether a final recognition response has arrived, and how. */
    public enum FinalResponseStatus { NotReceived, OK, Timeout }
    /** Appends an interim (partial) hypothesis to the output box. */
    public void onPartialResponseReceived(final String response)
    {
        EditText myEditText = (EditText) findViewById(R.id.editText1);
        myEditText.append("********* Partial Result *********\n");
        myEditText.append(response + "\n");
    }
    /**
     * Handles a final recognition response: stops the mic when appropriate,
     * re-enables the start button, and prints the n-best results.
     */
    public void onFinalResponseReceived(final RecognitionResult response)
    {
        // in LongDictation mode only the end-of-dictation/silence-timeout message is final
        boolean isFinalDicationMessage = m_recoMode == SpeechRecognitionMode.LongDictation &&
                (response.RecognitionStatus == RecognitionStatus.EndOfDictation ||
                        response.RecognitionStatus == RecognitionStatus.DictationEndSilenceTimeout);
        if (m_isMicrophoneReco && ((m_recoMode == SpeechRecognitionMode.ShortPhrase) || isFinalDicationMessage)) {
            // we got the final result, so it we can end the mic reco. No need to do this
            // for dataReco, since we already called endAudio() on it as soon as we were done
            // sending all the data.
            m_micClient.endMicAndRecognition();
        }
        if ((m_recoMode == SpeechRecognitionMode.ShortPhrase) || isFinalDicationMessage) {
            Button startButton = (Button) findViewById(R.id.button1);
            startButton.setEnabled(true);
            this.isReceivedResponse = FinalResponseStatus.OK;
        }
        if (!isFinalDicationMessage) {
            EditText myEditText = (EditText) findViewById(R.id.editText1);
            myEditText.append("***** Final NBEST Results *****\n");
            for (int i = 0; i < response.Results.length; i++) {
                myEditText.append(i + " Confidence=" + response.Results[i].Confidence +
                        " Text=\"" + response.Results[i].DisplayText + "\"\n");
            }
        }
    }
    /**
     * Called when a final response is received and its intent is parsed
     */
    public void onIntentReceived(final String payload)
    {
        EditText myEditText = (EditText) findViewById(R.id.editText1);
        myEditText.append("********* Final Intent *********\n");
        myEditText.append(payload + "\n");
    }
    /** Reports a recognition error and re-enables the start button. */
    public void onError(final int errorCode, final String response)
    {
        Button startButton = (Button) findViewById(R.id.button1);
        startButton.setEnabled(true);
        EditText myEditText = (EditText) findViewById(R.id.editText1);
        myEditText.append("********* Error Detected *********\n");
        myEditText.append(errorCode + " " + response + "\n");
    }
    /**
     * Invoked when the audio recording state has changed.
     *
     * @param recording The current recording state
     */
    public void onAudioEvent(boolean recording)
    {
        if (!recording) {
            m_micClient.endMicAndRecognition();
            Button startButton = (Button) findViewById(R.id.button1);
            startButton.setEnabled(true);
        }
        EditText myEditText = (EditText) findViewById(R.id.editText1);
        myEditText.append("********* Microphone status: " + recording + " *********\n");
    }
    /**
     * Speech recognition with data (for example from a file or audio source).
     * The data is broken up into buffers and each buffer is sent to the Speech Recognition Service.
     * No modification is done to the buffers, so the user can apply their
     * own VAD (Voice Activation Detection) or Silence Detection
     *
     * @param dataClient
     * @param recoMode
     * @param filename
     */
    private class RecognitionTask extends AsyncTask<Void, Void, Void> {
        DataRecognitionClient dataClient;
        SpeechRecognitionMode recoMode;
        String filename;
        RecognitionTask(DataRecognitionClient dataClient, SpeechRecognitionMode recoMode, String filename) {
            this.dataClient = dataClient;
            this.recoMode = recoMode;
            this.filename = filename;
        }
        @Override
        protected Void doInBackground(Void... params) {
            // NOTE(review): the asset InputStream is never closed on success or
            // failure; a try-with-resources around fileStream would fix the leak.
            try {
                // Note for wave files, we can just send data from the file right to the server.
                // In the case you are not an audio file in wave format, and instead you have just
                // raw data (for example audio coming over bluetooth), then before sending up any
                // audio data, you must first send up an SpeechAudioFormat descriptor to describe
                // the layout and format of your raw audio data via DataRecognitionClient's sendAudioFormat() method.
                // String filename = recoMode == SpeechRecognitionMode.ShortPhrase ? "whatstheweatherlike.wav" : "batman.wav";
                InputStream fileStream = getAssets().open(filename);
                int bytesRead = 0;
                byte[] buffer = new byte[1024];
                do {
                    // Get Audio data to send into byte buffer.
                    bytesRead = fileStream.read(buffer);
                    if (bytesRead > -1) {
                        // Send of audio data to service.
                        dataClient.sendAudio(buffer, bytesRead);
                    }
                } while (bytesRead > 0);
            }
            catch(IOException ex) {
                Contract.fail();
            }
            finally {
                // always signal end of audio so the service finalizes recognition
                dataClient.endAudio();
            }
            return null;
        }
    }
    /**
     * Lazily creates the recognition client matching the current mode flags:
     * microphone vs. data input, with or without LUIS intent parsing.
     * Credentials come from string resources.
     */
    void initializeRecoClient()
    {
        String language = "en-us";
        String subscriptionKey = this.getString(R.string.subscription_key);
        String luisAppID = this.getString(R.string.luisAppID);
        String luisSubscriptionID = this.getString(R.string.luisSubscriptionID);
        if (m_isMicrophoneReco && null == m_micClient) {
            if (!m_isIntent) {
                m_micClient = SpeechRecognitionServiceFactory.createMicrophoneClient(this,
                        m_recoMode,
                        language,
                        this,
                        subscriptionKey);
            }
            else {
                MicrophoneRecognitionClientWithIntent intentMicClient;
                intentMicClient = SpeechRecognitionServiceFactory.createMicrophoneClientWithIntent(this,
                        language,
                        this,
                        subscriptionKey,
                        luisAppID,
                        luisSubscriptionID);
                m_micClient = intentMicClient;
            }
        }
        else if (!m_isMicrophoneReco && null == m_dataClient) {
            if (!m_isIntent) {
                m_dataClient = SpeechRecognitionServiceFactory.createDataClient(this,
                        m_recoMode,
                        language,
                        this,
                        subscriptionKey);
            }
            else {
                DataRecognitionClientWithIntent intentDataClient;
                intentDataClient = SpeechRecognitionServiceFactory.createDataClientWithIntent(this,
                        language,
                        this,
                        subscriptionKey,
                        luisAppID,
                        luisSubscriptionID);
                m_dataClient = intentDataClient;
            }
        }
    }
    /**
     * Wires the Start button (kick off mic or file recognition) and the Final
     * Response button (summarize the outcome in a dialog and tear the client down).
     */
    void addListenerOnButton()
    {
        final Button startButton = (Button) findViewById(R.id.button1);
        startButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0)
            {
                EditText myEditText = (EditText) findViewById(R.id.editText1);
                myEditText.setText("");
                startButton.setEnabled(false);
                if (m_isMicrophoneReco) {
                    // Speech recognition from the microphone. The microphone is turned on and data from the microphone
                    // is sent to the Speech Recognition Service. A built in Silence Detector
                    // is applied to the microphone data before it is sent to the recognition service.
                    m_micClient.startMicAndRecognition();
                }
                else {
                    String filename = m_recoMode == SpeechRecognitionMode.ShortPhrase ? "whatstheweatherlike.wav" : "batman.wav";
                    RecognitionTask doDataReco = new RecognitionTask(m_dataClient, m_recoMode, filename);
                    try
                    {
                        // NOTE(review): execute().get(...) blocks the UI thread for up
                        // to m_waitSeconds, defeating the purpose of the AsyncTask and
                        // risking an ANR - consider onPostExecute or a callback instead.
                        doDataReco.execute().get(m_waitSeconds, TimeUnit.SECONDS);
                    }
                    catch (Exception e)
                    {
                        doDataReco.cancel(true);
                        isReceivedResponse = FinalResponseStatus.Timeout;
                    }
                }
            }
        });
        final Context appContext = this;
        Button finalResponseButton = (Button) findViewById(R.id.button2);
        finalResponseButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View arg0)
            {
                AlertDialog alertDialog;
                alertDialog = new AlertDialog.Builder(appContext).create();
                alertDialog.setTitle("Final Response");
                EditText myEditText = (EditText) findViewById(R.id.editText1);
                if (m_micClient != null) {
                    // NOTE(review): busy-wait on the UI thread; if the final response
                    // is delivered on this same thread this spins forever, and either
                    // way it burns CPU and freezes the UI - should use a callback,
                    // latch, or handler rather than an empty spin loop.
                    while(isReceivedResponse == FinalResponseStatus.NotReceived) {}
                    m_micClient.endMicAndRecognition();
                    String msg = isReceivedResponse == FinalResponseStatus.OK ? "See TextBox below for response. App Done" : "Timed out. App Done";
                    alertDialog.setMessage(msg);
                    startButton.setEnabled(false);
                    // NOTE(review): calling finalize() directly is a misuse of the
                    // finalization API; the SDK presumably offers a dispose/close
                    // method that should be used instead - confirm.
                    try {
                        m_micClient.finalize();
                    } catch (Throwable e) {
                        myEditText.append(e + "\n");
                    }
                }
                else if (m_dataClient != null) {
                    String msg = isReceivedResponse == FinalResponseStatus.OK ? "See TextBox below for response. App Done" : "Timed out. App Done";
                    alertDialog.setMessage(msg);
                    startButton.setEnabled(false);
                    // NOTE(review): same direct finalize() misuse as above.
                    try {
                        m_dataClient.finalize();
                    } catch (Throwable e) {
                        myEditText.append(e + "\n");
                    }
                }
                else {
                    alertDialog.setMessage("Press Start first please!");
                }
                alertDialog.show();
            }
        });
    }
    /**
     * Configures the demo (mode, input source, intent flag), warns if the
     * subscription key placeholder was not replaced, creates the client, and
     * wires the buttons.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // the placeholder key text starts with "Please"; warn until it is replaced
        if (getString(R.string.subscription_key).startsWith("Please")) {
            new AlertDialog.Builder(this)
                    .setTitle(getString(R.string.add_subscription_key_tip_title))
                    .setMessage(getString(R.string.add_subscription_key_tip))
                    .setCancelable(false)
                    .show();
        }
        // Set the mode and microphone flag to your liking
        // m_recoMode can be SpeechRecognitionMode.ShortPhrase or SpeechRecognitionMode.LongDictation
        m_recoMode = SpeechRecognitionMode.ShortPhrase;
        m_isMicrophoneReco = true;
        m_isIntent = false;
        // short phrases resolve quickly; long dictation may stream for minutes
        m_waitSeconds = m_recoMode == SpeechRecognitionMode.ShortPhrase ? 20 : 200;
        initializeRecoClient();
        // setup the buttons
        addListenerOnButton();
    }
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }
}
| |
/**
*
*/
package co.gptech.mmlapplication.common.model;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import co.gptech.mmlapplication.common.model.pk.PersonPk;
/**
* The persistent class for the <code>MML_PERSONS</code> database table.<br>
* <br>
* Table of persons in the application.
*
* @since mml-application-1.0
* @version 1.0 - <code>Person.java</code> Mar 26, 2014 9:39:34 PM
* @author Jonathan Garzón Peña - jgarzon@gptech.com.co
*
*/
@Entity
@Table(name = "MML_PERSONS")
public class Person implements Serializable {
/**
 * Serialization version for this entity class.
 */
private static final long serialVersionUID = -4886538121506337089L;
/**
 * Composite primary key (table field ID).
 */
@EmbeddedId
private PersonPk id;
/**
 * First given name (table field persName).
 */
@Column(name = "persName", length = 30)
private String firstName;
/**
 * Second given name, optional (table field persSname).
 */
@Column(name = "persSname", length = 30)
private String middleName;
/**
 * First surname (table field persLastName).
 */
@Column(name = "persLastName", length = 30)
private String lastName;
/**
 * Second surname, optional (table field persSlastName).
 */
@Column(name = "persSlastName", length = 30)
private String secondLastName;
/**
 * Date of birth, date precision only (table field persBirthDate).
 */
@Temporal(TemporalType.DATE)
@Column(name = "persBirthDate")
private Date birthDate;
/**
 * E-mail address (table field persEmail).
 */
@Column(name = "persEmail", length = 100)
private String email;
/**
 * Sex code, single character (table field persSex).
 */
@Column(name = "persSex", length = 1)
private String sex;
/**
 * Person type code, single character (table field persType).
 */
@Column(name = "persType", length = 1)
private String type;
/**
 * Password (table field persPassword).
 * NOTE(review): stored as a plain String column - confirm it is hashed
 * before persisting, and avoid logging this field.
 */
@Column(name = "persPassword", length = 255)
private String password;
/**
 * Associated user type (table field ustyId, many-to-one).
 */
@ManyToOne
@JoinColumn(name = "ustyId")
private UserType userType;
/**
 * Registration timestamp (table field persRegisterDate); not insertable, so
 * presumably populated by a database default - confirm against the schema.
 */
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "persRegisterDate", insertable = false)
private Date registerDate;
/**
 * Default constructor required by JPA.
 */
public Person() {
}
/**
 * Creates a person with the given primary key.
 *
 * @param id the composite primary key
 */
public Person(PersonPk id) {
    this.id = id;
}
/**
 * @return the id
 */
public PersonPk getId() {
    return id;
}
/**
 * @param id
 *            the id to set
 */
public void setId(PersonPk id) {
    this.id = id;
}
/**
 * @return the firstName
 */
public String getFirstName() {
    return firstName;
}
/**
 * @param firstName
 *            the firstName to set
 */
public void setFirstName(String firstName) {
    this.firstName = firstName;
}
/**
 * @return the middleName
 */
public String getMiddleName() {
    return middleName;
}
/**
 * @param middleName
 *            the middleName to set
 */
public void setMiddleName(String middleName) {
    this.middleName = middleName;
}
/**
 * @return the lastName
 */
public String getLastName() {
    return lastName;
}
/**
 * @param lastName
 *            the lastName to set
 */
public void setLastName(String lastName) {
    this.lastName = lastName;
}
/**
 * @return the secondLastName
 */
public String getSecondLastName() {
    return secondLastName;
}
/**
 * @param secondLastName
 *            the secondLastName to set
 */
public void setSecondLastName(String secondLastName) {
    this.secondLastName = secondLastName;
}
/**
 * @return the birthDate
 */
public Date getBirthDate() {
    return birthDate;
}
/**
 * @param birthDate
 *            the birthDate to set
 */
public void setBirthDate(Date birthDate) {
    this.birthDate = birthDate;
}
/**
 * @return the email
 */
public String getEmail() {
    return email;
}
/**
 * @param email
 *            the email to set
 */
public void setEmail(String email) {
    this.email = email;
}
/**
 * @return the sex
 */
public String getSex() {
    return sex;
}
/**
 * @param sex
 *            the sex to set
 */
public void setSex(String sex) {
    this.sex = sex;
}
/**
 * @return the type
 */
public String getType() {
    return type;
}
/**
 * @param type
 *            the type to set
 */
public void setType(String type) {
    this.type = type;
}
/**
 * @return the password
 */
public String getPassword() {
    return password;
}
/**
 * @param password
 *            the password to set
 */
public void setPassword(String password) {
    this.password = password;
}
/**
 * @return the userType
 */
public UserType getUserType() {
    return userType;
}
/**
 * @param userType
 *            the userType to set
 */
public void setUserType(UserType userType) {
    this.userType = userType;
}
/**
 * @return the registerDate
 */
public Date getRegisterDate() {
    return registerDate;
}
/**
 * @param registerDate
 *            the registerDate to set
 */
public void setRegisterDate(Date registerDate) {
    this.registerDate = registerDate;
}
/**
 * Hash derived solely from the primary key, mirroring {@link #equals(Object)}.
 *
 * @see java.lang.Object#hashCode()
 */
@Override
public int hashCode() {
    // Identical to the classic prime-accumulator form: 31 * 1 + idHash.
    return 31 + (id == null ? 0 : id.hashCode());
}
/**
 * Equality is based solely on the primary key {@code id}, consistent with
 * {@link #hashCode()}.
 *
 * @see java.lang.Object#equals(java.lang.Object)
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    // instanceof is false for null, so a separate null check is redundant.
    if (!(obj instanceof Person)) {
        return false;
    }
    Person other = (Person) obj;
    return this.id == null ? other.id == null : this.id.equals(other.id);
}
/**
 * Renders as {@code Person: {<id>}}.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    // Plain concatenation produces the same text as the StringBuilder version.
    return "Person: {" + id + "}";
}
}
| |
/**
* Copyright (C) 2012-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ninja.cache;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import net.spy.memcached.AddrUtil;
import net.spy.memcached.ConnectionFactory;
import net.spy.memcached.ConnectionFactoryBuilder;
import net.spy.memcached.MemcachedClient;
import net.spy.memcached.auth.AuthDescriptor;
import net.spy.memcached.auth.PlainCallbackHandler;
import net.spy.memcached.transcoders.SerializingTranscoder;
import ninja.lifecycle.Dispose;
import ninja.utils.NinjaConstant;
import ninja.utils.NinjaProperties;
import org.slf4j.Logger;
import com.google.inject.Inject;
import com.google.inject.Singleton;
/**
 * Memcached implementation (using http://code.google.com/p/spymemcached/)
 * expiration is specified in seconds.
 *
 * Heavily inspired by excellent Play! 1.2.5 implementation.
 *
 * All "safe" variants and {@link #get(String)} wait at most one second for the
 * asynchronous memcached operation and fall back to a neutral value
 * (null / false / empty map) on timeout or error.
 */
@Singleton
public class CacheMemcachedImpl implements Cache {

    private final Logger logger;

    private final MemcachedClient client;

    /** Transcoder that (de)serializes cached values via plain Java serialization. */
    private final SerializingTranscoder tc;

    private final NinjaProperties ninjaProperties;

    @Inject
    public CacheMemcachedImpl(
            final Logger logger,
            final NinjaProperties ninjaProperties) throws Exception {

        this.logger = logger;
        this.ninjaProperties = ninjaProperties;

        this.tc = new SerializingTranscoder() {

            @Override
            protected Object deserialize(byte[] data) {
                // Resolve classes against the thread context classloader so
                // application classes are found regardless of which loader
                // loaded this cache implementation.
                try (ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(data)) {
                    @Override
                    protected Class<?> resolveClass(ObjectStreamClass desc)
                            throws IOException, ClassNotFoundException {
                        return Class.forName(desc.getName(), false, Thread.currentThread().getContextClassLoader());
                    }
                }) {
                    return in.readObject();
                } catch (Exception e) {
                    logger.error("Could not deserialize", e);
                }
                return null;
            }

            @Override
            protected byte[] serialize(Object object) {
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                // try-with-resources closes (and therefore flushes) the
                // ObjectOutputStream before the buffer is read. The previous
                // version called bos.toByteArray() without flushing, so bytes
                // still buffered inside the ObjectOutputStream could be lost,
                // producing a truncated serialized form.
                try (ObjectOutputStream out = new ObjectOutputStream(bos)) {
                    out.writeObject(object);
                } catch (IOException e) {
                    logger.error("Could not serialize", e);
                    return null;
                }
                return bos.toByteArray();
            }
        };

        System.setProperty("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.Log4JLogger");

        List<InetSocketAddress> addrs;
        String allMemcachedHosts = ninjaProperties.getOrDie(NinjaConstant.MEMCACHED_HOST);
        addrs = AddrUtil.getAddresses(allMemcachedHosts);

        String memcachedUser = ninjaProperties.get(NinjaConstant.MEMCACHED_USER);
        if (memcachedUser != null) {
            String memcachePassword = ninjaProperties.getOrDie(NinjaConstant.MEMCACHED_PASSWORD);

            // Use plain SASL to connect to memcached
            AuthDescriptor ad = new AuthDescriptor(new String[]{"PLAIN"},
                    new PlainCallbackHandler(memcachedUser, memcachePassword));

            ConnectionFactory cf = new ConnectionFactoryBuilder()
                    .setProtocol(ConnectionFactoryBuilder.Protocol.BINARY)
                    .setAuthDescriptor(ad)
                    .build();

            client = new MemcachedClient(cf, addrs);
        } else {
            client = new MemcachedClient(addrs);
        }
    }

    /** Adds the value only if the key is not already present (fire-and-forget). */
    public void add(String key, Object value, int expiration) {
        client.add(key, expiration, value, tc);
    }

    /** Returns the cached value, or null on miss, timeout (1s) or error. */
    public Object get(String key) {
        Future<Object> future = client.asyncGet(key, tc);
        try {
            return future.get(1, TimeUnit.SECONDS);
        } catch (Exception e) {
            future.cancel(false);
        }
        return null;
    }

    /** Flushes the entire cache (asynchronous; result is not awaited). */
    public void clear() {
        client.flush();
    }

    /** Deletes the key (fire-and-forget). */
    public void delete(String key) {
        client.delete(key);
    }

    /** Bulk get; returns an empty map on timeout (1s) or error. */
    public Map<String, Object> get(String[] keys) {
        Future<Map<String, Object>> future = client.asyncGetBulk(tc, keys);
        try {
            return future.get(1, TimeUnit.SECONDS);
        } catch (Exception e) {
            future.cancel(false);
        }
        return Collections.<String, Object>emptyMap();
    }

    /** Atomically increments the counter, creating it as 0 if absent. */
    public long incr(String key, int by) {
        return client.incr(key, by, 0);
    }

    /** Atomically decrements the counter, creating it as 0 if absent. */
    public long decr(String key, int by) {
        return client.decr(key, by, 0);
    }

    /** Replaces the value only if the key already exists (fire-and-forget). */
    public void replace(String key, Object value, int expiration) {
        client.replace(key, expiration, value, tc);
    }

    /** Like {@link #add}, but reports success; false on timeout (1s) or error. */
    public boolean safeAdd(String key, Object value, int expiration) {
        Future<Boolean> future = client.add(key, expiration, value, tc);
        try {
            return future.get(1, TimeUnit.SECONDS);
        } catch (Exception e) {
            future.cancel(false);
        }
        return false;
    }

    /** Like {@link #delete}, but reports success; false on timeout (1s) or error. */
    public boolean safeDelete(String key) {
        Future<Boolean> future = client.delete(key);
        try {
            return future.get(1, TimeUnit.SECONDS);
        } catch (Exception e) {
            future.cancel(false);
        }
        return false;
    }

    /** Like {@link #replace}, but reports success; false on timeout (1s) or error. */
    public boolean safeReplace(String key, Object value, int expiration) {
        Future<Boolean> future = client.replace(key, expiration, value, tc);
        try {
            return future.get(1, TimeUnit.SECONDS);
        } catch (Exception e) {
            future.cancel(false);
        }
        return false;
    }

    /** Like {@link #set}, but reports success; false on timeout (1s) or error. */
    public boolean safeSet(String key, Object value, int expiration) {
        Future<Boolean> future = client.set(key, expiration, value, tc);
        try {
            return future.get(1, TimeUnit.SECONDS);
        } catch (Exception e) {
            future.cancel(false);
        }
        return false;
    }

    /** Unconditionally stores the value (fire-and-forget). */
    public void set(String key, Object value, int expiration) {
        client.set(key, expiration, value, tc);
    }

    /** Lifecycle hook: shuts the memcached client down on application disposal. */
    @Dispose
    public void stop() {
        client.shutdown();
    }
}
| |
/*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.web.controller;
import static org.hamcrest.Matchers.hasKey;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import com.navercorp.pinpoint.web.dao.UserDao;
import com.navercorp.pinpoint.web.vo.User;
/**
 * Integration tests for the user CRUD endpoints at {@code /user.pinpoint}.
 *
 * @author minwoo.jung
 */
@Ignore
@RunWith(SpringJUnit4ClassRunner.class)
@WebAppConfiguration
@ContextConfiguration(locations = {"classpath:servlet-context.xml", "classpath:applicationContext-web.xml"})
public class UserControllerTest {

    private final static String USER_ID = "naver00";
    private final static String USER_NAME = "minwoo";
    private final static String USER_NAME_UPDATED = "minwoo.jung";
    private final static String USER_DEPARTMENT = "Web platfrom development team";
    private final static String USER_PHONENUMBER = "01012347890";
    private final static String USER_PHONENUMBER_UPDATED = "01000000000";
    private final static String USER_EMAIL = "min@naver.com";
    private final static String USER_EMAIL_UPDATED = "minwoo@naver.com";

    @Autowired
    private WebApplicationContext wac;

    @Autowired
    private UserDao userDao;

    private MockMvc mockMvc;

    @Before
    public void setup() {
        this.mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build();
        // Remove any leftover test user so each test starts from a clean slate.
        User user = new User();
        user.setUserId(USER_ID);
        userDao.deleteUser(user);
    }

    /**
     * Builds the JSON request body for the fixed test user, varying only the
     * mutable fields (the userId and department are constants).
     */
    private static String userJson(String name, String phoneNumber, String email) {
        return "{" +
                "\"userId\" : \"" + USER_ID + "\"," +
                "\"name\" : \"" + name + "\"," +
                "\"department\" : \"" + USER_DEPARTMENT + "\"," +
                "\"phoneNumber\" : \"" + phoneNumber + "\"," +
                "\"email\" : \"" + email + "\"" +
                "}";
    }

    /** POSTs the given user JSON and expects a {"result" : "SUCCESS"} response. */
    private void createUser(String json) throws Exception {
        this.mockMvc.perform(post("/user.pinpoint").contentType(MediaType.APPLICATION_JSON).content(json))
                .andExpect(status().isOk())
                .andExpect(content().contentType("application/json;charset=UTF-8"))
                .andExpect(jsonPath("$", hasKey("result")))
                .andExpect(jsonPath("$.result").value("SUCCESS"))
                .andReturn();
    }

    /** PUTs the given user JSON and expects a {"result" : "SUCCESS"} response. */
    private void updateUserRequest(String json) throws Exception {
        this.mockMvc.perform(put("/user.pinpoint").contentType(MediaType.APPLICATION_JSON).content(json))
                .andExpect(status().isOk())
                .andExpect(content().contentType("application/json;charset=UTF-8"))
                .andExpect(jsonPath("$", hasKey("result")))
                .andExpect(jsonPath("$.result").value("SUCCESS"))
                .andReturn();
    }

    /** DELETEs the fixed test user and expects a {"result" : "SUCCESS"} response. */
    private void removeUser() throws Exception {
        this.mockMvc.perform(delete("/user.pinpoint").contentType(MediaType.APPLICATION_JSON).content("{\"userId\" : \"" + USER_ID + "\"}"))
                .andExpect(status().isOk())
                .andExpect(content().contentType("application/json;charset=UTF-8"))
                .andExpect(jsonPath("$", hasKey("result")))
                .andExpect(jsonPath("$.result").value("SUCCESS"))
                .andReturn();
    }

    /** GETs the given URL and checks every user field is present on the first row. */
    private void assertUserListed(String url) throws Exception {
        this.mockMvc.perform(get(url).contentType(MediaType.APPLICATION_JSON))
                .andExpect(status().isOk())
                .andExpect(content().contentType("application/json;charset=UTF-8"))
                .andExpect(jsonPath("$[0]", hasKey("userId")))
                .andExpect(jsonPath("$[0]", hasKey("name")))
                .andExpect(jsonPath("$[0]", hasKey("department")))
                .andExpect(jsonPath("$[0]", hasKey("phoneNumber")))
                .andExpect(jsonPath("$[0]", hasKey("email")))
                .andReturn();
    }

    @Test
    public void insertAndSelectAndDeleteUser() throws Exception {
        createUser(userJson(USER_NAME, USER_PHONENUMBER, USER_EMAIL));

        // The user must be visible via every supported lookup.
        assertUserListed("/user.pinpoint");
        assertUserListed("/user.pinpoint?searchKey=" + USER_NAME);
        assertUserListed("/user.pinpoint?userId=" + USER_ID);
        assertUserListed("/user.pinpoint?searchKey=" + USER_DEPARTMENT);

        removeUser();
    }

    @Test
    public void selectUser() throws Exception {
        createUser(userJson(USER_NAME, USER_PHONENUMBER, USER_EMAIL));

        // Lookup by userId must return the exact user that was inserted.
        this.mockMvc.perform(get("/user.pinpoint?userId=" + USER_ID).contentType(MediaType.APPLICATION_JSON))
                .andExpect(status().isOk())
                .andExpect(content().contentType("application/json;charset=UTF-8"))
                .andExpect(jsonPath("$[0]", hasKey("userId")))
                .andExpect(jsonPath("$[0].userId").value(USER_ID))
                .andExpect(jsonPath("$[0]", hasKey("name")))
                .andExpect(jsonPath("$[0]", hasKey("department")))
                .andExpect(jsonPath("$[0]", hasKey("phoneNumber")))
                .andExpect(jsonPath("$[0]", hasKey("email")))
                .andReturn();

        removeUser();
    }

    @Test
    public void updateUser() throws Exception {
        createUser(userJson(USER_NAME, USER_PHONENUMBER, USER_EMAIL));

        updateUserRequest(userJson(USER_NAME_UPDATED, USER_PHONENUMBER_UPDATED, USER_EMAIL_UPDATED));

        removeUser();
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2016, CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.jenkinsci.plugins.pipeline.maven.publishers;
import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.Run;
import hudson.model.StreamBuildListener;
import hudson.model.TaskListener;
import hudson.plugins.findbugs.FindBugsPublisher;
import org.jenkinsci.Symbol;
import org.jenkinsci.plugins.pipeline.maven.MavenPublisher;
import org.jenkinsci.plugins.pipeline.maven.MavenSpyLogProcessor;
import org.jenkinsci.plugins.pipeline.maven.util.XmlUtils;
import org.jenkinsci.plugins.workflow.steps.StepContext;
import org.kohsuke.stapler.DataBoundConstructor;
import org.w3c.dom.Element;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.annotation.Nonnull;
/**
 * Archives FindBugs analysis results produced by
 * {@code org.codehaus.mojo:findbugs-maven-plugin:findbugs} executions found in
 * the Maven spy event log.
 *
 * @author <a href="mailto:cleclerc@cloudbees.com">Cyrille Le Clerc</a>
 */
public class FindbugsAnalysisPublisher extends MavenPublisher {
    private static final Logger LOGGER = Logger.getLogger(FindbugsAnalysisPublisher.class.getName());

    private static final long serialVersionUID = 1L;

    @DataBoundConstructor
    public FindbugsAnalysisPublisher() {
    }

    /*
     * Each MojoStarted / MojoSucceeded / MojoFailed "ExecutionEvent" for the
     * findbugs goal carries a <plugin> element whose <xmlOutputDirectory> child
     * (typically "${project.build.directory}") locates the generated
     * findbugsXml.xml, plus a <project baseDir="..."> element used to expand
     * the ${project.build.directory} / ${basedir} placeholders.
     */
    @Override
    public void process(@Nonnull StepContext context, @Nonnull Element mavenSpyLogsElt) throws IOException, InterruptedException {
        TaskListener listener = context.get(TaskListener.class);
        if (listener == null) {
            LOGGER.warning("TaskListener is NULL, default to stderr");
            listener = new StreamBuildListener((OutputStream) System.err);
        }
        FilePath workspace = context.get(FilePath.class);
        Run run = context.get(Run.class);
        Launcher launcher = context.get(Launcher.class);

        List<Element> events = XmlUtils.getExecutionEvents(mavenSpyLogsElt, "org.codehaus.mojo", "findbugs-maven-plugin", "findbugs");
        if (events.isEmpty()) {
            LOGGER.log(Level.FINE, "No org.codehaus.mojo:findbugs-maven-plugin:findbugs execution found");
            return;
        }

        // Bail out gracefully when the Jenkins FindBugs plugin is not installed.
        try {
            Class.forName("hudson.plugins.findbugs.FindBugsPublisher");
        } catch (ClassNotFoundException e) {
            listener.getLogger().print("[withMaven] Jenkins ");
            listener.hyperlink("https://wiki.jenkins-ci.org/display/JENKINS/FindBugs+Plugin", "FindBugs Plugin");
            listener.getLogger().println(" not found, don't display org.codehaus.mojo:findbugs-maven-plugin:findbugs results in pipeline screen.");
            return;
        }

        for (Element event : events) {
            // Only completed mojo executions carry usable results.
            String eventType = event.getAttribute("type");
            boolean mojoFinished = "MojoSucceeded".equals(eventType) || "MojoFailed".equals(eventType);
            if (!mojoFinished) {
                continue;
            }

            Element pluginElt = XmlUtils.getUniqueChildElement(event, "plugin");
            Element outputDirElt = XmlUtils.getUniqueChildElementOrNull(pluginElt, "xmlOutputDirectory");
            Element projectElt = XmlUtils.getUniqueChildElement(event, "project");
            MavenSpyLogProcessor.MavenArtifact artifact = XmlUtils.newMavenArtifact(projectElt);
            MavenSpyLogProcessor.PluginInvocation invocation = XmlUtils.newPluginInvocation(pluginElt);

            if (outputDirElt == null) {
                listener.getLogger().println("[withMaven] No <xmlOutputDirectoryElt> element found for <plugin> in " + XmlUtils.toString(event));
                continue;
            }

            // Expand the Maven placeholder the plugin configuration used, if any.
            String outputDir = outputDirElt.getTextContent().trim();
            if (outputDir.contains("${project.build.directory}")) {
                String buildDir = XmlUtils.getProjectBuildDirectory(projectElt);
                if (buildDir == null || buildDir.isEmpty()) {
                    listener.getLogger().println("[withMaven] '${project.build.directory}' found for <project> in " + XmlUtils.toString(event));
                    continue;
                }
                outputDir = outputDir.replace("${project.build.directory}", buildDir);
            } else if (outputDir.contains("${basedir}")) {
                String baseDir = projectElt.getAttribute("baseDir");
                if (baseDir.isEmpty()) {
                    listener.getLogger().println("[withMaven] '${basedir}' found for <project> in " + XmlUtils.toString(event));
                    continue;
                }
                outputDir = outputDir.replace("${basedir}", baseDir);
            }

            outputDir = XmlUtils.getPathInWorkspace(outputDir, workspace);
            String resultsFile = outputDir + "/findbugsXml.xml";

            listener.getLogger().println("[withMaven] findbugsPublisher - Archive FindBugs analysis results for Maven artifact " + artifact.toString() + " generated by " +
                    invocation + ": " + resultsFile);

            FindBugsPublisher archiver = new FindBugsPublisher();
            archiver.setPattern(resultsFile);
            try {
                archiver.perform(run, workspace, launcher, listener);
            } catch (Exception e) {
                // Archiving problems must not fail the build; log and move on.
                listener.error("[withMaven] findbugsPublisher - Silently ignore exception archiving FindBugs results for Maven artifact " + artifact.toString() + " generated by " +
                        invocation + ": " + e);
                LOGGER.log(Level.WARNING, "Exception processing " + XmlUtils.toString(event), e);
            }
        }
    }

    /**
     * Don't use symbol "findbugs", it would collide with hudson.plugins.findbugs.FindBugsPublisher
     */
    @Symbol("findbugsPublisher")
    @Extension
    public static class DescriptorImpl extends MavenPublisher.DescriptorImpl {

        @Nonnull
        @Override
        public String getDisplayName() {
            return "Findbugs Publisher";
        }

        @Override
        public int ordinal() {
            return 20;
        }

        @Nonnull
        @Override
        public String getSkipFileName() {
            return ".skip-publish-findbugs-results";
        }
    }
}
| |
//========================================================================
//
//File: $RCSfile: TestCRUD_Generics.java,v $
//Version: $Revision: 1.6 $
//Modified: $Date: 2013/01/17 03:38:08 $
//
//(c) Copyright 2006-2014 by Mentor Graphics Corp. All rights reserved.
//
//========================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//========================================================================
//
package org.xtuml.bp.als.oal.test;
import org.junit.After;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.xtuml.bp.core.Association_c;
import org.xtuml.bp.core.Block_c;
import org.xtuml.bp.core.CreateNoVariable_c;
import org.xtuml.bp.core.Create_c;
import org.xtuml.bp.core.Delete_c;
import org.xtuml.bp.core.InstanceHandle_c;
import org.xtuml.bp.core.ModelClass_c;
import org.xtuml.bp.core.RelateUsing_c;
import org.xtuml.bp.core.Relate_c;
import org.xtuml.bp.core.Statement_c;
import org.xtuml.bp.core.UnrelateUsing_c;
import org.xtuml.bp.core.Unrelate_c;
import org.xtuml.bp.core.Value_c;
import org.xtuml.bp.core.Variable_c;
import org.xtuml.bp.test.common.OrderedRunner;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import junit.framework.TestCase;
@RunWith(OrderedRunner.class)
public class TestCRUD_Generics extends TestCase {
@After
public void tearDown() throws Exception {
    try {
        super.tearDown();
        OalParserTest_Generics.tearDownActionData();
    } catch (RecognitionException | TokenStreamException ignored) {
        // Cleanup is best-effort: parser exceptions during teardown are
        // deliberately swallowed so they cannot fail unrelated tests.
    }
}
@Test
public void testObjectKeyletterFound() throws RecognitionException, TokenStreamException {
    // A create-with-variable statement must parse cleanly and populate one
    // block, one (implicit) Create statement, and one instance variable.
    String result = OalParserTest_Generics.parseAction("create object instance d1 of D_D; ", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
    assertEquals("", result); //$NON-NLS-1$
    Block_c[] blocks = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, blocks.length);
    Statement_c[] stmts = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, stmts.length);
    Create_c[] creates = Create_c.CreateInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, creates.length);
    assertTrue(creates[0].getIs_implicit());
    // The class named by key letters "D_D" must be resolvable in the model.
    boolean diskClassPresent = false;
    for (ModelClass_c clazz : ModelClass_c.ModelClassInstances(OalParserTest_Generics.modelRoot)) {
        if (clazz.getKey_lett().equals("D_D")) {
            diskClassPresent = true;
            break;
        }
    }
    assertTrue("Disk class not found", diskClassPresent);
    assertEquals(stmts[0].getStatement_id(), creates[0].getStatement_id());
    Variable_c[] vars = Variable_c.VariableInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, vars.length);
    assertEquals("d1", vars[0].getName()); //$NON-NLS-1$
    // The instance handle must be linked (R818) to the D_D class.
    InstanceHandle_c[] handles = InstanceHandle_c.InstanceHandleInstances(OalParserTest_Generics.modelRoot);
    ModelClass_c linkedClass = ModelClass_c.getOneO_OBJOnR818(handles[0]);
    assertEquals("D_D", linkedClass.getKey_lett()); //$NON-NLS-1$
}
@Test
public void testObjectKeyletterFoundNoVar() throws RecognitionException, TokenStreamException {
    // The variable-less create form maps to CreateNoVariable and must not
    // introduce any instance handle.
    String result = OalParserTest_Generics.parseAction("create object instance of D_D; ", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
    assertEquals("", result); //$NON-NLS-1$
    Block_c[] blocks = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, blocks.length);
    Statement_c[] stmts = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, stmts.length);
    CreateNoVariable_c[] creates = CreateNoVariable_c.CreateNoVariableInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, creates.length);
    // The class named by key letters "D_D" must be resolvable in the model.
    boolean diskClassPresent = false;
    for (ModelClass_c clazz : ModelClass_c.ModelClassInstances(OalParserTest_Generics.modelRoot)) {
        if (clazz.getKey_lett().equals("D_D")) {
            diskClassPresent = true;
            break;
        }
    }
    assertTrue("Disk class not found", diskClassPresent);
    assertEquals(stmts[0].getStatement_id(), creates[0].getStatement_id());
    // No variable was declared, so no instance handle may exist.
    InstanceHandle_c[] handles = InstanceHandle_c.InstanceHandleInstances(OalParserTest_Generics.modelRoot);
    assertEquals(0, handles.length);
}
@Test
// Unknown class key letters ("XX") in a create-with-variable statement must
// produce a position-tagged error and leave the model unpopulated.
public void testObjectKeyletterNotFound() throws RecognitionException, TokenStreamException {
String x = OalParserTest_Generics.parseAction("create object instance nsi of XX ; ", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
String lines[] = x.split("\n"); //$NON-NLS-1$
// First line of parser output is the key-letter lookup failure.
assertEquals(":1:31-32: Cannot find specified class key letters ->XX<-.", lines[0]); //$NON-NLS-1$
// Failed parse: no statements, creates, or instance handles may remain.
Statement_c[] st = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, st.length);
Create_c[] cs = Create_c.CreateInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, cs.length);
CreateNoVariable_c[] s = CreateNoVariable_c.CreateNoVariableInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, s.length);
InstanceHandle_c[] iv = InstanceHandle_c.InstanceHandleInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, iv.length);
}
@Test
// Same as testObjectKeyletterNotFound but for the variable-less create form;
// only the reported error offset differs.
public void testObjectKeyletterNotFoundNoVar() throws RecognitionException, TokenStreamException {
String x = OalParserTest_Generics.parseAction("create object instance of XX ; ", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
String lines[] = x.split("\n"); //$NON-NLS-1$
assertEquals(":1:27-28: Cannot find specified class key letters ->XX<-.", lines[0]); //$NON-NLS-1$
// Failed parse: the model must remain empty.
Statement_c[] st = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, st.length);
Create_c[] cs = Create_c.CreateInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, cs.length);
CreateNoVariable_c[] s = CreateNoVariable_c.CreateNoVariableInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, s.length);
InstanceHandle_c[] iv = InstanceHandle_c.InstanceHandleInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, iv.length);
}
@Test
// Reusing a transient (non-instance) variable "x" as the target of a create
// statement must be rejected; only the first statement survives.
public void testCreateWrongVarType() throws RecognitionException, TokenStreamException {
String x = OalParserTest_Generics.parseAction("x = 1; create object instance x of D_D;", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":1:36-38: Variable ->x<- does not exist in scope as an object instance variable", lines[0]); //$NON-NLS-1$
assertEquals("line 1:40: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
Block_c[] blk = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, blk.length);
// Only the assignment statement ("x = 1;") was accepted.
Statement_c[] st = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, st.length);
assertEquals(blk[0].getBlock_id(), st[0].getBlock_id());
Variable_c[] t = Variable_c.VariableInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, t.length);
Value_c[] val = Value_c.ValueInstances(OalParserTest_Generics.modelRoot);
assertEquals(2, val.length); // the literal 1 and transient x
// The failed create must not have been recorded.
Create_c[] cre = Create_c.CreateInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, cre.length);
}
@Test
// Re-creating an existing instance variable "x" against a different class
// (D_H after D_D) must fail; only the first create survives.
public void testCreateWrongClassType() throws RecognitionException, TokenStreamException {
String x = OalParserTest_Generics.parseAction("create object instance x of D_D;\ncreate object instance x of D_H;", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":2:29-31: Variable ->x<- already exists as a different type", lines[0]); //$NON-NLS-1$
assertEquals("line 2:33: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
Block_c[] blk = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, blk.length);
// Only the first create statement was accepted.
Statement_c[] st = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, st.length);
assertEquals(blk[0].getBlock_id(), st[0].getBlock_id());
Variable_c[] t = Variable_c.VariableInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, t.length);
Value_c[] val = Value_c.ValueInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, val.length);
Create_c[] cre = Create_c.CreateInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, cre.length);
}
@Test
public void testDelete() throws RecognitionException, TokenStreamException {
    // Create then delete the same instance; both statements share one block,
    // and the Delete must reference the created variable.
    String result = OalParserTest_Generics.parseAction("create object instance d1 of D_D; delete object instance d1;", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
    assertEquals("", result); //$NON-NLS-1$
    Block_c[] blocks = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, blocks.length);
    Statement_c[] stmts = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
    assertEquals(2, stmts.length);
    assertEquals(blocks[0].getBlock_id(), stmts[0].getBlock_id());
    assertEquals(blocks[0].getBlock_id(), stmts[1].getBlock_id());
    Variable_c[] vars = Variable_c.VariableInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, vars.length);
    assertEquals("d1", vars[0].getName());//$NON-NLS-1$
    Value_c[] values = Value_c.ValueInstances(OalParserTest_Generics.modelRoot);
    assertEquals(0, values.length);
    // The delete is the second statement and targets the d1 variable.
    Delete_c[] deletes = Delete_c.DeleteInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, deletes.length);
    assertEquals(stmts[1].getStatement_id(), deletes[0].getStatement_id());
    assertEquals(vars[0].getVar_id(), deletes[0].getVar_id());
}
@Test
// Deleting a transient (non-instance) variable must be rejected and no
// Delete statement recorded.
public void testDeleteI() throws RecognitionException, TokenStreamException {
String x = OalParserTest_Generics.parseAction("x = 5; delete object instance x;", OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":1:31-31: Variable ->x<- does not exist in scope as an object instance variable", lines[0]); //$NON-NLS-1$
assertEquals("line 1:33: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
Block_c[] blk = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, blk.length);
// Only the assignment statement was accepted.
Statement_c[] st = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, st.length);
assertEquals(blk[0].getBlock_id(), st[0].getBlock_id());
Variable_c[] t = Variable_c.VariableInstances(OalParserTest_Generics.modelRoot);
assertEquals(1, t.length);
assertEquals("x", t[0].getName());//$NON-NLS-1$
// Two values: the literal 5 and the transient x.
Value_c[] val = Value_c.ValueInstances(OalParserTest_Generics.modelRoot);
assertEquals(2, val.length);
Delete_c[] del = Delete_c.DeleteInstances(OalParserTest_Generics.modelRoot);
assertEquals(0, del.length);
}
/**
 * Validates the model population produced by a successful relate statement.
 *
 * @param vars    expected transient variable names, in creation order; for the
 *                USING form the third entry is the associative link variable
 * @param relNum  expected association number (e.g. 4 for R4)
 * @param numStmt expected total statement count in the single block; the
 *                relate itself is always the last statement
 * @param phrase  expected relationship phrase text ("" when none was given)
 * @param using   true when the statement used a USING clause (link class)
 */
private void goodRelateValidate(String[] vars, int relNum, int numStmt, String phrase, boolean using)
throws RecognitionException, TokenStreamException {
    Block_c[] blk = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, blk.length);
    Statement_c[] st = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
    assertEquals(numStmt, st.length);
    // Every statement must belong to the single block.
    for (int i = 0; i < numStmt; ++i)
        assertEquals(blk[0].getBlock_id(), st[i].getBlock_id());
    Variable_c[] t = Variable_c.VariableInstances(OalParserTest_Generics.modelRoot);
    assertEquals(vars.length, t.length);
    for (int i = 0; i < vars.length; ++i)
        assertEquals(vars[i], t[i].getName());
    if (using) {
        // Associative form: maps to a RelateUsing statement (R654 to the rel).
        RelateUsing_c rel[] = RelateUsing_c.RelateUsingInstances(OalParserTest_Generics.modelRoot);
        assertEquals(1, rel.length);
        assertEquals(st[numStmt - 1].getStatement_id(), rel[0].getStatement_id());
        assertEquals(t[0].getVar_id(), rel[0].getOne_side_var_id());
        assertEquals(t[1].getVar_id(), rel[0].getOther_side_var_id());
        assertEquals(t[2].getVar_id(), rel[0].getAssociative_var_id());
        assertEquals(phrase, rel[0].getRelationship_phrase());
        Association_c r_rel = Association_c.getOneR_RELOnR654(rel[0]);
        assertEquals(relNum, r_rel.getNumb());
    } else {
        // Simple form: maps to a Relate statement (R653 to the rel).
        Relate_c rel[] = Relate_c.RelateInstances(OalParserTest_Generics.modelRoot);
        assertEquals(1, rel.length);
        // Fix: use numStmt - 1 instead of the hard-coded index 2 so the
        // helper is correct for any statement count (the USING branch above
        // already did this; callers on this path all pass numStmt == 3, so
        // behavior is unchanged for existing tests).
        assertEquals(st[numStmt - 1].getStatement_id(), rel[0].getStatement_id());
        assertEquals(t[0].getVar_id(), rel[0].getOne_side_var_id());
        assertEquals(t[1].getVar_id(), rel[0].getOther_side_var_id());
        assertEquals(phrase, rel[0].getRelationship_phrase());
        Association_c r_rel = Association_c.getOneR_RELOnR653(rel[0]);
        assertEquals(relNum, r_rel.getNumb());
    }
}
/**
 * Validates the model population produced by a successful unrelate statement.
 *
 * @param vars    expected transient variable names, in creation order; for the
 *                USING form the third entry is the associative link variable
 * @param relNum  expected association number (e.g. 4 for R4)
 * @param numStmt expected total statement count in the single block; the
 *                unrelate itself is always the last statement
 * @param phrase  expected relationship phrase text ("" when none was given)
 * @param using   true when the statement used a USING clause (link class)
 */
private void goodUnrelateValidate(String[] vars, int relNum, int numStmt, String phrase, boolean using)
throws RecognitionException, TokenStreamException {
    Block_c[] blk = Block_c.BlockInstances(OalParserTest_Generics.modelRoot);
    assertEquals(1, blk.length);
    Statement_c[] st = Statement_c.StatementInstances(OalParserTest_Generics.modelRoot);
    assertEquals(numStmt, st.length);
    // Every statement must belong to the single block.
    for (int i = 0; i < numStmt; ++i)
        assertEquals(blk[0].getBlock_id(), st[i].getBlock_id());
    // Unrelate statements introduce no value instances.
    Value_c[] val = Value_c.ValueInstances(OalParserTest_Generics.modelRoot);
    assertEquals(0, val.length);
    Variable_c[] t = Variable_c.VariableInstances(OalParserTest_Generics.modelRoot);
    assertEquals(vars.length, t.length);
    for (int i = 0; i < vars.length; ++i)
        assertEquals(vars[i], t[i].getName());
    if (using) {
        // Associative form: maps to an UnrelateUsing statement (R656).
        UnrelateUsing_c rel[] = UnrelateUsing_c.UnrelateUsingInstances(OalParserTest_Generics.modelRoot);
        assertEquals(1, rel.length);
        assertEquals(st[numStmt - 1].getStatement_id(), rel[0].getStatement_id());
        assertEquals(t[0].getVar_id(), rel[0].getOne_side_var_id());
        assertEquals(t[1].getVar_id(), rel[0].getOther_side_var_id());
        assertEquals(t[2].getVar_id(), rel[0].getAssociative_var_id());
        assertEquals(phrase, rel[0].getRelationship_phrase());
        Association_c r_rel = Association_c.getOneR_RELOnR656(rel[0]);
        assertEquals(relNum, r_rel.getNumb());
    } else {
        // Simple form: maps to an Unrelate statement (R655).
        Unrelate_c rel[] = Unrelate_c.UnrelateInstances(OalParserTest_Generics.modelRoot);
        assertEquals(1, rel.length);
        // Fix: use numStmt - 1 instead of the hard-coded index 2, matching
        // the USING branch; all current callers on this path pass
        // numStmt == 3, so behavior is unchanged for existing tests.
        assertEquals(st[numStmt - 1].getStatement_id(), rel[0].getStatement_id());
        assertEquals(t[0].getVar_id(), rel[0].getOne_side_var_id());
        assertEquals(t[1].getVar_id(), rel[0].getOther_side_var_id());
        assertEquals(phrase, rel[0].getRelationship_phrase());
        Association_c r_rel = Association_c.getOneR_RELOnR655(rel[0]);
        assertEquals(relNum, r_rel.getNumb());
    }
}
@Test
public void testRelateBinary() throws RecognitionException, TokenStreamException {
    // Simple binary relate/unrelate across R4 with no phrase.
    String act = "create object instance d1 of D_D;\ncreate object instance h of D_H;\nrelate d1 to h across R4;"; //$NON-NLS-1$
    String[] var_list = { "d1", "h" };//$NON-NLS-1$//$NON-NLS-2$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 4, 3, "", false);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 4, 3, "", false);//$NON-NLS-1$
}
@Test
public void testRelateBinaryGoodPhraseNotReflexive() throws RecognitionException, TokenStreamException {
    // A valid destination phrase on a non-reflexive association is accepted.
    String act = "create object instance d1 of D_D;\ncreate object instance h of D_H;\nrelate d1 to h across R4.'is permanently assigned to';"; //$NON-NLS-1$
    String[] var_list = { "d1", "h" };//$NON-NLS-1$//$NON-NLS-2$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 4, 3, "'is permanently assigned to'", false);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 4, 3, "'is permanently assigned to'", false);//$NON-NLS-1$
}
@Test
public void testRelateBinaryGoodReflexive() throws RecognitionException, TokenStreamException {
    // Reflexive association R12 requires a phrase; 'precedes' is valid.
    String act = "create object instance d1 of D_D;\ncreate object instance d2 of D_D;\nrelate d1 to d2 across R12.'precedes';"; //$NON-NLS-1$
    String[] var_list = { "d1", "d2" };//$NON-NLS-1$//$NON-NLS-2$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 12, 3, "'precedes'", false);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 12, 3, "'precedes'", false);//$NON-NLS-1$
}
@Test
public void testRelateBinaryGoodReflexive2() throws RecognitionException, TokenStreamException {
    // Reflexive association R12, opposite phrase direction ('follows').
    String act = "create object instance d1 of D_D;\ncreate object instance d2 of D_D;\nrelate d1 to d2 across R12.'follows';"; //$NON-NLS-1$
    String[] var_list = { "d1", "d2" };//$NON-NLS-1$//$NON-NLS-2$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 12, 3, "'follows'", false);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 12, 3, "'follows'", false);//$NON-NLS-1$
}
@Test
public void testRelateSubtypeGood() throws RecognitionException, TokenStreamException {
    // Supertype/subtype association R3: relating a D_D to a D_OD is legal.
    String act = "create object instance d1 of D_D;\ncreate object instance od of D_OD;\nrelate d1 to od across R3;"; //$NON-NLS-1$
    String[] var_list = { "d1", "od" };//$NON-NLS-1$//$NON-NLS-2$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 3, 3, "", false);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 3, 3, "", false);//$NON-NLS-1$
}
@Test
public void testRelateAssocGood() throws RecognitionException, TokenStreamException {
    // Associative relate across R1 with a USING link-class instance.
    String act = "create object instance d1 of D_D;\ncreate object instance q of D_QP;\ncreate object instance dq of D_DQ;\nrelate d1 to q across R1 using dq;"; //$NON-NLS-1$
    String[] var_list = { "d1", "q", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 1, 4, "", true);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 1, 4, "", true);//$NON-NLS-1$
}
@Test
public void testRelateAssocGood2() throws RecognitionException, TokenStreamException {
    // Same as testRelateAssocGood but with the participants reversed.
    String act = "create object instance q of D_QP;\ncreate object instance d1 of D_D;\ncreate object instance dq of D_DQ;\nrelate q to d1 across R1 using dq;"; //$NON-NLS-1$
    String[] var_list = { "q", "d1", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 1, 4, "", true);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 1, 4, "", true);//$NON-NLS-1$
}
@Test
public void testRelateAssocGoodWithPhrase() throws RecognitionException, TokenStreamException {
    // Associative relate across R1 combining a phrase with a USING clause.
    String act = "create object instance d1 of D_D;\ncreate object instance q of D_QP;\ncreate object instance dq of D_DQ;\nrelate d1 to q across R1.'needed by' using dq;"; //$NON-NLS-1$
    String[] var_list = { "d1", "q", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 1, 4, "'needed by'", true);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 1, 4, "'needed by'", true);//$NON-NLS-1$
}
@Test
public void testRelateAssocGoodReflexive() throws RecognitionException, TokenStreamException {
    // Reflexive associative relate across R13 with phrase 'precedes'.
    String act = "create object instance qp1 of D_QP;\ncreate object instance qp2 of D_QP;\ncreate object instance qpo of D_QPO;\nrelate qp1 to qp2 across R13.'precedes' using qpo;"; //$NON-NLS-1$
    String[] var_list = { "qp1", "qp2", "qpo" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 13, 4, "'precedes'", true);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 13, 4, "'precedes'", true);//$NON-NLS-1$
}
@Test
public void testRelateAssocGoodReflexive2() throws RecognitionException, TokenStreamException {
    // Reflexive associative relate across R13, opposite phrase ('follows').
    String act = "create object instance qp1 of D_QP;\ncreate object instance qp2 of D_QP;\ncreate object instance qpo of D_QPO;\nrelate qp1 to qp2 across R13.'follows' using qpo;"; //$NON-NLS-1$
    String[] var_list = { "qp1", "qp2", "qpo" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
    String result = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodRelateValidate(var_list, 13, 4, "'follows'", true);//$NON-NLS-1$
    // Mirror check for the unrelate form of the same action.
    String unrelateAct = act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
    result = OalParserTest_Generics.parseAction(unrelateAct, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
    assertEquals("", result); //$NON-NLS-1$
    goodUnrelateValidate(var_list, 13, 4, "'follows'", true);//$NON-NLS-1$
}
@Test
// Using the phrase that belongs to the OTHER side of R4 must be rejected;
// the reported offsets differ between the relate and unrelate forms.
public void testRelateBinaryWrongPhraseNotReflexive() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance h of D_H;\nrelate d1 to h across R4.'is permanent home for';"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:26-48: Class ->Permanent Home in Library<- in association ->R4<- does not contain destination association phrase ->is permanent home for<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:50: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "h" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:30-52: Class ->Permanent Home in Library<- in association ->R4<- does not contain destination association phrase ->is permanent home for<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:54: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// A phrase that exists on neither side of R4 must be rejected for both the
// relate and unrelate forms.
public void testRelateBinaryBadPhraseNotReflexive() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance h of D_H;\nrelate d1 to h across R4.'bad phrase';"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:26-37: Class ->Permanent Home in Library<- in association ->R4<- does not contain destination association phrase ->bad phrase<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:39: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "h" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:30-41: Class ->Permanent Home in Library<- in association ->R4<- does not contain destination association phrase ->bad phrase<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:43: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// An unknown phrase on reflexive association R12 must be rejected; the error
// text is specific to reflexive associations.
public void testRelateBinaryBadPhraseReflexive() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance d2 of D_D;\nrelate d1 to d2 across R12.'neither';"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:28-36: Class ->Disk<- in reflexive association ->R12<- does not contain destination association phrase ->neither<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:38: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "d2" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:32-40: Class ->Disk<- in reflexive association ->R12<- does not contain destination association phrase ->neither<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:42: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// Omitting the phrase entirely on reflexive association R12 must be rejected
// because the direction is ambiguous without one.
public void testRelateBinaryBadNoPhraseReflexive() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance d2 of D_D;\nrelate d1 to d2 across R12;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:24-26: The destination association phrase must be specified for reflexive association ->R12<- between classes ->Disk<- and ->Disk<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:28: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "d2" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:28-30: The destination association phrase must be specified for reflexive association ->R12<- between classes ->Disk<- and ->Disk<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:32: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// Supplying a phrase on a supertype/subtype association (R3) is illegal and
// must be rejected for both the relate and unrelate forms.
public void testRelateBinaryPhraseSubtype() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance od of D_OD;\nrelate d1 to od across R3.'is a';"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:27-32: A destination association phrase is not allowed for a supertype/subtype association", lines[0]); //$NON-NLS-1$
assertEquals("line 3:34: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "od" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:31-36: A destination association phrase is not allowed for a supertype/subtype association", lines[0]); //$NON-NLS-1$
assertEquals("line 3:38: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// Relating two SUBTYPE instances across their supertype association R3 is
// illegal (one side must be the supertype) and must be rejected.
public void testRelateBinaryTwoSubtypes() throws RecognitionException, TokenStreamException {
String act = "create object instance od1 of D_OND;\ncreate object instance od2 of D_OD;\nrelate od1 to od2 across R3;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:26-27: The classes ->Online Disk<- and ->Offline Disk<- are subtype classes in the association ->R3<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:29: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "od1", "od2" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:30-31: The classes ->Online Disk<- and ->Offline Disk<- are subtype classes in the association ->R3<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:33: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// A relate across a non-existent association number (R99) must be rejected.
public void testRelateBinaryBadRelNum() throws RecognitionException, TokenStreamException {
String act = "create object instance x of D_D;\ncreate object instance y of D_D;\nrelate x to y across R99;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:22-24: Cannot find specified association ->R99<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:26: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "x", "y" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:26-28: Cannot find specified association ->R99<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:30: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// R4 exists but not between D_D and D_OD; the relate must be rejected with a
// class-pair-specific error.
public void testRelateBinaryWrongRelNum() throws RecognitionException, TokenStreamException {
String act = "create object instance x of D_D;\ncreate object instance y of D_OD;\nrelate x to y across R4;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:22-23: The specified association ->R4<- does not exist between classes ->Disk<- and ->Offline Disk<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:25: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "x", "y" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:26-27: The specified association ->R4<- does not exist between classes ->Disk<- and ->Offline Disk<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:29: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
@Test
// A USING clause on a plain (non-associative) association R4 is illegal and
// must be rejected for both the relate and unrelate forms.
public void testRelateBinaryBadWithUsing() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance h of D_H;\ncreate object instance dq of D_DQ;\nrelate d1 to h across R4 using dq;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":4:32-33: Association ->R4<- is not linked", lines[0]); //$NON-NLS-1$
assertEquals("line 4:35: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "h", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
// Mirror check for the unrelate form of the same action.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":4:36-37: Association ->R4<- is not linked", lines[0]); //$NON-NLS-1$
assertEquals("line 4:39: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
}
/**
 * An associative (link class) association such as R1 requires a USING
 * clause; relating across it without one must report the "Need USING
 * clause" error, for both the relate and unrelate statement forms.
 */
@Test
public void testRelateAssocBadNoUsing() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance q of D_QP;\nrelate d1 to q across R1;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":3:23-24: Need USING clause for link classes in association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:26: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "q" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":3:27-28: Need USING clause for link classes in association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 3:30: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 0, 0);
}
/**
 * Relating across associative R1 with a participant class that does not
 * take part in the association (here D_H, "Permanent Home in Library",
 * presumably on the "one" side of the statement — confirm against the
 * sibling testRelateAssocBadAoth) must report the non-participation
 * error, for both the relate and unrelate statement forms.
 */
@Test
public void testRelateAssocBadAone() throws RecognitionException, TokenStreamException {
String act = "create object instance h of D_H;\ncreate object instance qp of D_QP;\ncreate object instance dq of D_DQ;\nrelate h to qp across R1 using dq;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":4:32-33: Class ->Permanent Home in Library<- does not participate in the link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:35: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "h", "qp", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":4:36-37: Class ->Permanent Home in Library<- does not participate in the link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:39: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
}
/**
 * Companion to testRelateAssocBadAone: here the non-participating class
 * D_H ("Permanent Home in Library") appears as the destination of the
 * relate statement.  The same non-participation error for R1 must be
 * reported, for both the relate and unrelate statement forms.
 */
@Test
public void testRelateAssocBadAoth() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance h of D_H;\ncreate object instance dq of D_DQ;\nrelate d1 to h across R1 using dq;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":4:32-33: Class ->Permanent Home in Library<- does not participate in the link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:35: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "h", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":4:36-37: Class ->Permanent Home in Library<- does not participate in the link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:39: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
}
/**
 * The USING operand of an associative relate must name an instance of
 * the link class.  Here the USING instance (of D_H, "Permanent Home in
 * Library") is not the link class of R1 between Disk and Qualified
 * Process, which must be reported for both relate and unrelate.
 */
@Test
public void testRelateAssocBadAssr() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance qp of D_QP;\ncreate object instance h of D_H;\nrelate d1 to qp across R1 using h;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":4:33-33: Class ->Permanent Home in Library<- is not the link class between classes ->Disk<- and ->Qualified Process<- in link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:35: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "qp", "h" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":4:37-37: Class ->Permanent Home in Library<- is not the link class between classes ->Disk<- and ->Qualified Process<- in link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:39: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
}
/**
 * Swapping a participant and the link class in an associative relate —
 * here D_DQ appears as a relate target while the participant D_QP
 * ("Qualified Process") is given in the USING clause — must report that
 * the class is incorrectly specified in link association R1, for both
 * the relate and unrelate statement forms.
 */
@Test
public void testRelateAssocWrongOrderAssr() throws RecognitionException, TokenStreamException {
String act = "create object instance d1 of D_D;\ncreate object instance qp of D_QP;\ncreate object instance dq of D_DQ;\nrelate d1 to dq across R1 using qp;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":4:33-34: Class ->Qualified Process<- is incorrectly specified in link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:36: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "d1", "qp", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":4:37-38: Class ->Qualified Process<- is incorrectly specified in link association ->R1<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:40: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
}
/**
 * A reflexive association (R13, Qualified Process to itself) cannot be
 * related without a destination association phrase; omitting it must
 * produce the phrase-required error, for both relate and unrelate.
 */
@Test
public void testRelateAssocBadReflexiveNoPhrase() throws RecognitionException, TokenStreamException {
String act = "create object instance qp1 of D_QP;\ncreate object instance qp2 of D_QP;\ncreate object instance qpo of D_QPO;\nrelate qp1 to qp2 across R13 using qpo;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":4:36-38: The destination association phrase must be specified for reflexive association ->R13<- between classes ->Qualified Process<- and ->Qualified Process<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:40: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "qp1", "qp2", "qpo" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":4:40-42: The destination association phrase must be specified for reflexive association ->R13<- between classes ->Qualified Process<- and ->Qualified Process<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:44: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
}
/**
 * A destination association phrase that does not exist on the reflexive
 * association (R13.'neither') must produce the unknown-phrase error, for
 * both the relate and unrelate statement forms.
 */
@Test
public void testRelateAssocBadReflexiveBadPhrase() throws RecognitionException, TokenStreamException {
String act = "create object instance qp1 of D_QP;\ncreate object instance qp2 of D_QP;\ncreate object instance qpo of D_QPO;\nrelate qp1 to qp2 across R13.'neither' using qpo;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(":4:46-48: Class ->Qualified Process<- in reflexive association ->R13<- does not contain destination association phrase ->neither<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:50: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
String[] var_list = { "qp1", "qp2", "qpo" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(":4:50-52: Class ->Qualified Process<- in reflexive association ->R13<- does not contain destination association phrase ->neither<-", lines[0]); //$NON-NLS-1$
assertEquals("line 4:54: expecting Semicolon, found 'null'", lines[1]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 0, 0);
}
/**
 * An association specification without a numeric id ("R" instead of
 * "Rnn") inside a select-related navigation chain is a syntax error:
 * the lexer expects TOK_RELID and the semantic pass reports an invalid
 * association specification.
 */
@Test
public void testInvalidRelSpecInChain() throws RecognitionException, TokenStreamException {
String act = "select any h from instances of D_H; select one d related by h->D_D[R];"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(4, lines.length);
assertEquals("line 1:68: expecting TOK_RELID, found 'R'", lines[0]); //$NON-NLS-1$
assertEquals(":1:69-69: Invalid association specification", lines[1]); //$NON-NLS-1$
assertEquals("line 1:71: unexpected token: null", lines[2]); //$NON-NLS-1$
assertEquals("line 1:71: expecting Semicolon, found 'null'", lines[3]); //$NON-NLS-1$
String[] var_list = { "h" };//$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 1, 1, 0);
}
/**
 * An association specification without a numeric id ("R" instead of
 * "Rnn") in a relate statement must report the TOK_RELID syntax error
 * and an invalid association specification, for both the relate and
 * unrelate statement forms.
 */
@Test
public void testInvalidRelSpecInRelate() throws RecognitionException, TokenStreamException {
String act = "select any h from instances of D_H; select any d from instances of D_D; relate d to h across R;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(3, lines.length);
assertEquals("line 1:94: expecting TOK_RELID, found 'R'", lines[0]); //$NON-NLS-1$
assertEquals(":1:94-94: Invalid association specification", lines[1]); //$NON-NLS-1$
assertEquals("line 1:96: expecting Semicolon, found 'null'", lines[2]); //$NON-NLS-1$
String[] var_list = { "h", "d" };//$NON-NLS-1$//$NON-NLS-2$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 2, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(3, lines.length);
assertEquals("line 1:98: expecting TOK_RELID, found 'R'", lines[0]); //$NON-NLS-1$
assertEquals(":1:98-98: Invalid association specification", lines[1]); //$NON-NLS-1$
assertEquals("line 1:100: expecting Semicolon, found 'null'", lines[2]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 2, 2, 0);
}
/**
 * An association specification without a numeric id ("R" instead of
 * "Rnn") in a relate...using statement must report the TOK_RELID syntax
 * error and an invalid association specification, for both the relate
 * and unrelate statement forms.
 */
@Test
public void testInvalidRelSpecInRelateUsing() throws RecognitionException, TokenStreamException {
String act = "select any h from instances of D_H; select any d from instances of D_D; create object instance dq of D_DQ;\nrelate d to h across R using dq;"; //$NON-NLS-1$
String x = OalParserTest_Generics.parseAction(act, OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM);
String lines[] = x.split("\n");//$NON-NLS-1$
assertEquals(3, lines.length);
assertEquals("line 2:22: expecting TOK_RELID, found 'R'", lines[0]); //$NON-NLS-1$
assertEquals(":2:30-31: Invalid association specification", lines[1]); //$NON-NLS-1$
assertEquals("line 2:33: expecting Semicolon, found 'null'", lines[2]); //$NON-NLS-1$
String[] var_list = { "h", "d", "dq" };//$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 2, 0);
// Repeat the same check through the unrelate form of the statement.
x = OalParserTest_Generics.parseAction(act.replaceFirst("relate", "unrelate").replaceFirst(" to ", " from "), OalParserTest_Generics.ACTIVITY_TYPE_FUNC, OalParserTest_Generics.TEST_VOID_NO_PARM); //$NON-NLS-1$//$NON-NLS-2$//$NON-NLS-3$//$NON-NLS-4$
lines = x.split("\n");//$NON-NLS-1$
assertEquals(3, lines.length);
assertEquals("line 2:26: expecting TOK_RELID, found 'R'", lines[0]); //$NON-NLS-1$
assertEquals(":2:34-35: Invalid association specification", lines[1]); //$NON-NLS-1$
assertEquals("line 2:37: expecting Semicolon, found 'null'", lines[2]); //$NON-NLS-1$
OalParserTest_Generics.badRelationshipValidate(var_list, 3, 2, 0);
}
}
| |
//
// ========================================================================
// Copyright (c) 1995-2015 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.continuation;
import java.util.ArrayList;
import java.util.List;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
import javax.servlet.ServletResponseWrapper;
import org.mortbay.log.Log;
import org.mortbay.log.Logger;
/* ------------------------------------------------------------ */
/**
* This implementation of Continuation is used by {@link ContinuationSupport}
* when it detects that the application is deployed in a jetty-6 server.
* This continuation requires the {@link ContinuationFilter} to be deployed.
*/
public class Jetty6Continuation implements ContinuationFilter.FilteredContinuation
{
private static final Logger LOG = Log.getLogger(Jetty6Continuation.class.getName());
// Exception reused for all continuations
// Turn on debug in ContinuationFilter to see real stack trace.
private final static ContinuationThrowable __exception = new ContinuationThrowable();
// Request this continuation belongs to; attribute calls are delegated to it.
private final ServletRequest _request;
// Response captured by suspend(ServletResponse)/enter(); may be null.
private ServletResponse _response;
// The wrapped jetty-6 native continuation.
private final org.mortbay.util.ajax.Continuation _j6Continuation;
// Throwable thrown by the jetty-6 suspend() call, held for re-throw in exit().
// NOTE(review): presumably jetty-6 signals suspension by throwing a retry
// exception out of suspend(); confirm against org.mortbay.util.ajax.Continuation.
private Throwable _retry;
// Suspend timeout in ms, clamped to Integer.MAX_VALUE by setTimeout(long).
private int _timeout;
// True until exit() first runs, i.e. during the first dispatch cycle.
private boolean _initial=true;
private volatile boolean _completed=false;
private volatile boolean _resumed=false;
private volatile boolean _expired=false;
// True when the response passed to suspend() was a ServletResponseWrapper.
private boolean _responseWrapped=false;
// Lazily created; remains null until addContinuationListener() is called.
private List<ContinuationListener> _listeners;
/**
 * @param request the request to delegate attribute access to
 * @param continuation the native jetty-6 continuation to wrap
 * @throws IllegalStateException if ContinuationFilter was never initialized
 */
public Jetty6Continuation(ServletRequest request, org.mortbay.util.ajax.Continuation continuation)
{
if (!ContinuationFilter._initialized)
{
LOG.warn("!ContinuationFilter installed",null,null);
throw new IllegalStateException("!ContinuationFilter installed");
}
_request=request;
_j6Continuation=continuation;
}
public void addContinuationListener(final ContinuationListener listener)
{
// Lazily create the listener list on first registration.
if (_listeners==null)
_listeners=new ArrayList<ContinuationListener>();
_listeners.add(listener);
}
public void complete()
{
// Guarded by this: complete() and resume() are mutually exclusive per cycle.
synchronized(this)
{
if (_resumed)
throw new IllegalStateException();
_completed=true;
// Wake the native continuation if it is currently suspended.
if (_j6Continuation.isPending())
_j6Continuation.resume();
}
}
/* ------------------------------------------------------------ */
/**
 * @see org.eclipse.jetty.continuation.Continuation#getAttribute(java.lang.String)
 */
public Object getAttribute(String name)
{
return _request.getAttribute(name);
}
/* ------------------------------------------------------------ */
/**
 * @see org.eclipse.jetty.continuation.Continuation#removeAttribute(java.lang.String)
 */
public void removeAttribute(String name)
{
_request.removeAttribute(name);
}
/* ------------------------------------------------------------ */
/**
 * @see org.eclipse.jetty.continuation.Continuation#setAttribute(java.lang.String, java.lang.Object)
 */
public void setAttribute(String name, Object attribute)
{
_request.setAttribute(name,attribute);
}
/* ------------------------------------------------------------ */
/** @return the response captured by the last suspend(ServletResponse)/enter(); may be null */
public ServletResponse getServletResponse()
{
return _response;
}
/* ------------------------------------------------------------ */
/** @return true if the last suspend ended by timeout rather than resume */
public boolean isExpired()
{
return _expired;
}
/* ------------------------------------------------------------ */
/** @return true if this is still the first dispatch of the request */
public boolean isInitial()
{
return _initial;
}
/* ------------------------------------------------------------ */
/** @return true if resume() has been called for the current suspend cycle */
public boolean isResumed()
{
return _resumed;
}
/* ------------------------------------------------------------ */
/** @return true while a suspend is in progress (a retry throwable is held) */
public boolean isSuspended()
{
return _retry!=null;
}
/* ------------------------------------------------------------ */
public void resume()
{
// Guarded by this: resume() is illegal after complete() in the same cycle.
synchronized(this)
{
if (_completed)
throw new IllegalStateException();
_resumed=true;
// Wake the native continuation if it is currently suspended.
if (_j6Continuation.isPending())
_j6Continuation.resume();
}
}
/* ------------------------------------------------------------ */
/** Set the suspend timeout; values above Integer.MAX_VALUE ms are clamped. */
public void setTimeout(long timeoutMs)
{
_timeout=(timeoutMs>Integer.MAX_VALUE)?Integer.MAX_VALUE:(int)timeoutMs;
}
/* ------------------------------------------------------------ */
/**
 * @see org.eclipse.jetty.continuation.Continuation#suspend(javax.servlet.ServletResponse)
 */
public void suspend(ServletResponse response)
{
try
{
_response=response;
_responseWrapped=_response instanceof ServletResponseWrapper;
// Reset per-cycle state before delegating to the native suspend.
_resumed=false;
_expired=false;
_completed=false;
_j6Continuation.suspend(_timeout);
}
catch(Throwable retry)
{
// Capture the jetty-6 retry throwable; exit() re-throws it to unwind
// the dispatch.  NOTE(review): intentionally catches Throwable — the
// jetty-6 suspend mechanism works by throwing; do not narrow this.
_retry=retry;
}
}
/* ------------------------------------------------------------ */
/** Suspend without capturing a response; otherwise identical to suspend(ServletResponse). */
public void suspend()
{
try
{
_response=null;
_responseWrapped=false;
// Reset per-cycle state before delegating to the native suspend.
_resumed=false;
_expired=false;
_completed=false;
_j6Continuation.suspend(_timeout);
}
catch(Throwable retry)
{
// See suspend(ServletResponse): captured for re-throw in exit().
_retry=retry;
}
}
/* ------------------------------------------------------------ */
/** @return true if the suspended response was a ServletResponseWrapper */
public boolean isResponseWrapped()
{
return _responseWrapped;
}
/* ------------------------------------------------------------ */
/**
 * @see org.eclipse.jetty.continuation.Continuation#undispatch()
 */
public void undispatch()
{
if (isSuspended())
{
// In debug mode throw a fresh throwable (real stack trace); otherwise
// reuse the shared pre-built instance to avoid stack-fill cost.
if (ContinuationFilter.__debug)
throw new ContinuationThrowable();
throw __exception;
}
throw new IllegalStateException("!suspended");
}
/* ------------------------------------------------------------ */
/**
 * Called by ContinuationFilter when a dispatch (re)enters the filter.
 * @return true if the request should be passed down the chain
 */
public boolean enter(ServletResponse response)
{
_response=response;
// If the native continuation was not explicitly resumed, this wakeup
// was a timeout.
_expired=!_j6Continuation.isResumed();
if (_initial)
return true;
_j6Continuation.reset();
if (_expired)
{
// Notify listeners of the timeout before the redispatch proceeds.
if (_listeners!=null)
{
for (ContinuationListener l: _listeners)
l.onTimeout(this);
}
}
return !_completed;
}
/* ------------------------------------------------------------ */
/**
 * Called by ContinuationFilter when a dispatch leaves the filter.
 * Re-throws any retry throwable captured during suspend().
 */
public boolean exit()
{
_initial=false;
Throwable th=_retry;
_retry=null;
if (th instanceof Error)
throw (Error)th;
if (th instanceof RuntimeException)
throw (RuntimeException)th;
// NOTE(review): a checked Throwable that is neither Error nor
// RuntimeException is silently dropped here — confirm that jetty-6's
// retry throwable is always one of the two re-thrown kinds.
if (_listeners!=null)
{
for (ContinuationListener l: _listeners)
l.onComplete(this);
}
return true;
}
}
| |
package com.planet_ink.coffee_mud.WebMacros;
import com.planet_ink.coffee_web.interfaces.*;
import com.planet_ink.coffee_web.util.CWThread;
import com.planet_ink.coffee_web.util.CWConfig;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.CMSecurity.DbgFlag;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2004-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Web macro behind the admin control panel page.  It serves two roles for
 * the page template: iterating the CMSecurity disable/debug flag
 * enumerations (DISABLENEXT/DISABLEID/DISABLEDESC and the DEBUG*
 * equivalents, driven by fake URL parameters), and executing QUERY-driven
 * reads (DISABLE/DEBUG/QUERY) and mutations (CHANGEDISABLE/CHANGEDEBUG/RESET)
 * of those flags.
 */
public class ControlPanel extends StdWebMacro
{
@Override public String name() { return "ControlPanel"; }
@Override public boolean isAdminMacro() {return true;}
@Override
public String runMacro(HTTPRequest httpReq, String parm, HTTPResponse httpResp)
{
final java.util.Map<String,String> parms=parseParms(parm);
// Iterator cursor for the disable-flag loop, carried between macro calls
// as a (fake) URL parameter on the request.
final String lastDisable=httpReq.getUrlParameter("DISABLEFLAG");
if(parms.containsKey("DISABLERESET"))
{
// Rewind the disable-flag iteration.
if(lastDisable!=null)
httpReq.removeUrlParameter("DISABLEFLAG");
return "";
}
if(parms.containsKey("DISABLENEXT"))
{
// Advance the cursor to the flag following lastDisable (or the first
// flag when no cursor is set) and store it for the ID/DESC macros.
String lastID="";
for(final CMSecurity.DisFlag flag : CMSecurity.DisFlag.values())
{
if((lastDisable==null)||((lastDisable.length()>0)&&(lastDisable.equals(lastID))&&(!flag.toString().equals(lastID))))
{
httpReq.addFakeUrlParameter("DISABLEFLAG",flag.toString());
return "";
}
lastID=flag.toString();
}
// Ran off the end of the enumeration: signal loop termination.
httpReq.addFakeUrlParameter("DISABLEFLAG","");
if(parms.containsKey("EMPTYOK"))
return "<!--EMPTY-->";
return " @break@";
}
if(parms.containsKey("DISABLEID"))
{
// Emit the current cursor flag name, or break the template loop.
if(lastDisable==null)
return " @break@";
return lastDisable;
}
if(parms.containsKey("DISABLEDESC"))
{
// Emit the description of the current cursor flag.
if(lastDisable==null)
return " @break@";
final CMSecurity.DisFlag flag = (CMSecurity.DisFlag)CMath.s_valueOf(CMSecurity.DisFlag.values(), lastDisable);
if(flag==null)
return " @break@";
return flag.description();
}
// Iterator cursor for the debug-flag loop; mirrors the disable handling.
final String lastDebug=httpReq.getUrlParameter("DEBUGFLAG");
if(parms.containsKey("ISDEBUGGING"))
{
return Log.debugChannelOn()?"true":"false";
}
if(parms.containsKey("DEBUGRESET"))
{
// Rewind the debug-flag iteration.
if(lastDebug!=null)
httpReq.removeUrlParameter("DEBUGFLAG");
return "";
}
if(parms.containsKey("DEBUGNEXT"))
{
// Advance the debug-flag cursor; same scheme as DISABLENEXT above.
String lastID="";
for(final CMSecurity.DbgFlag flag : CMSecurity.DbgFlag.values())
{
if((lastDebug==null)||((lastDebug.length()>0)&&(lastDebug.equals(lastID))&&(!flag.toString().equals(lastID))))
{
httpReq.addFakeUrlParameter("DEBUGFLAG",flag.toString());
return "";
}
lastID=flag.toString();
}
httpReq.addFakeUrlParameter("DEBUGFLAG","");
if(parms.containsKey("EMPTYOK"))
return "<!--EMPTY-->";
return " @break@";
}
if(parms.containsKey("DEBUGID"))
{
if(lastDebug==null)
return " @break@";
return lastDebug;
}
if(parms.containsKey("DEBUGDESC"))
{
if(lastDebug==null)
return " @break@";
final CMSecurity.DbgFlag flag = (CMSecurity.DbgFlag)CMath.s_valueOf(CMSecurity.DbgFlag.values(), lastDebug);
if(flag==null)
return " @break@";
return flag.description();
}
// Below here the macro is query-driven: QUERY selects the action and
// FIELD/VALUE parameterize it.
final String query=parms.get("QUERY");
if((query==null)||(query.length()==0))
return "";
if(query.equalsIgnoreCase("DISABLE"))
{
final String field=parms.get("FIELD");
if((field==null)||(field.length()==0))
return "";
if(field.equalsIgnoreCase("MISC"))
{
// Render the free-form disabled entries (abilities, commands,
// expertises, factions) as one comma-separated list.
StringBuilder str=new StringBuilder("");
final MultiEnumeration<String> enums = new MultiEnumeration<String>(CMSecurity.getDisabledAbilitiesEnum(true));
enums.addEnumeration(CMSecurity.getDisabledCommandsEnum(true));
enums.addEnumeration(CMSecurity.getDisabledExpertisesEnum(true));
enums.addEnumeration(CMSecurity.getDisabledFactionsEnum(true));
str.append(CMParms.toListString(enums));
return str.toString();
}
else
{
// Emit the checkbox attribute when the named flag is disabled.
final CMSecurity.DisFlag flag = (CMSecurity.DisFlag)CMath.s_valueOf(CMSecurity.DisFlag.values(), field.toUpperCase().trim());
if((flag!=null)&&(CMSecurity.isDisabled(flag)))
return " CHECKED ";
}
return "";
}
else
if(query.equalsIgnoreCase("DEBUG"))
{
// Emit the checkbox attribute when the named debug flag is active.
final String field=parms.get("FIELD");
if((field==null)||(field.length()==0))
return "";
final CMSecurity.DbgFlag flag = (CMSecurity.DbgFlag)CMath.s_valueOf(CMSecurity.DbgFlag.values(), field.toUpperCase().trim());
if((flag!=null)&&(CMSecurity.isDebugging(flag)))
return " CHECKED ";
return "";
}
else
if(query.equalsIgnoreCase("CHANGEDISABLE"))
{
final String field=parms.get("FIELD");
if((field==null)||(field.length()==0))
return "";
final String value=parms.get("VALUE");
if(field.equalsIgnoreCase("MISC") && (value != null))
{
// Replace the whole free-form disabled set with the submitted list.
// NOTE(review): the DISABLE/MISC display above enumerates abilities,
// commands, expertises and factions separately, while this clears
// getDisabledSpecialsEnum — confirm "specials" covers those same sets.
for(Enumeration<String> s=CMSecurity.getDisabledSpecialsEnum(true);s.hasMoreElements();)
CMSecurity.removeAnyDisableVar(s.nextElement());
for(String s : CMParms.parseCommas(value,true))
CMSecurity.setAnyDisableVar(s);
}
else
{
if((value!=null)&&(value.equalsIgnoreCase("on"))) // on means remove?!
CMSecurity.removeAnyDisableVar(field);
else
CMSecurity.setAnyDisableVar(field);
}
return "";
}
else
if(query.equalsIgnoreCase("CHANGEDEBUG"))
{
final String field=parms.get("FIELD");
if((field==null)||(field.length()==0))
return "";
final String value=parms.get("VALUE");
// Track which flag actually changed so the web-server logging
// configuration can be kept in sync below.
DbgFlag flag = null;
if((value!=null)&&(value.equalsIgnoreCase("on")))
{
if(CMSecurity.removeDebugVar(field))
{
flag=(DbgFlag)CMath.s_valueOf(DbgFlag.class, field.toUpperCase().trim());
}
}
else
{
if(CMSecurity.setDebugVar(field))
{
flag=(DbgFlag)CMath.s_valueOf(DbgFlag.class, field.toUpperCase().trim());
}
}
// When HTTP-related debug flags flip, push the new settings into the
// running web server's config (only possible from a CWThread).
if((Thread.currentThread() instanceof CWThread)
&&((flag==DbgFlag.HTTPACCESS)||(flag==DbgFlag.HTTPREQ)))
{
final CWConfig config=((CWThread)Thread.currentThread()).getConfig();
if(CMSecurity.isDebugging(DbgFlag.HTTPREQ))
config.setDebugFlag(CMProps.instance().getStr("DBGMSGS"));
if(CMSecurity.isDebugging(DbgFlag.HTTPACCESS))
config.setAccessLogFlag(CMProps.instance().getStr("ACCMSGS"));
}
return "";
}
else
if(query.equalsIgnoreCase("QUERY"))
{
final String field=parms.get("FIELD");
if((field==null)||(field.length()==0))
return "";
if(field.equalsIgnoreCase("DATABASE"))
return "Database Status: "+CMLib.database().errorStatus();
return "";
}
else
if(query.equalsIgnoreCase("RESET"))
{
final String field=parms.get("FIELD");
if((field==null)||(field.length()==0))
return "";
if(field.equalsIgnoreCase("DATABASE"))
{
CMLib.database().resetConnections();
return "Database successfully reset";
}
else
if(field.equalsIgnoreCase("SAVETHREAD"))
{
// Intentionally empty: SAVETHREAD reset is not implemented here.
}
return "";
}
return "";
}
}
| |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.observable;
import static org.junit.Assert.*;
import static org.junit.Assert.assertNotEquals;
import static org.mockito.Mockito.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.*;
import org.mockito.InOrder;
import io.reactivex.*;
import io.reactivex.Observable;
import io.reactivex.Observer;
import io.reactivex.exceptions.TestException;
import io.reactivex.functions.*;
import io.reactivex.internal.functions.Functions;
import io.reactivex.observers.*;
import io.reactivex.schedulers.*;
import io.reactivex.subjects.PublishSubject;
public class ObservableDelayTest {
// Mock observers and a virtual-time scheduler, re-created before each test.
// observer2 is unused in the tests visible here — presumably exercised by
// later tests in this class.
private Observer<Long> observer;
private Observer<Long> observer2;
private TestScheduler scheduler;
@Before
public void before() {
observer = TestHelper.mockObserver();
observer2 = TestHelper.mockObserver();
scheduler = new TestScheduler();
}
/**
 * delay(500ms) on a 1s interval of 3 items: each onNext must surface
 * exactly 500ms after its source emission (1.5s, 2.5s, 3.5s of virtual
 * time), with completion delayed alongside the last item and no error.
 */
@Test
public void testDelay() {
Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);
Observable<Long> delayed = source.delay(500L, TimeUnit.MILLISECONDS, scheduler);
delayed.subscribe(observer);
InOrder inOrder = inOrder(observer);
// 1ms before the first delayed emission: nothing observed yet.
scheduler.advanceTimeTo(1499L, TimeUnit.MILLISECONDS);
verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
// t=1500ms: first item (0L) arrives, and only that item.
scheduler.advanceTimeTo(1500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(0L);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
// Quiet window before the second delayed emission.
scheduler.advanceTimeTo(2400L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
// t=2500ms: second item (1L).
scheduler.advanceTimeTo(2500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(1L);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
// Quiet window before the final delayed emission.
scheduler.advanceTimeTo(3400L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
// t=3500ms: last item (2L) plus completion.
scheduler.advanceTimeTo(3500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(2L);
verify(observer, times(1)).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
/**
 * delay(5s) longer than the whole 3s source: items must still surface in
 * order at 6s, 7s and 8s of virtual time, with completion immediately
 * after the last item and no error.
 */
@Test
public void testLongDelay() {
Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);
Observable<Long> delayed = source.delay(5L, TimeUnit.SECONDS, scheduler);
delayed.subscribe(observer);
InOrder inOrder = inOrder(observer);
// 1ms before the first delayed emission: nothing observed yet.
scheduler.advanceTimeTo(5999L, TimeUnit.MILLISECONDS);
verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
// Items surface one per second starting at t=6000ms.
scheduler.advanceTimeTo(6000L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(0L);
scheduler.advanceTimeTo(6999L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
scheduler.advanceTimeTo(7000L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(1L);
scheduler.advanceTimeTo(7999L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
scheduler.advanceTimeTo(8000L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(2L);
inOrder.verify(observer, times(1)).onComplete();
// No further events after completion.
inOrder.verify(observer, never()).onNext(anyLong());
inOrder.verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
@Test
public void testDelayWithError() {
Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler)
.map(new Function<Long, Long>() {
@Override
public Long apply(Long value) {
if (value == 1L) {
throw new RuntimeException("error!");
}
return value;
}
});
Observable<Long> delayed = source.delay(1L, TimeUnit.SECONDS, scheduler);
delayed.subscribe(observer);
InOrder inOrder = inOrder(observer);
scheduler.advanceTimeTo(1999L, TimeUnit.MILLISECONDS);
verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(2000L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onError(any(Throwable.class));
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
scheduler.advanceTimeTo(5000L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
inOrder.verify(observer, never()).onError(any(Throwable.class));
verify(observer, never()).onComplete();
}
@Test
public void testDelayWithMultipleSubscriptions() {
Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);
Observable<Long> delayed = source.delay(500L, TimeUnit.MILLISECONDS, scheduler);
delayed.subscribe(observer);
delayed.subscribe(observer2);
InOrder inOrder = inOrder(observer);
InOrder inOrder2 = inOrder(observer2);
scheduler.advanceTimeTo(1499L, TimeUnit.MILLISECONDS);
verify(observer, never()).onNext(anyLong());
verify(observer2, never()).onNext(anyLong());
scheduler.advanceTimeTo(1500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(0L);
inOrder2.verify(observer2, times(1)).onNext(0L);
scheduler.advanceTimeTo(2499L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
inOrder2.verify(observer2, never()).onNext(anyLong());
scheduler.advanceTimeTo(2500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(1L);
inOrder2.verify(observer2, times(1)).onNext(1L);
verify(observer, never()).onComplete();
verify(observer2, never()).onComplete();
scheduler.advanceTimeTo(3500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(2L);
inOrder2.verify(observer2, times(1)).onNext(2L);
inOrder.verify(observer, never()).onNext(anyLong());
inOrder2.verify(observer2, never()).onNext(anyLong());
inOrder.verify(observer, times(1)).onComplete();
inOrder2.verify(observer2, times(1)).onComplete();
verify(observer, never()).onError(any(Throwable.class));
verify(observer2, never()).onError(any(Throwable.class));
}
@Test
public void testDelaySubscription() {
Observable<Integer> result = Observable.just(1, 2, 3).delaySubscription(100, TimeUnit.MILLISECONDS, scheduler);
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
result.subscribe(o);
inOrder.verify(o, never()).onNext(any());
inOrder.verify(o, never()).onComplete();
scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
inOrder.verify(o, times(1)).onNext(1);
inOrder.verify(o, times(1)).onNext(2);
inOrder.verify(o, times(1)).onNext(3);
inOrder.verify(o, times(1)).onComplete();
verify(o, never()).onError(any(Throwable.class));
}
@Test
public void testDelaySubscriptionDisposeBeforeTime() {
Observable<Integer> result = Observable.just(1, 2, 3).delaySubscription(100, TimeUnit.MILLISECONDS, scheduler);
Observer<Object> o = TestHelper.mockObserver();
TestObserver<Object> ts = new TestObserver<Object>(o);
result.subscribe(ts);
ts.dispose();
scheduler.advanceTimeBy(100, TimeUnit.MILLISECONDS);
verify(o, never()).onNext(any());
verify(o, never()).onComplete();
verify(o, never()).onError(any(Throwable.class));
}
@Test
public void testDelayWithObservableNormal1() {
PublishSubject<Integer> source = PublishSubject.create();
final List<PublishSubject<Integer>> delays = new ArrayList<PublishSubject<Integer>>();
final int n = 10;
for (int i = 0; i < n; i++) {
PublishSubject<Integer> delay = PublishSubject.create();
delays.add(delay);
}
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delays.get(t1);
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(delayFunc).subscribe(o);
for (int i = 0; i < n; i++) {
source.onNext(i);
delays.get(i).onNext(i);
inOrder.verify(o).onNext(i);
}
source.onComplete();
inOrder.verify(o).onComplete();
inOrder.verifyNoMoreInteractions();
verify(o, never()).onError(any(Throwable.class));
}
@Test
public void testDelayWithObservableSingleSend1() {
PublishSubject<Integer> source = PublishSubject.create();
final PublishSubject<Integer> delay = PublishSubject.create();
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delay;
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(delayFunc).subscribe(o);
source.onNext(1);
delay.onNext(1);
delay.onNext(2);
inOrder.verify(o).onNext(1);
inOrder.verifyNoMoreInteractions();
verify(o, never()).onError(any(Throwable.class));
}
@Test
public void testDelayWithObservableSourceThrows() {
PublishSubject<Integer> source = PublishSubject.create();
final PublishSubject<Integer> delay = PublishSubject.create();
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delay;
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(delayFunc).subscribe(o);
source.onNext(1);
source.onError(new TestException());
delay.onNext(1);
inOrder.verify(o).onError(any(TestException.class));
inOrder.verifyNoMoreInteractions();
verify(o, never()).onNext(any());
verify(o, never()).onComplete();
}
@Test
public void testDelayWithObservableDelayFunctionThrows() {
PublishSubject<Integer> source = PublishSubject.create();
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
throw new TestException();
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(delayFunc).subscribe(o);
source.onNext(1);
inOrder.verify(o).onError(any(TestException.class));
inOrder.verifyNoMoreInteractions();
verify(o, never()).onNext(any());
verify(o, never()).onComplete();
}
@Test
public void testDelayWithObservableDelayThrows() {
PublishSubject<Integer> source = PublishSubject.create();
final PublishSubject<Integer> delay = PublishSubject.create();
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delay;
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(delayFunc).subscribe(o);
source.onNext(1);
delay.onError(new TestException());
inOrder.verify(o).onError(any(TestException.class));
inOrder.verifyNoMoreInteractions();
verify(o, never()).onNext(any());
verify(o, never()).onComplete();
}
@Test
public void testDelayWithObservableSubscriptionNormal() {
PublishSubject<Integer> source = PublishSubject.create();
final PublishSubject<Integer> delay = PublishSubject.create();
Callable<Observable<Integer>> subFunc = new Callable<Observable<Integer>>() {
@Override
public Observable<Integer> call() {
return delay;
}
};
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delay;
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(Observable.defer(subFunc), delayFunc).subscribe(o);
source.onNext(1);
delay.onNext(1);
source.onNext(2);
delay.onNext(2);
inOrder.verify(o).onNext(2);
inOrder.verifyNoMoreInteractions();
verify(o, never()).onError(any(Throwable.class));
verify(o, never()).onComplete();
}
@Test
public void testDelayWithObservableSubscriptionFunctionThrows() {
PublishSubject<Integer> source = PublishSubject.create();
final PublishSubject<Integer> delay = PublishSubject.create();
Callable<Observable<Integer>> subFunc = new Callable<Observable<Integer>>() {
@Override
public Observable<Integer> call() {
throw new TestException();
}
};
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delay;
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(Observable.defer(subFunc), delayFunc).subscribe(o);
source.onNext(1);
delay.onNext(1);
source.onNext(2);
inOrder.verify(o).onError(any(TestException.class));
inOrder.verifyNoMoreInteractions();
verify(o, never()).onNext(any());
verify(o, never()).onComplete();
}
@Test
public void testDelayWithObservableSubscriptionThrows() {
PublishSubject<Integer> source = PublishSubject.create();
final PublishSubject<Integer> delay = PublishSubject.create();
Callable<Observable<Integer>> subFunc = new Callable<Observable<Integer>>() {
@Override
public Observable<Integer> call() {
return delay;
}
};
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delay;
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(Observable.defer(subFunc), delayFunc).subscribe(o);
source.onNext(1);
delay.onError(new TestException());
source.onNext(2);
inOrder.verify(o).onError(any(TestException.class));
inOrder.verifyNoMoreInteractions();
verify(o, never()).onNext(any());
verify(o, never()).onComplete();
}
@Test
public void testDelayWithObservableEmptyDelayer() {
PublishSubject<Integer> source = PublishSubject.create();
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return Observable.empty();
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(delayFunc).subscribe(o);
source.onNext(1);
source.onComplete();
inOrder.verify(o).onNext(1);
inOrder.verify(o).onComplete();
inOrder.verifyNoMoreInteractions();
verify(o, never()).onError(any(Throwable.class));
}
@Test
public void testDelayWithObservableSubscriptionRunCompletion() {
PublishSubject<Integer> source = PublishSubject.create();
final PublishSubject<Integer> sdelay = PublishSubject.create();
final PublishSubject<Integer> delay = PublishSubject.create();
Callable<Observable<Integer>> subFunc = new Callable<Observable<Integer>>() {
@Override
public Observable<Integer> call() {
return sdelay;
}
};
Function<Integer, Observable<Integer>> delayFunc = new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return delay;
}
};
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
source.delay(Observable.defer(subFunc), delayFunc).subscribe(o);
source.onNext(1);
sdelay.onComplete();
source.onNext(2);
delay.onNext(2);
inOrder.verify(o).onNext(2);
inOrder.verifyNoMoreInteractions();
verify(o, never()).onError(any(Throwable.class));
verify(o, never()).onComplete();
}
@Test
public void testDelayWithObservableAsTimed() {
Observable<Long> source = Observable.interval(1L, TimeUnit.SECONDS, scheduler).take(3);
final Observable<Long> delayer = Observable.timer(500L, TimeUnit.MILLISECONDS, scheduler);
Function<Long, Observable<Long>> delayFunc = new Function<Long, Observable<Long>>() {
@Override
public Observable<Long> apply(Long t1) {
return delayer;
}
};
Observable<Long> delayed = source.delay(delayFunc);
delayed.subscribe(observer);
InOrder inOrder = inOrder(observer);
scheduler.advanceTimeTo(1499L, TimeUnit.MILLISECONDS);
verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(1500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(0L);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(2400L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(2500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(1L);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(3400L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, never()).onNext(anyLong());
verify(observer, never()).onComplete();
verify(observer, never()).onError(any(Throwable.class));
scheduler.advanceTimeTo(3500L, TimeUnit.MILLISECONDS);
inOrder.verify(observer, times(1)).onNext(2L);
verify(observer, times(1)).onComplete();
verify(observer, never()).onError(any(Throwable.class));
}
@Test
public void testDelayWithObservableReorder() {
int n = 3;
PublishSubject<Integer> source = PublishSubject.create();
final List<PublishSubject<Integer>> subjects = new ArrayList<PublishSubject<Integer>>();
for (int i = 0; i < n; i++) {
subjects.add(PublishSubject.<Integer> create());
}
Observable<Integer> result = source.delay(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer t1) {
return subjects.get(t1);
}
});
Observer<Object> o = TestHelper.mockObserver();
InOrder inOrder = inOrder(o);
result.subscribe(o);
for (int i = 0; i < n; i++) {
source.onNext(i);
}
source.onComplete();
inOrder.verify(o, never()).onNext(anyInt());
inOrder.verify(o, never()).onComplete();
for (int i = n - 1; i >= 0; i--) {
subjects.get(i).onComplete();
inOrder.verify(o).onNext(i);
}
inOrder.verify(o).onComplete();
verify(o, never()).onError(any(Throwable.class));
}
@Test
public void testDelayEmitsEverything() {
Observable<Integer> source = Observable.range(1, 5);
Observable<Integer> delayed = source.delay(500L, TimeUnit.MILLISECONDS, scheduler);
delayed = delayed.doOnEach(new Consumer<Notification<Integer>>() {
@Override
public void accept(Notification<Integer> t1) {
System.out.println(t1);
}
});
TestObserver<Integer> observer = new TestObserver<Integer>();
delayed.subscribe(observer);
// all will be delivered after 500ms since range does not delay between them
scheduler.advanceTimeBy(500L, TimeUnit.MILLISECONDS);
observer.assertValues(1, 2, 3, 4, 5);
}
@Test
public void testBackpressureWithTimedDelay() {
TestObserver<Integer> ts = new TestObserver<Integer>();
Observable.range(1, Flowable.bufferSize() * 2)
.delay(100, TimeUnit.MILLISECONDS)
.observeOn(Schedulers.computation())
.map(new Function<Integer, Integer>() {
int c;
@Override
public Integer apply(Integer t) {
if (c++ <= 0) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
}
return t;
}
}).subscribe(ts);
ts.awaitTerminalEvent();
ts.assertNoErrors();
assertEquals(Flowable.bufferSize() * 2, ts.valueCount());
}
@Test
public void testBackpressureWithSubscriptionTimedDelay() {
TestObserver<Integer> ts = new TestObserver<Integer>();
Observable.range(1, Flowable.bufferSize() * 2)
.delaySubscription(100, TimeUnit.MILLISECONDS)
.delay(100, TimeUnit.MILLISECONDS)
.observeOn(Schedulers.computation())
.map(new Function<Integer, Integer>() {
int c;
@Override
public Integer apply(Integer t) {
if (c++ <= 0) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
}
return t;
}
}).subscribe(ts);
ts.awaitTerminalEvent();
ts.assertNoErrors();
assertEquals(Flowable.bufferSize() * 2, ts.valueCount());
}
@Test
public void testBackpressureWithSelectorDelay() {
TestObserver<Integer> ts = new TestObserver<Integer>();
Observable.range(1, Flowable.bufferSize() * 2)
.delay(new Function<Integer, Observable<Long>>() {
@Override
public Observable<Long> apply(Integer i) {
return Observable.timer(100, TimeUnit.MILLISECONDS);
}
})
.observeOn(Schedulers.computation())
.map(new Function<Integer, Integer>() {
int c;
@Override
public Integer apply(Integer t) {
if (c++ <= 0) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
}
return t;
}
}).subscribe(ts);
ts.awaitTerminalEvent();
ts.assertNoErrors();
assertEquals(Flowable.bufferSize() * 2, ts.valueCount());
}
@Test
public void testBackpressureWithSelectorDelayAndSubscriptionDelay() {
TestObserver<Integer> ts = new TestObserver<Integer>();
Observable.range(1, Flowable.bufferSize() * 2)
.delay(Observable.timer(500, TimeUnit.MILLISECONDS)
, new Function<Integer, Observable<Long>>() {
@Override
public Observable<Long> apply(Integer i) {
return Observable.timer(100, TimeUnit.MILLISECONDS);
}
})
.observeOn(Schedulers.computation())
.map(new Function<Integer, Integer>() {
int c;
@Override
public Integer apply(Integer t) {
if (c++ <= 0) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
}
return t;
}
}).subscribe(ts);
ts.awaitTerminalEvent();
ts.assertNoErrors();
assertEquals(Flowable.bufferSize() * 2, ts.valueCount());
}
@Test
public void testErrorRunsBeforeOnNext() {
TestScheduler test = new TestScheduler();
PublishSubject<Integer> ps = PublishSubject.create();
TestObserver<Integer> ts = new TestObserver<Integer>();
ps.delay(1, TimeUnit.SECONDS, test).subscribe(ts);
ps.onNext(1);
test.advanceTimeBy(500, TimeUnit.MILLISECONDS);
ps.onError(new TestException());
test.advanceTimeBy(1, TimeUnit.SECONDS);
ts.assertNoValues();
ts.assertError(TestException.class);
ts.assertNotComplete();
}
@Test
public void testDelaySupplierSimple() {
final PublishSubject<Integer> ps = PublishSubject.create();
Observable<Integer> source = Observable.range(1, 5);
TestObserver<Integer> ts = new TestObserver<Integer>();
source.delaySubscription(ps).subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotComplete();
ps.onNext(1);
ts.assertValues(1, 2, 3, 4, 5);
ts.assertComplete();
ts.assertNoErrors();
}
@Test
public void testDelaySupplierCompletes() {
final PublishSubject<Integer> ps = PublishSubject.create();
Observable<Integer> source = Observable.range(1, 5);
TestObserver<Integer> ts = new TestObserver<Integer>();
source.delaySubscription(ps).subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotComplete();
// FIXME should this complete the source instead of consuming it?
ps.onComplete();
ts.assertValues(1, 2, 3, 4, 5);
ts.assertComplete();
ts.assertNoErrors();
}
@Test
public void testDelaySupplierErrors() {
final PublishSubject<Integer> ps = PublishSubject.create();
Observable<Integer> source = Observable.range(1, 5);
TestObserver<Integer> ts = new TestObserver<Integer>();
source.delaySubscription(ps).subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotComplete();
ps.onError(new TestException());
ts.assertNoValues();
ts.assertNotComplete();
ts.assertError(TestException.class);
}
@Test
public void delayWithTimeDelayError() throws Exception {
Observable.just(1).concatWith(Observable.<Integer>error(new TestException()))
.delay(100, TimeUnit.MILLISECONDS, true)
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertFailure(TestException.class, 1);
}
@Test
public void testOnErrorCalledOnScheduler() throws Exception {
final CountDownLatch latch = new CountDownLatch(1);
final AtomicReference<Thread> thread = new AtomicReference<Thread>();
Observable.<String>error(new Exception())
.delay(0, TimeUnit.MILLISECONDS, Schedulers.newThread())
.doOnError(new Consumer<Throwable>() {
@Override
public void accept(Throwable throwable) throws Exception {
thread.set(Thread.currentThread());
latch.countDown();
}
})
.onErrorResumeNext(Observable.<String>empty())
.subscribe();
latch.await();
assertNotEquals(Thread.currentThread(), thread.get());
}
@Test
public void dispose() {
TestHelper.checkDisposed(PublishSubject.create().delay(1, TimeUnit.SECONDS));
TestHelper.checkDisposed(PublishSubject.create().delay(Functions.justFunction(Observable.never())));
}
@Test
public void doubleOnSubscribe() {
TestHelper.checkDoubleOnSubscribeObservable(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> o) throws Exception {
return o.delay(1, TimeUnit.SECONDS);
}
});
TestHelper.checkDoubleOnSubscribeObservable(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> o) throws Exception {
return o.delay(Functions.justFunction(Observable.never()));
}
});
}
@Test
public void onCompleteFinal() {
TestScheduler scheduler = new TestScheduler();
Observable.empty()
.delay(1, TimeUnit.MILLISECONDS, scheduler)
.subscribe(new DisposableObserver<Object>() {
@Override
public void onNext(Object value) {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onComplete() {
throw new TestException();
}
});
try {
scheduler.advanceTimeBy(1, TimeUnit.SECONDS);
fail("Should have thrown");
} catch (TestException ex) {
// expected
}
}
@Test
public void onErrorFinal() {
TestScheduler scheduler = new TestScheduler();
Observable.error(new TestException())
.delay(1, TimeUnit.MILLISECONDS, scheduler)
.subscribe(new DisposableObserver<Object>() {
@Override
public void onNext(Object value) {
}
@Override
public void onError(Throwable e) {
throw new TestException();
}
@Override
public void onComplete() {
}
});
try {
scheduler.advanceTimeBy(1, TimeUnit.SECONDS);
fail("Should have thrown");
} catch (TestException ex) {
// expected
}
}
}
| |
package net.graphical.model.causality.learning.gies.operation;
import net.graphical.model.causality.graph.model.AdjImpl.Dag;
import net.graphical.model.causality.graph.model.intervention.InterventionFamily;
import net.graphical.model.causality.scoreFunction.ScoreFunction;
import net.graphical.model.causality.graph.algorithm.transformer.EssentialGraphGenerator;
import net.graphical.model.causality.graph.model.AdjImpl.ChainGraph;
import net.graphical.model.causality.graph.model.Edge;
import net.graphical.model.causality.graph.model.EdgeType;
import net.graphical.model.causality.graph.model.Node;
import net.graphical.model.causality.graph.model.intervention.Intervention;
import org.junit.Test;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.assertEquals;
/**
* Created by sli on 11/30/15.
*/
/**
 * Tests for the GIES {@code Deletion} operation: proposing and committing an
 * edge deletion on an essential graph under an intervention family, and
 * cross-checking the result against {@code EssentialGraphGenerator}.
 */
public class DeletionTest {

    /**
     * Score function shared by all tests: each node is penalised 5 points per
     * parent; the whole-graph score is never used here and always returns 0.
     * NOTE(review): the original first test constructed this as
     * {@code new ScoreFunction(null)} while the others used the no-arg
     * constructor; unified on the no-arg form since only the overridden
     * methods are exercised.
     */
    private ScoreFunction minusFivePerParentScore() {
        return new ScoreFunction() {
            @Override
            public double getLocalScore(Node node, List<Node> parentNodes) {
                return -5 * parentNodes.size();
            }
            @Override
            public double score(ChainGraph dag) throws Exception {
                return 0;
            }
        };
    }

    /**
     * Intervention family used throughout: one purely observational
     * intervention and one intervening on node 4.
     */
    private InterventionFamily defaultInterventionFamily() {
        Intervention inv0 = new Intervention(Arrays.asList());
        Intervention inv1 = new Intervention(Arrays.asList(new Node(4)));
        return new InterventionFamily(Arrays.asList(inv0, inv1));
    }

    /** The seven nodes (1..7) shared by every graph fixture in this class. */
    private List<Node> sevenNodes() {
        return Arrays.asList(new Node(1), new Node(2), new Node(3), new Node(4), new Node(5), new Node(6), new Node(7)
        );
    }

    //REF_HB
    @Test
    public void testDeletion_propose_nonEssential() throws Exception {
        ChainGraph ed = getEssentialD();
        // Deleting the non-essential edge 2-5: the maximising clique is empty.
        Operation deletion = new Deletion(defaultInterventionFamily(), ed, new Node(2), new Node(5), minusFivePerParentScore()).propose();
        assertEquals(deletion.cliqueArgMaxC.size(), 0);
    }

    @Test
    public void testDeletion_commit_nonEssential() throws Exception {
        ChainGraph ed = getEssentialD();
        Operation deletion = new Deletion(defaultInterventionFamily(), ed, new Node(2), new Node(5), minusFivePerParentScore()).propose();
        deletion.commit();
        // Committing orients most edges: only one undirected edge remains.
        assertEquals(ed.noOfUndirectEdge(), 1);
        assertEquals(ed.noOfDirectEdge(), 8);
    }

    //REF_HB
    @Test
    public void testDeletion_propose_Essential() throws Exception {
        ChainGraph ed = getEssentialD();
        // Deleting the essential edge 2->6 still yields an empty maximising clique.
        Operation deletion = new Deletion(defaultInterventionFamily(), ed, new Node(2), new Node(6), minusFivePerParentScore()).propose();
        assertEquals(deletion.cliqueArgMaxC.size(), 0);
    }

    @Test
    public void testDeletion_commit_Essential() throws Exception {
        ChainGraph ed = getEssentialD();
        Operation deletion = new Deletion(defaultInterventionFamily(), ed, new Node(2), new Node(6), minusFivePerParentScore()).propose();
        deletion.commit();
        assertEquals(ed.noOfUndirectEdge(), 4);
        assertEquals(ed.noOfDirectEdge(), 5);
    }

    /**
     * Cross-check: generating the essential graph of the DAG with 2->6 removed
     * directly must give the same edge counts as the commit in the test above.
     */
    @Test
    public void testDeletion_commit_Essential_check() throws Exception {
        List<Edge> edges = Arrays.asList(
        new Edge(1,2, EdgeType.DIRECTED_MINUS),new Edge(1,5, EdgeType.DIRECTED_MINUS)
        ,new Edge(2,3, EdgeType.DIRECTED_PLUS),new Edge(2,5, EdgeType.DIRECTED_PLUS)
        ,new Edge(3,4, EdgeType.DIRECTED_PLUS),new Edge(3,7, EdgeType.DIRECTED_PLUS),new Edge(3,6, EdgeType.DIRECTED_PLUS)
        ,new Edge(4,7, EdgeType.DIRECTED_PLUS)
        ,new Edge(5,6, EdgeType.DIRECTED_PLUS)
        );
        Dag dag = new Dag(sevenNodes(), edges);
        EssentialGraphGenerator generator = new EssentialGraphGenerator(dag);
        ChainGraph result = generator.toEssentialGraph(defaultInterventionFamily());
        assertEquals(result.noOfUndirectEdge(), 4);
        assertEquals(result.noOfDirectEdge(), 5);
    }

    //REF_HB
    /** Essential graph of the figure-5a DAG (includes edge 2->6) has 4 undirected edges. */
    @Test
    public void testEssentialGraphGenerator_fig5a() throws Exception {
        List<Edge> edges = Arrays.asList(
        new Edge(1,2, EdgeType.DIRECTED_MINUS),new Edge(1,5, EdgeType.DIRECTED_MINUS)
        ,new Edge(2,3, EdgeType.DIRECTED_PLUS),new Edge(2,5, EdgeType.DIRECTED_PLUS),new Edge(2,6, EdgeType.DIRECTED_PLUS)
        ,new Edge(3,4, EdgeType.DIRECTED_PLUS),new Edge(3,7, EdgeType.DIRECTED_PLUS),new Edge(3,6, EdgeType.DIRECTED_PLUS)
        ,new Edge(4,7, EdgeType.DIRECTED_PLUS)
        ,new Edge(5,6, EdgeType.DIRECTED_PLUS)
        );
        Dag dag = new Dag(sevenNodes(), edges);
        EssentialGraphGenerator generator = new EssentialGraphGenerator(dag);
        ChainGraph result = generator.toEssentialGraph(defaultInterventionFamily());
        assertEquals(result.noOfUndirectEdge(), 4);
    }

    /**
     * Builds the essential chain graph fixture: undirected component over
     * nodes 1,2,3,5 and directed edges elsewhere.
     */
    private ChainGraph getEssentialD(){
        List<Edge> edges = Arrays.asList(
        new Edge(1,2, EdgeType.UNDIRECTED),new Edge(1,5, EdgeType.UNDIRECTED)
        ,new Edge(2,3, EdgeType.UNDIRECTED),new Edge(2,5, EdgeType.UNDIRECTED),new Edge(2,6, EdgeType.DIRECTED_PLUS)
        ,new Edge(3,4, EdgeType.DIRECTED_PLUS),new Edge(3,7, EdgeType.DIRECTED_PLUS),new Edge(3,6, EdgeType.DIRECTED_PLUS)
        ,new Edge(4,7, EdgeType.DIRECTED_PLUS)
        ,new Edge(5,6, EdgeType.DIRECTED_PLUS)
        );
        return new ChainGraph(sevenNodes(), edges);
    }
}
| |
/*
*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.apimgt.core;
import org.apache.commons.collections.map.HashedMap;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.SystemUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.apimgt.core.dao.PolicyDAO;
import org.wso2.carbon.apimgt.core.dao.impl.DAOFactory;
import org.wso2.carbon.apimgt.core.exception.APIMgtDAOException;
import org.wso2.carbon.apimgt.core.models.API;
import org.wso2.carbon.apimgt.core.models.APIStatus;
import org.wso2.carbon.apimgt.core.models.Application;
import org.wso2.carbon.apimgt.core.models.BlockConditions;
import org.wso2.carbon.apimgt.core.models.BusinessInformation;
import org.wso2.carbon.apimgt.core.models.Comment;
import org.wso2.carbon.apimgt.core.models.CompositeAPI;
import org.wso2.carbon.apimgt.core.models.CorsConfiguration;
import org.wso2.carbon.apimgt.core.models.DocumentInfo;
import org.wso2.carbon.apimgt.core.models.Endpoint;
import org.wso2.carbon.apimgt.core.models.Function;
import org.wso2.carbon.apimgt.core.models.Label;
import org.wso2.carbon.apimgt.core.models.Rating;
import org.wso2.carbon.apimgt.core.models.UriTemplate;
import org.wso2.carbon.apimgt.core.models.WorkflowStatus;
import org.wso2.carbon.apimgt.core.models.policy.APIPolicy;
import org.wso2.carbon.apimgt.core.models.policy.ApplicationPolicy;
import org.wso2.carbon.apimgt.core.models.policy.BandwidthLimit;
import org.wso2.carbon.apimgt.core.models.policy.Condition;
import org.wso2.carbon.apimgt.core.models.policy.CustomPolicy;
import org.wso2.carbon.apimgt.core.models.policy.HeaderCondition;
import org.wso2.carbon.apimgt.core.models.policy.IPCondition;
import org.wso2.carbon.apimgt.core.models.policy.JWTClaimsCondition;
import org.wso2.carbon.apimgt.core.models.policy.Pipeline;
import org.wso2.carbon.apimgt.core.models.policy.Policy;
import org.wso2.carbon.apimgt.core.models.policy.PolicyConstants;
import org.wso2.carbon.apimgt.core.models.policy.QueryParameterCondition;
import org.wso2.carbon.apimgt.core.models.policy.QuotaPolicy;
import org.wso2.carbon.apimgt.core.models.policy.RequestCountLimit;
import org.wso2.carbon.apimgt.core.models.policy.SubscriptionPolicy;
import org.wso2.carbon.apimgt.core.util.APIFileUtils;
import org.wso2.carbon.apimgt.core.util.APIMgtConstants;
import org.wso2.carbon.apimgt.core.util.APIMgtConstants.WorkflowConstants;
import org.wso2.carbon.apimgt.core.util.APIUtils;
import org.wso2.carbon.apimgt.core.workflow.ApplicationCreationWorkflow;
import org.wso2.carbon.apimgt.core.workflow.Workflow;
import org.wso2.carbon.lcm.core.impl.LifecycleState;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import static org.wso2.carbon.apimgt.core.dao.impl.PolicyDAOImpl.SECONDS_TIMUNIT;
import static org.wso2.carbon.apimgt.core.dao.impl.PolicyDAOImpl.UNLIMITED_TIER;
import static org.wso2.carbon.apimgt.core.models.policy.PolicyConstants.REQUEST_COUNT_TYPE;
import static org.wso2.carbon.apimgt.core.util.APIMgtConstants.SANDBOX_ENDPOINT;
/**
 * Factory of pre-populated sample model objects (APIs, applications, policies,
 * documents, WSDL content, ...) used by the integration/unit tests.
 */
public class SampleTestObjectCreator {
    // --- transport / tier / role name constants used across the sample objects ---
    private static final String HTTP = "http";
    private static final String HTTPS = "https";
    private static final String GOLD_TIER = "Gold";
    private static final String SILVER_TIER = "Silver";
    private static final String BRONZE_TIER = "Bronze";
    private static final String ADMIN = "admin";
    private static final String TAG_CLIMATE = "climate";
    private static final String TAG_FOOD = "food";
    private static final String TAG_BEVERAGE = "beverage";
    private static final String API_VERSION = "1.0.0";
    private static final String NAME_BUSINESS_OWNER_1 = "John Doe";
    private static final String NAME_BUSINESS_OWNER_2 = "Jane Doe";
    private static final String EMAIL_BUSINESS_OWNER_1 = "john.doe@annonymous.com";
    private static final String EMAIL_BUSINESS_OWNER_2 = "jane.doe@annonymous.com";
    private static final String EMPTY_STRING = "";
    // --- throttling policy sample names/descriptions ---
    private static final String SAMPLE_API_POLICY = "SampleAPIPolicy";
    private static final String SAMPLE_API_POLICY_DESCRIPTION = "SampleAPIPolicy Description";
    private static final String SAMPLE_APP_POLICY = "SampleAppPolicy";
    private static final String SAMPLE_APP_POLICY_DESCRIPTION = "SampleAppPolicy Description";
    private static final String SAMPLE_SUBSCRIPTION_POLICY = "SampleSubscriptionPolicy";
    private static final String SAMPLE_SUBSCRIPTION_POLICY_DESCRIPTION = "SampleSubscriptionPolicy Description";
    private static final String UPDATED_SAMPLE_API_POLICY = "UpdatedSampleAPIPolicy";
    private static final String UPDATED_SAMPLE_API_POLICY_DESCRIPTION = "Updated NewSampleAPIPolicy Description";
    private static final String UPDATED_SAMPLE_APP_POLICY = "UpdatedSampleAppPolicy";
    private static final String UPDATED_SAMPLE_APP_POLICY_DESCRIPTION = "Updated SampleAppPolicy Description";
    private static final String UPDATED_SAMPLE_SUBSCRIPTION_POLICY = "Updated SampleSubscriptionPolicy";
    private static final String UPDATED_SAMPLE_SUBSCRIPTION_POLICY_DESCRIPTION = "Updated SampleSubscriptionPolicy "
            + "Description";
    private static final String SAMPLE_CUSTOM_ATTRIBUTE = "CUSTOM ATTRIBUTE SAMPLE";
    private static final String PRODUCTION_ENDPOINT = "production";
    private static final String FIFTY_PER_MIN_TIER = "50PerMin";
    private static final String TWENTY_PER_MIN_TIER = "20PerMin";
    private static final String TIME_UNIT_SECONDS = "s";
    private static final String TIME_UNIT_MONTH = "Month";
    private static final String CUSTOMER_ROLE = "customer";
    private static final String EMPLOYEE_ROLE = "employee";
    private static final String MANAGER_ROLE = "manager";
    private static final String ALLOWED_HEADER_AUTHORIZATION = "Authorization";
    private static final String ALLOWED_HEADER_CUSTOM = "X-Custom";
    private static final String API_CREATOR = "Adam Doe";
    private static final String SAMPLE_DOC_NAME = "CalculatorDoc";
    private static final String TEST_APP_1 = "TestApp";
    private static final String TEST_APP_2 = "TestApp2";
    private static final String TEMPLATE_ID = "getApisApiIdGet";
    // --- classpath locations of the test resource files read by the factory methods ---
    private static final String ALT_SWAGGER_PATH = "api/alternativeSwagger.json";
    private static final String SAMPLE_GTW_CONFIG_PATH = "api/sampleGatewayConfig.bal";
    private static final String ALT_GTW_CONFIG_PATH = "api/alternativeGatewayConfig.bal";
    private static final String PATH_THUMBNAIL_IMG_1 = "api/thumbnail1.jpg";
    private static final String PATH_THUMBNAIL_IMG_2 = "api/thumbnail2.jpg";
    private static final String PATH_INLINE_DOC_1 = "document/inline1.txt";
    private static final String PATH_INLINE_DOC_2 = "document/inline2.txt";
    private static final String PATH_FILE_DOC_1 = "document/pdf-sample.pdf";
    // NOTE(review): mixed-case "File"/"ZIP" in these names breaks the
    // UPPER_SNAKE_CASE convention used by the rest of the constants.
    private static final String PATH_WSDL11_File_1 = "wsdl/WeatherForecast.wsdl";
    private static final String PATH_WSDL11_File_2 = "wsdl/stockQuote.wsdl";
    private static final String PATH_WSDL20_File_1 = "wsdl/myServiceWsdl2.wsdl";
    private static final String PATH_WSDL11_ZIP_1 = "wsdl/WSDL11Files_1.zip";
    private static final String PATH_WSDL11_ZIP_2 = "wsdl/WSDL11Files_2.zip";
    private static final String PATH_WSDL20_ZIP_1 = "wsdl/WSDL20Files.zip";
    private static final String SAMPLE_IP_1 = "12.32.45.3";
    private static final String SAMPLE_IP_2 = "24.34.1.45";
    private static final String SAMPLE_CUSTOM_RULE = "Sample Custom Rule";
    public static final String ADMIN_ROLE_ID = "cfbde56e-4352-498e-b6dc-85a6f1f8b058";
    public static final String DEVELOPER_ROLE_ID = "cfdce56e-8434-498e-b6dc-85a6f2d8f035";
    public static final String ACCESS_URL = "https://test.";
    public static final String ORIGINAL_ENDPOINT_WEATHER = "http://www.webservicex.net/WeatherForecast.asmx";
    public static final String ORIGINAL_ENDPOINT_STOCK_QUOTE = "http://www.webservicex.net/stockquote.asmx";
    public static final String BUSINESS_PLAN_FREE = "FREE";
    // Shared policy instances (one random UUID each per class load).
    // NOTE(review): these are public, static and mutable — tests share them, so
    // mutating one leaks into other tests; confirm before relying on their state.
    public static APIPolicy unlimitedApiPolicy = new APIPolicy(UUID.randomUUID().toString(), UNLIMITED_TIER);
    public static APIPolicy goldApiPolicy = new APIPolicy(UUID.randomUUID().toString(), GOLD_TIER);
    public static APIPolicy silverApiPolicy = new APIPolicy(UUID.randomUUID().toString(), SILVER_TIER);
    public static APIPolicy bronzeApiPolicy = new APIPolicy(UUID.randomUUID().toString(), BRONZE_TIER);
    public static SubscriptionPolicy unlimitedSubscriptionPolicy =
            new SubscriptionPolicy(UUID.randomUUID().toString(), UNLIMITED_TIER);
    public static SubscriptionPolicy goldSubscriptionPolicy =
            new SubscriptionPolicy(UUID.randomUUID().toString(), GOLD_TIER);
    public static SubscriptionPolicy silverSubscriptionPolicy =
            new SubscriptionPolicy(UUID.randomUUID().toString(), SILVER_TIER);
    public static SubscriptionPolicy bronzeSubscriptionPolicy =
            new SubscriptionPolicy(UUID.randomUUID().toString(), BRONZE_TIER);
    public static ApplicationPolicy fiftyPerMinApplicationPolicy =
            new ApplicationPolicy(UUID.randomUUID().toString(), FIFTY_PER_MIN_TIER);
    public static ApplicationPolicy twentyPerMinApplicationPolicy =
            new ApplicationPolicy(UUID.randomUUID().toString(), TWENTY_PER_MIN_TIER);
    // Sample swagger definition loaded once in the static initializer below.
    public static String apiDefinition;
    // Raw stream of swagger.json; it is consumed while filling apiDefinition.
    public static InputStream inputStream;
    private static final Logger log = LoggerFactory.getLogger(SampleTestObjectCreator.class);
    public static String endpointId = UUID.randomUUID().toString();
static {
try {
inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("swagger.json");
apiDefinition = IOUtils.toString(inputStream);
} catch (IOException e) {
log.error(e.getMessage());
}
}
    /**
     * Creates the default sample API builder ("WeatherAPI", version 1.0.0,
     * owned by admin) with public visibility and the unlimited API policy.
     *
     * @return a fully populated {@code API.APIBuilder} in CREATED state
     */
    public static API.APIBuilder createDefaultAPI() {
        Set<String> transport = new HashSet<>();
        transport.add(HTTP);
        transport.add(HTTPS);
        Set<String> tags = new HashSet<>();
        tags.add(TAG_CLIMATE);
        // Subscription tiers offered by the API.
        Set<Policy> policies = new HashSet<>();
        policies.add(goldSubscriptionPolicy);
        policies.add(silverSubscriptionPolicy);
        policies.add(bronzeSubscriptionPolicy);
        BusinessInformation businessInformation = new BusinessInformation();
        CorsConfiguration corsConfiguration = new CorsConfiguration();
        // Group-level permission JSON: developer can READ/UPDATE, admin has all rights.
        String permissionJson = "[{\"groupId\" : \"developer\", \"permission\" : "
                + "[\"READ\",\"UPDATE\"]},{\"groupId\" : \"admin\", \"permission\" : [\"READ\",\"UPDATE\"," +
                "\"DELETE\", \"MANAGE_SUBSCRIPTION\"]}]";
        API.APIBuilder apiBuilder = new API.APIBuilder(ADMIN, "WeatherAPI", API_VERSION).
                id(UUID.randomUUID().toString()).
                context("weather").
                description("Get Weather Info").
                lifeCycleStatus(APIStatus.CREATED.getStatus()).
                lifecycleInstanceId(UUID.randomUUID().toString()).
                endpoint(Collections.emptyMap()).
                isResponseCachingEnabled(false).
                cacheTimeout(60).
                isDefaultVersion(false).
                apiPolicy(unlimitedApiPolicy).
                transport(transport).
                tags(tags).
                policies(policies).
                visibility(API.Visibility.PUBLIC).
                visibleRoles(new HashSet<>()).
                businessInformation(businessInformation).
                corsConfiguration(corsConfiguration).
                createdTime(LocalDateTime.now()).
                createdBy(ADMIN).
                updatedBy(ADMIN).
                lastUpdatedTime(LocalDateTime.now()).
                apiPermission(permissionJson).
                uriTemplates(getMockUriTemplates()).
                apiDefinition(apiDefinition).
                securityScheme(3);
        // Role-id -> permission-bitmask map (raw type kept to match the builder API).
        Map map = new HashMap();
        map.put(DEVELOPER_ROLE_ID, 6);
        map.put(ADMIN_ROLE_ID, 15);
        apiBuilder.permissionMap(map);
        return apiBuilder;
    }
public static API getSummaryFromAPI(API api) {
return new API.APIBuilder(api.getProvider(), api.getName(), api.getVersion()).
id(api.getId()).
context(api.getContext()).
description(api.getDescription()).
lifeCycleStatus(api.getLifeCycleStatus()).
lifecycleInstanceId(api.getLifecycleInstanceId()).
securityScheme(api.getSecurityScheme()).build();
}
public static API copyAPIIgnoringNonEditableFields(API fromAPI, API toAPI) {
API.APIBuilder builder = new API.APIBuilder(toAPI);
return builder.provider(fromAPI.getProvider()).
id(fromAPI.getId()).
name(fromAPI.getName()).
version(fromAPI.getVersion()).
context(fromAPI.getContext()).
createdTime(fromAPI.getCreatedTime()).
createdBy(fromAPI.getCreatedBy()).
lifecycleInstanceId(fromAPI.getLifecycleInstanceId()).
lifeCycleStatus(fromAPI.getLifeCycleStatus()).
copiedFromApiId(fromAPI.getCopiedFromApiId()).build();
}
public static LifecycleState getMockLifecycleStateObject(String lifecycleId) {
LifecycleState lifecycleState = new LifecycleState();
lifecycleState.setLcName(APIMgtConstants.API_LIFECYCLE);
lifecycleState.setLifecycleId(lifecycleId);
lifecycleState.setState(APIStatus.PUBLISHED.getStatus());
return lifecycleState;
}
public static API getMockApiSummaryObject() {
return new API.APIBuilder(ADMIN, "Sample", API_VERSION).build();
}
public static API.APIBuilder createAlternativeAPI() {
Set<String> transport = new HashSet<>();
transport.add(HTTP);
Set<String> tags = new HashSet<>();
tags.add(TAG_FOOD);
tags.add(TAG_BEVERAGE);
Set<Policy> policies = new HashSet<>();
policies.add(silverSubscriptionPolicy);
policies.add(bronzeSubscriptionPolicy);
BusinessInformation businessInformation = new BusinessInformation();
businessInformation.setBusinessOwner(NAME_BUSINESS_OWNER_1);
businessInformation.setBusinessOwnerEmail(EMAIL_BUSINESS_OWNER_1);
businessInformation.setTechnicalOwner(NAME_BUSINESS_OWNER_2);
businessInformation.setBusinessOwnerEmail(EMAIL_BUSINESS_OWNER_2);
CorsConfiguration corsConfiguration = new CorsConfiguration();
corsConfiguration.setEnabled(true);
corsConfiguration.setAllowMethods(
Arrays.asList(APIMgtConstants.FunctionsConstants.GET, APIMgtConstants.FunctionsConstants.POST,
APIMgtConstants.FunctionsConstants.DELETE));
corsConfiguration.setAllowHeaders(Arrays.asList(ALLOWED_HEADER_AUTHORIZATION, ALLOWED_HEADER_CUSTOM));
corsConfiguration.setAllowCredentials(true);
corsConfiguration.setAllowOrigins(Arrays.asList("*"));
String permissionJson = "[{\"groupId\" : \"developer\", \"permission\" : "
+ "[\"READ\",\"UPDATE\"]},{\"groupId\" : \"admin\", \"permission\" : [\"READ\",\"UPDATE\"," +
"\"DELETE\", \"MANAGE_SUBSCRIPTION\"]}]";
Map permissionMap = new HashMap();
permissionMap.put(DEVELOPER_ROLE_ID, 6);
permissionMap.put(ADMIN_ROLE_ID, 15);
API.APIBuilder apiBuilder = new API.APIBuilder(ADMIN, "restaurantAPI", "0.9").
id(UUID.randomUUID().toString()).
context("weather").
description("Get Food & Beverage Info").
lifeCycleStatus(APIStatus.CREATED.getStatus()).
endpoint(Collections.emptyMap()).
isResponseCachingEnabled(true).
cacheTimeout(120).
isDefaultVersion(true).
apiPolicy(goldApiPolicy).
transport(transport).
tags(tags).
policies(policies).
visibility(API.Visibility.RESTRICTED).
visibleRoles(new HashSet<>(Arrays.asList(CUSTOMER_ROLE, MANAGER_ROLE, EMPLOYEE_ROLE))).
businessInformation(businessInformation).
corsConfiguration(corsConfiguration).
apiPermission(permissionJson).
permissionMap(permissionMap).
createdTime(LocalDateTime.now()).
createdBy(API_CREATOR).
apiDefinition(apiDefinition).
lastUpdatedTime(LocalDateTime.now()).
securityScheme(3);
return apiBuilder;
}
public static API.APIBuilder createUniqueAPI() {
Set<String> transport = new HashSet<>();
transport.add(HTTP);
Set<String> tags = new HashSet<>();
tags.add(TAG_FOOD);
tags.add(TAG_BEVERAGE);
Set<Policy> policies = new HashSet<>();
policies.add(silverSubscriptionPolicy);
policies.add(bronzeSubscriptionPolicy);
BusinessInformation businessInformation = new BusinessInformation();
businessInformation.setBusinessOwner(NAME_BUSINESS_OWNER_1);
businessInformation.setBusinessOwnerEmail(EMAIL_BUSINESS_OWNER_1);
businessInformation.setTechnicalOwner(NAME_BUSINESS_OWNER_2);
businessInformation.setBusinessOwnerEmail(EMAIL_BUSINESS_OWNER_2);
CorsConfiguration corsConfiguration = new CorsConfiguration();
corsConfiguration.setEnabled(true);
corsConfiguration.setAllowMethods(
Arrays.asList(APIMgtConstants.FunctionsConstants.GET, APIMgtConstants.FunctionsConstants.POST,
APIMgtConstants.FunctionsConstants.DELETE));
corsConfiguration.setAllowHeaders(Arrays.asList(ALLOWED_HEADER_AUTHORIZATION, ALLOWED_HEADER_CUSTOM));
corsConfiguration.setAllowCredentials(true);
corsConfiguration.setAllowOrigins(Arrays.asList("*"));
String permissionJson = "[{\"groupId\" : \"developer\", \"permission\" : "
+ "[\"READ\",\"UPDATE\"]},{\"groupId\" : \"admin\", \"permission\" : [\"READ\",\"UPDATE\"," +
"\"DELETE\", \"MANAGE_SUBSCRIPTION\"]}]";
Map permissionMap = new HashMap();
permissionMap.put(DEVELOPER_ROLE_ID, 6);
permissionMap.put(ADMIN_ROLE_ID, 15);
API.APIBuilder apiBuilder = new API.APIBuilder(UUID.randomUUID().toString(), UUID.randomUUID().toString(),
API_VERSION).
id(UUID.randomUUID().toString()).
context(UUID.randomUUID().toString()).
description("Get Food & Beverage Info").
lifeCycleStatus(APIStatus.CREATED.getStatus()).
endpoint(Collections.emptyMap()).
isResponseCachingEnabled(true).
cacheTimeout(120).
isDefaultVersion(true).
apiPolicy(goldApiPolicy).
transport(transport).
tags(tags).
policies(policies).
visibility(API.Visibility.RESTRICTED).
visibleRoles(new HashSet<>(Arrays.asList(CUSTOMER_ROLE, MANAGER_ROLE, EMPLOYEE_ROLE))).
businessInformation(businessInformation).
corsConfiguration(corsConfiguration).
apiPermission(permissionJson).
permissionMap(permissionMap).
createdTime(LocalDateTime.now()).
createdBy(API_CREATOR).
uriTemplates(Collections.emptyMap()).
apiDefinition(apiDefinition).
lastUpdatedTime(LocalDateTime.now()).
securityScheme(3);
return apiBuilder;
}
public static CompositeAPI.Builder createUniqueCompositeAPI() {
Set<String> transport = new HashSet<>();
transport.add(HTTP);
HashMap permissionMap = new HashMap();
permissionMap.put(DEVELOPER_ROLE_ID, 6);
permissionMap.put(ADMIN_ROLE_ID, 15);
permissionMap.put(ADMIN_ROLE_ID, 7);
Application app = createDefaultApplication();
//generate random name for each time when generating unique composite API
app.setName(UUID.randomUUID().toString());
try {
DAOFactory.getApplicationDAO().addApplication(app);
} catch (APIMgtDAOException e) {
log.error("Error adding application", e);
}
CompositeAPI.Builder apiBuilder = new CompositeAPI.Builder().
id(UUID.randomUUID().toString()).
name(UUID.randomUUID().toString()).
provider(UUID.randomUUID().toString()).
version(API_VERSION).
context(UUID.randomUUID().toString()).
description("Get Food & Beverage Info").
transport(transport).
permissionMap(permissionMap).
applicationId(app.getId()).
createdTime(LocalDateTime.now()).
createdBy(API_CREATOR).
uriTemplates(Collections.emptyMap()).
apiDefinition(apiDefinition).
lastUpdatedTime(LocalDateTime.now());
return apiBuilder;
}
public static API.APIBuilder createCustomAPI(String name, String version, String context) {
API.APIBuilder apiBuilder = createDefaultAPI();
apiBuilder.name(name);
apiBuilder.version(version);
apiBuilder.context(context);
apiBuilder.endpoint(Collections.emptyMap());
apiBuilder.uriTemplates(Collections.EMPTY_MAP);
return apiBuilder;
}
public static API copyAPISummary(API api) {
return new API.APIBuilder(api.getProvider(), api.getName(), api.getVersion()).
id(api.getId()).
context(api.getContext()).
description(api.getDescription()).
lifeCycleStatus(api.getLifeCycleStatus()).
lifecycleInstanceId(api.getLifecycleInstanceId()).
securityScheme(api.getSecurityScheme()).build();
}
public static String createAlternativeSwaggerDefinition() throws IOException {
return IOUtils.toString(Thread.currentThread().getContextClassLoader().getResourceAsStream(ALT_SWAGGER_PATH));
}
public static String createSampleGatewayConfig() {
try {
return IOUtils.toString(
Thread.currentThread().getContextClassLoader().getResourceAsStream(SAMPLE_GTW_CONFIG_PATH));
} catch (IOException e) {
log.error("Error while reading " + SAMPLE_GTW_CONFIG_PATH, e);
return null;
}
}
public static String createAlternativeGatewayConfig() throws IOException {
return IOUtils
.toString(Thread.currentThread().getContextClassLoader().getResourceAsStream(ALT_GTW_CONFIG_PATH));
}
public static InputStream createDefaultThumbnailImage() {
return Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_THUMBNAIL_IMG_1);
}
public static InputStream createAlternativeThumbnailImage() {
return Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_THUMBNAIL_IMG_2);
}
public static DocumentInfo createDefaultDocumentationInfo() {
//created by admin
DocumentInfo.Builder builder = new DocumentInfo.Builder();
builder.id(UUID.randomUUID().toString());
builder.name(SAMPLE_DOC_NAME);
builder.type(DocumentInfo.DocType.HOWTO);
builder.summary("Summary of Calculator Documentation");
builder.sourceType(DocumentInfo.SourceType.INLINE);
builder.sourceURL(EMPTY_STRING);
builder.otherType(EMPTY_STRING);
builder.visibility(DocumentInfo.Visibility.API_LEVEL);
builder.createdTime(LocalDateTime.now());
builder.lastUpdatedTime(LocalDateTime.now());
return builder.build();
}
public static DocumentInfo createAlternativeDocumentationInfo(String uuid) {
//created by admin
DocumentInfo.Builder builder = new DocumentInfo.Builder();
builder.id(uuid);
builder.name(SAMPLE_DOC_NAME);
builder.type(DocumentInfo.DocType.HOWTO);
builder.summary("Summary of Calculator Documentation - alternative");
builder.sourceType(DocumentInfo.SourceType.INLINE);
builder.sourceURL(EMPTY_STRING);
builder.otherType(EMPTY_STRING);
builder.visibility(DocumentInfo.Visibility.API_LEVEL);
builder.createdTime(LocalDateTime.now());
builder.lastUpdatedTime(LocalDateTime.now());
return builder.build();
}
/**
* Creates a file type documentation info sample
*
* @return a file type documentation info sample
*/
public static DocumentInfo createFileDocumentationInfo() {
//created by admin
DocumentInfo.Builder builder = new DocumentInfo.Builder();
builder.id(UUID.randomUUID().toString());
builder.name(SAMPLE_DOC_NAME);
builder.type(DocumentInfo.DocType.HOWTO);
builder.summary("Summary of PDF Type Documentation");
builder.sourceType(DocumentInfo.SourceType.FILE);
builder.sourceURL(EMPTY_STRING);
builder.otherType(EMPTY_STRING);
builder.visibility(DocumentInfo.Visibility.API_LEVEL);
builder.createdTime(LocalDateTime.now());
builder.lastUpdatedTime(LocalDateTime.now());
return builder.build();
}
/**
* Retrieves a sample file inline content string
*
* @return file inline content string
* @throws IOException If unable to read doc file resource
*/
public static String createDefaultInlineDocumentationContent() throws IOException {
return IOUtils.toString(Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_INLINE_DOC_1));
}
/**
* Retrieves a sample file inline content string
*
* @return file inline content string
* @throws IOException If unable to read doc file resource
*/
public static String createAlternativeInlineDocumentationContent() throws IOException {
return IOUtils.toString(Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_INLINE_DOC_2));
}
/**
* Retrieves file content byte array
*
* @return file content byte array
* @throws IOException If unable to read doc file resource
*/
public static byte[] createDefaultFileDocumentationContent() throws IOException {
return IOUtils.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_FILE_DOC_1));
}
public static Application createDefaultApplication() {
//created by admin
Application application = new Application(TEST_APP_1, ADMIN);
application.setId(UUID.randomUUID().toString());
application.setDescription("This is a test application");
application.setStatus(APIMgtConstants.ApplicationStatus.APPLICATION_CREATED);
application.setPolicy(fiftyPerMinApplicationPolicy);
application.setCreatedTime(LocalDateTime.now());
application.setUpdatedUser(ADMIN);
application.setUpdatedTime(LocalDateTime.now());
return application;
}
public static Application createAlternativeApplication() {
//created by admin and updated by admin2
Application application = new Application(TEST_APP_2, ADMIN);
application.setId(UUID.randomUUID().toString());
application.setDescription("This is test application 2");
application.setStatus(APIMgtConstants.ApplicationStatus.APPLICATION_APPROVED);
application.setPolicy(twentyPerMinApplicationPolicy);
application.setUpdatedUser("admin2");
application.setUpdatedTime(LocalDateTime.now());
return application;
}
public static Application createCustomApplication(String applicationName, String owner) {
Application application = new Application(applicationName, owner);
application.setId(UUID.randomUUID().toString());
application.setDescription("This is a test application");
application.setStatus(APIMgtConstants.ApplicationStatus.APPLICATION_CREATED);
application.setPolicy(fiftyPerMinApplicationPolicy);
application.setCreatedTime(LocalDateTime.now());
application.setUpdatedUser(owner);
application.setUpdatedTime(LocalDateTime.now());
return application;
}
public static Application createApplicationWithPermissions() {
//created by admin
HashMap permissionMap = new HashMap();
permissionMap.put(APIMgtConstants.Permission.UPDATE, APIMgtConstants.Permission.UPDATE_PERMISSION);
Application application = new Application(TEST_APP_1, ADMIN);
application.setId(UUID.randomUUID().toString());
application.setDescription("This is a test application");
application.setStatus(APIMgtConstants.ApplicationStatus.APPLICATION_CREATED);
application.setPolicy(fiftyPerMinApplicationPolicy);
application.setPermissionMap(permissionMap);
application.setCreatedTime(LocalDateTime.now());
application.setUpdatedUser(ADMIN);
application.setUpdatedTime(LocalDateTime.now());
return application;
}
/**
* Creates a sample function
*
* @return a sample function
* @throws URISyntaxException if error occurred while initializing the URI
*/
public static Function createDefaultFunction() throws URISyntaxException {
return new Function("sampleFunction1", new URI("http://localhost/test1"));
}
/**
* Creates an alternative function
*
* @return an alternative function
* @throws URISyntaxException if error occurred while initializing the URI
*/
public static Function createAlternativeFunction() throws URISyntaxException {
return new Function("alternativeFunction1", new URI("http://localhost/test-alternative1"));
}
/**
* Creates an alternative function
*
* @return an alternative function
* @throws URISyntaxException if error occurred while initializing the URI
*/
public static Function createAlternativeFunction2() throws URISyntaxException {
return new Function("alternativeFunction2", new URI("http://localhost/test-alternative2"));
}
/**
* Returns default WSDL 1.1 file content stream
*
* @return default WSDL 1.1 file content stream
*/
public static InputStream createDefaultWSDL11ContentInputStream() throws IOException {
return Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_WSDL11_File_1);
}
/**
* Returns default WSDL 1.1 file
*
* @return default WSDL 1.1 file
*/
public static byte[] createDefaultWSDL11Content() throws IOException {
return IOUtils.toByteArray(createDefaultWSDL11ContentInputStream());
}
/**
* Returns alternative WSDL 1.1 file content stream
*
* @return alternative WSDL 1.1 file content stream
*/
public static InputStream createAlternativeWSDL11ContentInputStream() throws IOException {
return Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_WSDL11_File_2);
}
/**
* Returns default WSDL 1.1 file
*
* @return default WSDL 1.1 file
*/
public static byte[] createAlternativeWSDL11Content() throws IOException {
return IOUtils
.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_WSDL11_File_2));
}
/**
* Returns default WSDL 2.0 file
*
* @return default WSDL 2.0 file
*/
public static byte[] createDefaultWSDL20Content() throws IOException {
return IOUtils
.toByteArray(Thread.currentThread().getContextClassLoader().getResourceAsStream(PATH_WSDL20_File_1));
}
/**
* Returns default WSDL 1.0 archive's input stream
*
* @return default WSDL 1.0 archive's input stream
*/
public static InputStream createDefaultWSDL11ArchiveInputStream() throws IOException, APIMgtDAOException {
return Thread.currentThread().getContextClassLoader()
.getResourceAsStream(PATH_WSDL11_ZIP_1);
}
/**
* Returns alternative WSDL 1.0 archive's input stream
*
* @return alternative WSDL 1.0 archive's input stream
*/
public static InputStream createAlternativeWSDL11ArchiveInputStream() throws IOException, APIMgtDAOException {
return Thread.currentThread().getContextClassLoader()
.getResourceAsStream(PATH_WSDL11_ZIP_2);
}
    /**
     * Extracts the default WSDL 1.1 archive into a fresh temp-dir location.
     *
     * @return default WSDL 1.1 archive extracted path
     * @throws IOException       if the archive resource cannot be read
     * @throws APIMgtDAOException if extraction fails
     */
    public static String createDefaultWSDL11Archive() throws IOException, APIMgtDAOException {
        InputStream zipInputStream = createDefaultWSDL11ArchiveInputStream();
        // Unique temp directory per call so parallel tests do not collide.
        final String tempFolderPath =
                SystemUtils.getJavaIoTmpDir() + File.separator + UUID.randomUUID().toString();
        String archivePath = tempFolderPath + File.separator + "wsdl11.zip";
        // NOTE(review): the stream is handed to APIFileUtils — presumably it
        // closes it after extraction; confirm, otherwise this leaks per call.
        return APIFileUtils.extractUploadedArchive(zipInputStream, "extracted", archivePath, tempFolderPath);
    }
    /**
     * Extracts the default WSDL 2.0 archive into a fresh temp-dir location.
     *
     * @return default WSDL 2.0 archive extracted path
     * @throws APIMgtDAOException if extraction fails
     */
    public static String createDefaultWSDL20Archive() throws APIMgtDAOException {
        InputStream zipInputStream = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream(PATH_WSDL20_ZIP_1);
        // Unique temp directory per call so parallel tests do not collide.
        final String tempFolderPath =
                SystemUtils.getJavaIoTmpDir() + File.separator + UUID.randomUUID().toString();
        String archivePath = tempFolderPath + File.separator + "wsdl20.zip";
        // NOTE(review): the stream is handed to APIFileUtils — presumably it
        // closes it after extraction; confirm, otherwise this leaks per call.
        return APIFileUtils.extractUploadedArchive(zipInputStream, "extracted", archivePath, tempFolderPath);
    }
/**
* create default api policy
*
* @return APIPolicy object is returned
*/
public static APIPolicy createDefaultAPIPolicy() {
APIPolicy apiPolicy = new APIPolicy(SAMPLE_API_POLICY);
apiPolicy.setUuid(UUID.randomUUID().toString());
apiPolicy.setDisplayName(SAMPLE_API_POLICY);
apiPolicy.setDescription(SAMPLE_API_POLICY_DESCRIPTION);
apiPolicy.setUserLevel(APIMgtConstants.ThrottlePolicyConstants.API_LEVEL);
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE);
RequestCountLimit requestCountLimit = new RequestCountLimit(TIME_UNIT_SECONDS, 1000, 10000);
defaultQuotaPolicy.setLimit(requestCountLimit);
apiPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
apiPolicy.setPipelines(createDefaultPipelines());
return apiPolicy;
}
/**
* Updated the given API policy
*
* @param apiPolicy {@link APIPolicy} instance to be updated
* @return updated {@link APIPolicy} instance
*/
public static APIPolicy updateAPIPolicy(APIPolicy apiPolicy) {
apiPolicy.setDisplayName(UPDATED_SAMPLE_API_POLICY);
apiPolicy.setDescription(UPDATED_SAMPLE_API_POLICY_DESCRIPTION);
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.BANDWIDTH_TYPE);
BandwidthLimit bandwidthLimit = new BandwidthLimit(TIME_UNIT_SECONDS, 1, 1000, "KB");
defaultQuotaPolicy.setLimit(bandwidthLimit);
apiPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
apiPolicy.setPipelines(createDefaultPipelines());
apiPolicy.getPipelines().add(createNewIPRangePipeline());
return apiPolicy;
}
/**
* create default pipeline for api policy
*
* @return list of Pipeline objects is returned
*/
public static List<Pipeline> createDefaultPipelines() {
//Pipeline 1
IPCondition ipCondition = new IPCondition(PolicyConstants.IP_RANGE_TYPE);
ipCondition.setStartingIP("192.168.12.3");
ipCondition.setEndingIP("192.168.88.19");
IPCondition ipConditionSpecific = new IPCondition(PolicyConstants.IP_SPECIFIC_TYPE);
ipConditionSpecific.setSpecificIP("123.42.14.56");
//adding above conditions to condition list of pipeline 1
List<Condition> conditionsList = new ArrayList<>(); //contains conditions for each pipeline
conditionsList.add(ipCondition);
conditionsList.add(ipConditionSpecific);
//set quota policy with bandwidth limit
BandwidthLimit bandwidthLimit = new BandwidthLimit(TIME_UNIT_MONTH, 1, 1000, PolicyConstants.MB);
QuotaPolicy quotaPolicy1 = new QuotaPolicy();
quotaPolicy1.setType(PolicyConstants.BANDWIDTH_TYPE);
quotaPolicy1.setLimit(bandwidthLimit);
Pipeline pipeline1 = new Pipeline();
pipeline1.setConditions(conditionsList);
pipeline1.setQuotaPolicy(quotaPolicy1);
//End of pipeline 1 -> Beginning of pipeline 2
HeaderCondition headerCondition = new HeaderCondition();
headerCondition.setHeader("Browser");
headerCondition.setValue("Chrome");
JWTClaimsCondition jwtClaimsCondition = new JWTClaimsCondition();
jwtClaimsCondition.setClaimUrl("/path/path2");
jwtClaimsCondition.setAttribute("attributed");
QueryParameterCondition queryParameterCondition = new QueryParameterCondition();
queryParameterCondition.setParameter("Location");
queryParameterCondition.setValue("Colombo");
//adding conditions to condition list of pipeline2
conditionsList = new ArrayList<>();
conditionsList.add(headerCondition);
conditionsList.add(jwtClaimsCondition);
conditionsList.add(queryParameterCondition);
//pipeline 2 with request count as quota policy
RequestCountLimit requestCountLimit = new RequestCountLimit(TIME_UNIT_SECONDS, 1, 1000);
QuotaPolicy quotaPolicy2 = new QuotaPolicy();
quotaPolicy2.setType(PolicyConstants.REQUEST_COUNT_TYPE);
quotaPolicy2.setLimit(requestCountLimit);
Pipeline pipeline2 = new Pipeline();
pipeline2.setConditions(conditionsList);
pipeline2.setQuotaPolicy(quotaPolicy2);
//adding pipelines
List<Pipeline> pipelineList = new ArrayList<>(); //contains all the default pipelines
pipelineList.add(pipeline1);
pipelineList.add(pipeline2);
return pipelineList;
}
/**
* Creates a new {@link Pipeline} instance
*
* @return created Pipeline instance
*/
public static Pipeline createNewIPRangePipeline() {
IPCondition ipCondition = new IPCondition(PolicyConstants.IP_RANGE_TYPE);
ipCondition.setStartingIP("10.100.0.105");
ipCondition.setEndingIP("10.100.0.115");
Pipeline pipeline = new Pipeline();
RequestCountLimit requestCountLimit = new RequestCountLimit(TIME_UNIT_SECONDS, 1, 1000);
QuotaPolicy quotaPolicy = new QuotaPolicy();
quotaPolicy.setType(REQUEST_COUNT_TYPE);
quotaPolicy.setLimit(requestCountLimit);
pipeline.setQuotaPolicy(quotaPolicy);
pipeline.setConditions(Arrays.asList(ipCondition));
return pipeline;
}
/**
* Create default api policy with bandwidth limit as quota policy
*
* @return APIPolicy object with bandwidth limit as quota policy is returned
*/
public static APIPolicy createDefaultAPIPolicyWithBandwidthLimit() {
BandwidthLimit bandwidthLimit = new BandwidthLimit(TIME_UNIT_MONTH, 1, 1000, PolicyConstants.MB);
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.BANDWIDTH_TYPE);
defaultQuotaPolicy.setLimit(bandwidthLimit);
//set default API Policy
APIPolicy apiPolicy = new APIPolicy(SAMPLE_API_POLICY);
apiPolicy.setUuid(UUID.randomUUID().toString());
apiPolicy.setDisplayName(SAMPLE_API_POLICY);
apiPolicy.setDescription(SAMPLE_API_POLICY_DESCRIPTION);
apiPolicy.setUserLevel(APIMgtConstants.ThrottlePolicyConstants.API_LEVEL);
apiPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
return apiPolicy;
}
public static ApplicationPolicy createDefaultApplicationPolicy() {
ApplicationPolicy applicationPolicy = new ApplicationPolicy(SAMPLE_APP_POLICY);
applicationPolicy.setUuid(UUID.randomUUID().toString());
applicationPolicy.setDisplayName(SAMPLE_APP_POLICY);
applicationPolicy.setDescription(SAMPLE_APP_POLICY_DESCRIPTION);
applicationPolicy.setCustomAttributes(SAMPLE_CUSTOM_ATTRIBUTE.getBytes());
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE);
RequestCountLimit requestCountLimit = new RequestCountLimit(TIME_UNIT_SECONDS, 10000, 1000);
defaultQuotaPolicy.setLimit(requestCountLimit);
applicationPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
return applicationPolicy;
}
public static ApplicationPolicy updateApplicationPolicy(ApplicationPolicy applicationPolicy) {
applicationPolicy.setDisplayName(UPDATED_SAMPLE_APP_POLICY);
applicationPolicy.setDescription(UPDATED_SAMPLE_APP_POLICY_DESCRIPTION);
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.BANDWIDTH_TYPE);
BandwidthLimit bandwidthLimit = new BandwidthLimit(TIME_UNIT_SECONDS, 10, 1000, "KB");
defaultQuotaPolicy.setLimit(bandwidthLimit);
applicationPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
return applicationPolicy;
}
public static SubscriptionPolicy createDefaultSubscriptionPolicy() {
SubscriptionPolicy subscriptionPolicy = new SubscriptionPolicy(SAMPLE_SUBSCRIPTION_POLICY);
subscriptionPolicy.setUuid(UUID.randomUUID().toString());
subscriptionPolicy.setDisplayName(SAMPLE_SUBSCRIPTION_POLICY);
subscriptionPolicy.setDescription(SAMPLE_SUBSCRIPTION_POLICY_DESCRIPTION);
subscriptionPolicy.setCustomAttributes(SAMPLE_CUSTOM_ATTRIBUTE.getBytes());
subscriptionPolicy.setDeployed(true);
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.REQUEST_COUNT_TYPE);
RequestCountLimit requestCountLimit = new RequestCountLimit(TIME_UNIT_SECONDS, 10000, 1000);
defaultQuotaPolicy.setLimit(requestCountLimit);
subscriptionPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
subscriptionPolicy.setBillingPlan(BUSINESS_PLAN_FREE);
subscriptionPolicy.setRateLimitCount(100);
subscriptionPolicy.setRateLimitTimeUnit("s");
subscriptionPolicy.setStopOnQuotaReach(true);
return subscriptionPolicy;
}
public static SubscriptionPolicy createSubscriptionPolicyWithBandwithLimit() {
SubscriptionPolicy subscriptionPolicy = new SubscriptionPolicy("SubPolicyBandwith");
subscriptionPolicy.setUuid(UUID.randomUUID().toString());
subscriptionPolicy.setDisplayName(SAMPLE_SUBSCRIPTION_POLICY);
subscriptionPolicy.setDescription(SAMPLE_SUBSCRIPTION_POLICY_DESCRIPTION);
subscriptionPolicy.setCustomAttributes(SAMPLE_CUSTOM_ATTRIBUTE.getBytes());
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.BANDWIDTH_TYPE);
BandwidthLimit bandwidthLimit = new BandwidthLimit(TIME_UNIT_SECONDS, 1, 1000, "KB");
defaultQuotaPolicy.setLimit(bandwidthLimit);
subscriptionPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
subscriptionPolicy.setBillingPlan(BUSINESS_PLAN_FREE);
subscriptionPolicy.setRateLimitCount(100);
subscriptionPolicy.setRateLimitTimeUnit("s");
subscriptionPolicy.setStopOnQuotaReach(true);
return subscriptionPolicy;
}
public static SubscriptionPolicy updateSubscriptionPolicy(SubscriptionPolicy subscriptionPolicy) {
subscriptionPolicy.setDisplayName(UPDATED_SAMPLE_SUBSCRIPTION_POLICY);
subscriptionPolicy.setDescription(UPDATED_SAMPLE_SUBSCRIPTION_POLICY_DESCRIPTION);
QuotaPolicy defaultQuotaPolicy = new QuotaPolicy();
defaultQuotaPolicy.setType(PolicyConstants.BANDWIDTH_TYPE);
BandwidthLimit bandwidthLimit = new BandwidthLimit(TIME_UNIT_SECONDS, 1, 1000, "KB");
defaultQuotaPolicy.setLimit(bandwidthLimit);
subscriptionPolicy.setDefaultQuotaPolicy(defaultQuotaPolicy);
return subscriptionPolicy;
}
public static DocumentInfo getMockDocumentInfoObject(String docId) {
DocumentInfo.Builder builder = new DocumentInfo.Builder().fileName("sample_doc.pdf").name("howto_guide")
.id(docId);
return builder.build();
}
public static List<DocumentInfo> getMockDocumentInfoObjectsList() {
List<DocumentInfo> docList = new ArrayList<>();
DocumentInfo doc1 = new DocumentInfo.Builder().fileName("sample1").id("123").build();
DocumentInfo doc2 = new DocumentInfo.Builder().fileName("sample1").id("124").build();
DocumentInfo doc3 = new DocumentInfo.Builder().fileName("sample1").id("125").build();
docList.add(doc1);
docList.add(doc2);
docList.add(doc3);
return docList;
}
public static List<API> createMockAPIList() {
List<API> apiList = new ArrayList<>();
API api1 = createDefaultAPI().build();
API api2 = createAlternativeAPI().build();
apiList.add(api1);
apiList.add(api2);
return apiList;
}
public static Endpoint createMockEndpoint() {
return new Endpoint.Builder().endpointConfig("{'type':'http','url':'http://localhost:8280'}").id(endpointId)
.maxTps(1000L).security("{'enabled':false}").name("Endpoint1")
.applicableLevel(APIMgtConstants.GLOBAL_ENDPOINT).type("http").build();
}
public static Endpoint createUpdatedEndpoint() {
return new Endpoint.Builder().endpointConfig("{'type':'soap','url':'http://localhost:8280'}").id(endpointId)
.maxTps(1000L).security("{'enabled':false}").name("Endpoint1")
.applicableLevel(APIMgtConstants.GLOBAL_ENDPOINT).type("http").build();
}
public static Endpoint createAlternativeEndpoint() {
String uuid = UUID.randomUUID().toString();
return new Endpoint.Builder().endpointConfig("{'type':'soap','url':'http://localhost:8280'}").id(uuid)
.name("Endpoint2").maxTps(1000L).security("{'enabled':false}")
.applicableLevel(APIMgtConstants.GLOBAL_ENDPOINT).build();
}
public static Map<String, Endpoint> getMockEndpointMap() {
Map<String, Endpoint> endpointMap = new HashedMap();
endpointMap.put(PRODUCTION_ENDPOINT,
new Endpoint.Builder().id(endpointId).applicableLevel(APIMgtConstants.GLOBAL_ENDPOINT).name
("production").build());
endpointMap.put(SANDBOX_ENDPOINT,
new Endpoint.Builder().id(UUID.randomUUID().toString()).name("sandbox").applicableLevel(APIMgtConstants
.API_SPECIFIC_ENDPOINT).build());
return endpointMap;
}
public static Map<String, UriTemplate> getMockUriTemplates() {
Map<String, UriTemplate> uriTemplateMap = new HashMap();
UriTemplate.UriTemplateBuilder uriTemplateBuilder = new UriTemplate.UriTemplateBuilder();
uriTemplateBuilder.templateId(TEMPLATE_ID);
uriTemplateBuilder.uriTemplate("/apis/");
uriTemplateBuilder.authType(APIMgtConstants.AUTH_APPLICATION_LEVEL_TOKEN);
uriTemplateBuilder.policy(unlimitedApiPolicy);
uriTemplateBuilder.httpVerb(APIMgtConstants.FunctionsConstants.GET);
uriTemplateMap.put(TEMPLATE_ID, uriTemplateBuilder.build());
return uriTemplateMap;
}
public static Label.Builder createLabel(String name) {
List<String> accessUrls = new ArrayList<>();
accessUrls.add(ACCESS_URL + name);
return new Label.Builder().
id(UUID.randomUUID().toString()).
name(name).
accessUrls(accessUrls);
}
public static Workflow createWorkflow(String workflowReferenceID) throws APIMgtDAOException {
Workflow workflow = new ApplicationCreationWorkflow(DAOFactory.getApplicationDAO(),
DAOFactory.getWorkflowDAO(), null);
workflow.setExternalWorkflowReference(workflowReferenceID);
workflow.setStatus(WorkflowStatus.CREATED);
workflow.setCreatedTime(LocalDateTime.now());
workflow.setWorkflowType(WorkflowConstants.WF_TYPE_AM_APPLICATION_CREATION);
workflow.setWorkflowReference(UUID.randomUUID().toString());
Map<String, String> properties = new HashMap<>();
properties.put("property1", "value1");
properties.put("property2", "value2");
workflow.setAttributes(properties);
return workflow;
}
public static DocumentInfo createDefaultFileDocumentationInfo() {
//created by admin
DocumentInfo.Builder builder = new DocumentInfo.Builder();
builder.id(UUID.randomUUID().toString());
builder.name(SAMPLE_DOC_NAME);
builder.type(DocumentInfo.DocType.HOWTO);
builder.summary("Summary of Calculator Documentation");
builder.sourceType(DocumentInfo.SourceType.FILE);
builder.sourceURL(EMPTY_STRING);
builder.otherType(EMPTY_STRING);
builder.visibility(DocumentInfo.Visibility.API_LEVEL);
builder.createdTime(LocalDateTime.now());
builder.lastUpdatedTime(LocalDateTime.now());
return builder.build();
}
public static Comment createDefaultComment(String apiId) {
Comment comment = new Comment();
comment.setUuid(UUID.randomUUID().toString());
comment.setApiId(apiId);
comment.setCommentText("this is a sample comment");
comment.setCommentedUser("admin");
comment.setUpdatedUser("admin");
comment.setCreatedTime(LocalDateTime.now());
comment.setUpdatedTime(LocalDateTime.now());
return comment;
}
public static Comment createAlternativeComment(String apiId) {
Comment comment = new Comment();
comment.setUuid(UUID.randomUUID().toString());
comment.setApiId(apiId);
comment.setCommentText("this is a sample comment - alternative");
comment.setCommentedUser("admin");
comment.setUpdatedUser("admin");
comment.setCreatedTime(LocalDateTime.now());
comment.setUpdatedTime(LocalDateTime.now());
return comment;
}
public static Rating createDefaultRating(String apiId) {
Rating rating = new Rating();
rating.setUuid(UUID.randomUUID().toString());
rating.setApiId(apiId);
rating.setRating(4);
rating.setUsername("john");
rating.setLastUpdatedUser("john");
rating.setCreatedTime(LocalDateTime.now());
rating.setLastUpdatedTime(LocalDateTime.now());
return rating;
}
public static API.APIBuilder createDefaultAPIWithApiLevelEndpoint() {
Set<String> transport = new HashSet<>();
transport.add(HTTP);
transport.add(HTTPS);
Set<String> tags = new HashSet<>();
tags.add(TAG_CLIMATE);
Set<Policy> policies = new HashSet<>();
policies.add(new SubscriptionPolicy(GOLD_TIER));
policies.add(new SubscriptionPolicy(SILVER_TIER));
policies.add(new SubscriptionPolicy(BRONZE_TIER));
BusinessInformation businessInformation = new BusinessInformation();
CorsConfiguration corsConfiguration = new CorsConfiguration();
String permissionJson = "[{\"groupId\" : \"developer\", \"permission\" : "
+ "[\"READ\",\"UPDATE\"]},{\"groupId\" : \"admin\", \"permission\" : [\"READ\",\"UPDATE\"," +
"\"DELETE\", \"MANAGE_SUBSCRIPTION\"]}]";
Map<String, Endpoint> endpointMap = new HashMap<>();
endpointMap.put(APIMgtConstants.PRODUCTION_ENDPOINT,
new Endpoint.Builder().id(endpointId).name("api1-production--endpint")
.applicableLevel(APIMgtConstants.API_SPECIFIC_ENDPOINT).build());
API.APIBuilder apiBuilder = new API.APIBuilder(ADMIN, "WeatherAPI", API_VERSION).
id(UUID.randomUUID().toString()).
context("weather").
description("Get Weather Info").
lifeCycleStatus(APIStatus.CREATED.getStatus()).
lifecycleInstanceId(UUID.randomUUID().toString()).
endpoint(endpointMap).
wsdlUri("http://localhost:9443/echo?wsdl").
isResponseCachingEnabled(false).
cacheTimeout(60).
isDefaultVersion(false).
apiPolicy(APIUtils.getDefaultAPIPolicy()).
transport(transport).
tags(tags).
policies(policies).
visibility(API.Visibility.PUBLIC).
visibleRoles(new HashSet<>()).
businessInformation(businessInformation).
corsConfiguration(corsConfiguration).
createdTime(LocalDateTime.now()).
createdBy(ADMIN).
updatedBy(ADMIN).
lastUpdatedTime(LocalDateTime.now()).
apiPermission(permissionJson).
uriTemplates(getMockUriTemplates()).
apiDefinition(apiDefinition);
Map map = new HashMap();
map.put(DEVELOPER_ROLE_ID, 6);
map.put(ADMIN_ROLE_ID, 15);
apiBuilder.permissionMap(map);
return apiBuilder;
}
public static BlockConditions createDefaultBlockCondition(String conditionType) {
BlockConditions blockConditions = new BlockConditions();
blockConditions.setConditionType(conditionType);
blockConditions.setEnabled(true);
if (conditionType.equals(APIMgtConstants.ThrottlePolicyConstants.BLOCKING_CONDITIONS_IP)) {
blockConditions.setConditionValue(SAMPLE_IP_1);
} else if (conditionType.equals(APIMgtConstants.ThrottlePolicyConstants.BLOCKING_CONDITION_IP_RANGE)) {
blockConditions.setStartingIP(SAMPLE_IP_1);
blockConditions.setEndingIP(SAMPLE_IP_2);
} else if (conditionType.equals(APIMgtConstants.ThrottlePolicyConstants.BLOCKING_CONDITIONS_API)) {
try {
API.APIBuilder apiBuilder = SampleTestObjectCreator.createDefaultAPI();
API api = apiBuilder.build();
DAOFactory.getApiDAO().addAPI(api);
blockConditions.setConditionValue(api.getContext());
} catch (APIMgtDAOException e) {
log.error("Error while adding default api in default block condition", e);
}
} else if (conditionType.equals(APIMgtConstants.ThrottlePolicyConstants.BLOCKING_CONDITIONS_APPLICATION)) {
try {
Application app = createDefaultApplication();
DAOFactory.getApplicationDAO().addApplication(app);
blockConditions.setConditionValue(app.getId() + ":" + app.getName());
} catch (APIMgtDAOException e) {
log.error("Error while adding default app in default block condition", e);
}
} else if (conditionType.equals(APIMgtConstants.ThrottlePolicyConstants.BLOCKING_CONDITIONS_USER)) {
blockConditions.setConditionValue(ADMIN);
}
return blockConditions;
}
    /**
     * Creates a sample {@link CustomPolicy} keyed by {@code $userId} whose Siddhi
     * query throttles admin@carbon.super after 5 requests per minute.
     *
     * NOTE(review): in the query below, "(count(userId) >= 5 as isThrottled" looks
     * like it is missing a closing parenthesis before "as" — confirm against the
     * Siddhi grammar and against any tests that compare this exact string before
     * changing it.
     *
     * @return populated CustomPolicy instance
     */
    public static CustomPolicy createDefaultCustomPolicy() {
        CustomPolicy customPolicy = new CustomPolicy(SAMPLE_CUSTOM_RULE);
        customPolicy.setKeyTemplate("$userId");
        String siddhiQuery = "FROM RequestStream SELECT userId, ( userId == 'admin@carbon.super' ) AS isEligible , "
                + "str:concat('admin@carbon.super','') as throttleKey INSERT INTO EligibilityStream;"
                + "FROM EligibilityStream[isEligible==true]#throttler:timeBatch(1 min) SELECT throttleKey, "
                + "(count(userId) >= 5 as isThrottled, expiryTimeStamp group by throttleKey INSERT ALL EVENTS into "
                + "ResultStream;";
        customPolicy.setSiddhiQuery(siddhiQuery);
        customPolicy.setDescription("Sample custom policy");
        return customPolicy;
    }
public static void createDefaultPolicy(PolicyDAO policyDAO) throws APIMgtDAOException {
QuotaPolicy quotaPolicy = new QuotaPolicy();
quotaPolicy.setType(REQUEST_COUNT_TYPE);
quotaPolicy.setLimit(new RequestCountLimit(SECONDS_TIMUNIT, 60, 1));
unlimitedApiPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addApiPolicy(unlimitedApiPolicy);
goldApiPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addApiPolicy(goldApiPolicy);
silverApiPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addApiPolicy(silverApiPolicy);
bronzeApiPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addApiPolicy(bronzeApiPolicy);
unlimitedSubscriptionPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addSubscriptionPolicy(unlimitedSubscriptionPolicy);
goldSubscriptionPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addSubscriptionPolicy(goldSubscriptionPolicy);
silverSubscriptionPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addSubscriptionPolicy(silverSubscriptionPolicy);
bronzeSubscriptionPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addSubscriptionPolicy(bronzeSubscriptionPolicy);
fiftyPerMinApplicationPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addApplicationPolicy(fiftyPerMinApplicationPolicy);
twentyPerMinApplicationPolicy.setDefaultQuotaPolicy(quotaPolicy);
policyDAO.addApplicationPolicy(twentyPerMinApplicationPolicy);
}
    /**
     * Builds the expected Siddhi execution app string for the default application
     * policy: an in-memory request stream, a JMS-sinked global throttle stream,
     * and eligibility/throttling queries keyed by appKey. The string is asserted
     * verbatim elsewhere, so its exact formatting matters.
     *
     * @return the Siddhi app definition as a single string
     */
    public static String createDefaultSiddhiAppforAppPolicy() {
        ApplicationPolicy policy = createDefaultApplicationPolicy();
        RequestCountLimit limit = (RequestCountLimit) createDefaultApplicationPolicy().getDefaultQuotaPolicy()
                .getLimit();
        String siddhiApp =
                "@App:name('application_" + policy.getPolicyName() + "')\n" + "@App:description('ExecutionPlan for app_"
                        + policy.getPolicyName() + "')\n" +
                        "@source(type='inMemory', topic='apim', @map(type='passThrough'))\n"
                        + "define stream RequestStream (messageID string, appKey string, appTier string, "
                        + "subscriptionKey string,"
                        + " apiKey string, apiTier string, subscriptionTier string, resourceKey string,"
                        + " resourceTier string,"
                        + " userId string, apiContext string, apiVersion string, appTenant string, apiTenant string,"
                        + " appId " + "string, apiName string, propertiesMap string);\n" +
                        "@sink(type='jms', @map(type='text'),\n"
                        + "factory.initial='org.apache.activemq.jndi.ActiveMQInitialContextFactory',"
                        + " provider.url='tcp://localhost:61616', destination='TEST.FOO', connection.factory."
                        + "type='topic',\n" + "connection.factory.jndi.name='TopicConnectionFactory')\n"
                        + "define stream GlobalThrottleStream (throttleKey string, isThrottled bool"
                        + ", expiryTimeStamp long);\n" +
                        "FROM RequestStream\n" + "SELECT messageID, (appTier == '" + policy.getPolicyName()
                        + "') AS isEligible, appKey AS throttleKey, " + "propertiesMap\n"
                        + "INSERT INTO EligibilityStream;\n" +
                        "FROM EligibilityStream[isEligible==true]#throttler:timeBatch(" + policy.getDefaultQuotaPolicy()
                        .getLimit().getUnitTime() + " " + policy.getDefaultQuotaPolicy().getLimit().getTimeUnit()
                        + ", 0)\n" + "select throttleKey, (count(messageID) >= " + limit.getRequestCount() + ")"
                        + " as isThrottled, expiryTimeStamp group by throttleKey\n"
                        + "INSERT ALL EVENTS into ResultStream;\n" +
                        "from ResultStream#throttler:emitOnStateChange(throttleKey, isThrottled)\n" + "select *\n"
                        + "insert into GlobalThrottleStream;\n";
        return siddhiApp;
    }
    /**
     * Builds the expected Siddhi execution app string for the default subscription
     * policy, keyed by subscriptionKey. The string is asserted verbatim elsewhere,
     * so its exact formatting matters.
     *
     * @return the Siddhi app definition as a single string
     */
    public static String createDefaultSiddhiAppforSubscriptionPolicy() {
        SubscriptionPolicy policy = createDefaultSubscriptionPolicy();
        RequestCountLimit limit = (RequestCountLimit) policy.getDefaultQuotaPolicy()
                .getLimit();
        String siddhiApp = "@App:name('subscription_" + policy.getPolicyName() + "')\n"
                + "\n@App:description('ExecutionPlan for subscription_" + policy.getPolicyName() + "')\n" +
                "\n@source(type='inMemory', topic='apim', @map(type='passThrough'))\n"
                + "define stream RequestStream (messageID string, appKey string, appTier string,"
                + " subscriptionKey string,"
                + " apiKey string, apiTier string, subscriptionTier string, resourceKey string, resourceTier string,"
                + " userId string, apiContext string, apiVersion string, appTenant string, apiTenant string, "
                + "appId string, apiName string, propertiesMap string);\n" +
                "\n@sink(type='jms', @map(type='text'),\n"
                + "factory.initial='org.apache.activemq.jndi.ActiveMQInitialContextFactory',"
                + " provider.url='tcp://localhost:61616', destination='TEST.FOO', connection.factory."
                + "type='topic',\n" + "connection.factory.jndi.name='TopicConnectionFactory')\n"
                + "define stream GlobalThrottleStream (throttleKey string, isThrottled bool"
                + ", expiryTimeStamp long);\n" +
                "\nFROM RequestStream\n" + "SELECT messageID, (subscriptionTier == '" + policy.getPolicyName()
                + "')" + " AS isEligible, subscriptionKey AS throttleKey, propertiesMap\n"
                + "INSERT INTO EligibilityStream;\n" + "\nFROM EligibilityStream[isEligible==true]#throttler:timeBatch("
                + policy.getDefaultQuotaPolicy().getLimit().getUnitTime() + " " + policy.getDefaultQuotaPolicy()
                .getLimit().getTimeUnit() + ", 0)\n" + "select throttleKey, (count(messageID) >= " + limit
                .getRequestCount() + ")" + " as isThrottled, expiryTimeStamp group by throttleKey\n"
                + "INSERT ALL EVENTS into ResultStream;\n" +
                "\nfrom ResultStream#throttler:emitOnStateChange(throttleKey, isThrottled)" + " select * "
                + "insert into GlobalThrottleStream;";
        return siddhiApp;
    }
    /**
     * Builds the expected Siddhi execution app string for the default custom
     * policy; the policy's own Siddhi query is embedded between the stream
     * definitions and the emit-on-state-change query. Asserted verbatim elsewhere.
     *
     * @return the Siddhi app definition as a single string
     */
    public static String createDefaultCustomPolicySiddhiApp() {
        CustomPolicy policy = createDefaultCustomPolicy();
        String siddhiApp =
                "@App:name('custom_" + policy.getPolicyName() + "')" + "\n@App:description('ExecutionPlan for custom_"
                        + policy.getPolicyName() + "')\n" +
                        "\n@source(type='inMemory', topic='apim', @map(type='passThrough'))\n"
                        + "define stream RequestStream (messageID string, appKey string, appTier string, "
                        + "subscriptionKey string, apiKey string, apiTier string, subscriptionTier string,"
                        + " resourceKey string, resourceTier string, userId string, apiContext string, "
                        + "apiVersion string, appTenant string, apiTenant string, appId string, apiName string, "
                        + "propertiesMap string);\n" +
                        "\n@sink(type='jms', @map(type='text'),\n"
                        + "factory.initial='org.apache.activemq.jndi.ActiveMQInitialContextFactory',"
                        + " provider.url='tcp://localhost:61616', destination='TEST.FOO',"
                        + " connection.factory.type='topic',\n"
                        + "connection.factory.jndi.name='TopicConnectionFactory')\n"
                        + "define stream GlobalThrottleStream (throttleKey string, isThrottled bool, "
                        + "expiryTimeStamp long);\n"
                        +
                        "\n" + policy.getSiddhiQuery() + "\n" +
                        "\nfrom ResultStream#throttler:emitOnStateChange(throttleKey, isThrottled)" + "\nselect *\n"
                        + "insert into GlobalThrottleStream;";
        return siddhiApp;
    }
    /**
     * Builds the expected Siddhi execution app string for the default API policy's
     * first pipeline (condition_0): eligibility combines the resource tier with the
     * header, JWT-claim, and query-param conditions of the sample pipeline.
     * Asserted verbatim elsewhere, so exact formatting matters.
     *
     * @return the Siddhi app definition as a single string
     */
    public static String createDefaultSiddhiAppForAPIThrottlePolicy() {
        APIPolicy apiPolicy = createDefaultAPIPolicy();
        String siddhiApp = "\n@App:name('resource_" + apiPolicy.getPolicyName() + "_condition_0')"
                + "\n@App:description('ExecutionPlan for resource_" + apiPolicy.getPolicyName() + "_condition_0')\n"
                + "\n@source(type='inMemory', topic='apim', @map(type='passThrough'))"
                + "\ndefine stream RequestStream (messageID string, appKey string, appTier string, "
                + "subscriptionKey string,"
                + " apiKey string, apiTier string, subscriptionTier string, resourceKey string,"
                + " resourceTier string, userId string, apiContext string, apiVersion string, "
                + "appTenant string, apiTenant "
                + "string, appId string, apiName string, propertiesMap string);\n"
                + "\n@sink(type='jms', @map(type='text'),"
                + "\nfactory.initial='org.apache.activemq.jndi.ActiveMQInitialContextFactory',"
                + " provider.url='tcp://localhost:61616', "
                + "destination='TEST.FOO', connection.factory.type='topic',"
                + "\nconnection.factory.jndi.name='TopicConnectionFactory')"
                + "\ndefine stream GlobalThrottleStream (throttleKey string, isThrottled bool,"
                + " expiryTimeStamp long);\n"
                + "\nFROM RequestStream"
                + "\nSELECT messageID, (resourceTier == 'SampleAPIPolicy' AND (regex:find('Chrome',"
                + "cast(map:get(propertiesMap,'Browser'),"
                + "'string'))) AND (regex:find('attributed',"
                + "cast(map:get(propertiesMap,'/path/path2'),'string'))) AND "
                + "(cast(map:get(propertiesMap,'Location'),'string')=='Colombo'))"
                + " AS isEligible, str:concat(resourceKey,"
                + "'_condition_0') AS throttleKey, propertiesMap" + "\nINSERT INTO EligibilityStream;\n"
                + "\nFROM EligibilityStream[isEligible==true]#throttler:timeBatch(1 s, 0)"
                + "\nselect throttleKey, (count(messageID) >= 1000) as isThrottled,"
                + " expiryTimeStamp group by throttleKey"
                + "\nINSERT ALL EVENTS into ResultStream;\n"
                + "\nfrom ResultStream#throttler:emitOnStateChange(throttleKey, isThrottled)" + "\nselect *"
                + "\ninsert into GlobalThrottleStream;\n";
        return siddhiApp;
    }
    /**
     * Builds the expected Siddhi execution app string for the default API policy's
     * API-level default limit: eligibility requires the resource tier AND that the
     * request matches NONE of the pipeline conditions (hence the NOT(...) clause).
     * Asserted verbatim elsewhere, so exact formatting matters.
     *
     * @return the Siddhi app definition as a single string
     */
    public static String createDefaultSiddhiAppForAPILevelDefaultThrottlePolicy() {
        APIPolicy apiPolicy = createDefaultAPIPolicy();
        String siddhiApp = "\n@App:name('resource_" + apiPolicy.getPolicyName() + "_default')"
                + "\n@App:description('ExecutionPlan for resource_" + apiPolicy.getPolicyName() + "_default')\n"
                + "\n@source(type='inMemory', topic='apim', @map(type='passThrough'))"
                + "\ndefine stream RequestStream (messageID string, appKey string,"
                + " appTier string, subscriptionKey string,"
                + " apiKey string, apiTier string, subscriptionTier string, resourceKey string,"
                + " resourceTier string, userId string, apiContext string, apiVersion string, appTenant string,"
                + " apiTenant string,"
                + " appId string, apiName string, propertiesMap string);\n"
                + "\n@sink(type='jms', @map(type='text'),"
                + "\nfactory.initial='org.apache.activemq.jndi.ActiveMQInitialContextFactory',"
                + " provider.url='tcp://localhost:61616',"
                + " destination='TEST.FOO', connection.factory.type='topic',"
                + "\nconnection.factory.jndi.name='TopicConnectionFactory')"
                + "\ndefine stream GlobalThrottleStream (throttleKey string, isThrottled bool,"
                + " expiryTimeStamp long);\n"
                + "\nFROM RequestStream"
                + "\nSELECT messageID, (resourceTier == 'SampleAPIPolicy' AND "
                + "NOT(((3232238595l<=cast(map:get(propertiesMap,'ip'),'Long')"
                + " AND 3232258067l>=cast(map:get(propertiesMap,'ip'),'Long')) AND "
                + "(cast(map:get(propertiesMap,'ip'),'Long')==2066353720l)) "
                + "OR ((regex:find('Chrome',cast(map:get(propertiesMap,'Browser'),'string')))"
                + " AND (regex:find('attributed',"
                + "cast(map:get(propertiesMap,'/path/path2'),'string')))"
                + " AND (cast(map:get(propertiesMap,'Location'),'string')=='Colombo'))))"
                + " AS isEligible, resourceKey AS throttleKey, propertiesMap"
                + "\nINSERT INTO EligibilityStream;\n"
                + "\nFROM EligibilityStream[isEligible==true]#throttler:timeBatch(1000 s, 0)"
                + "\nselect throttleKey, (count(messageID) >= 10000) as isThrottled,"
                + " expiryTimeStamp group by throttleKey"
                + "\nINSERT ALL EVENTS into ResultStream;\n"
                + "\nfrom ResultStream#throttler:emitOnStateChange(throttleKey, isThrottled)" + "\nselect *"
                + "\ninsert into GlobalThrottleStream;\n";
        return siddhiApp;
    }
public static String getSampleApiSwagger() throws IOException {
//swagger definition
InputStream stream = null;
String definition = null;
try {
stream = Thread.currentThread().getContextClassLoader().getResourceAsStream("sampleApi.yaml");
definition = IOUtils.toString(stream);
} finally {
stream.close();
}
return definition;
}
}
| |
package com.mesosphere.dcos.cassandra.common.tasks;
import com.mesosphere.dcos.cassandra.common.config.*;
import org.apache.mesos.Protos;
import org.apache.mesos.dcos.Capabilities;
import org.apache.mesos.offer.VolumeRequirement;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.contrib.java.lang.system.EnvironmentVariables;
import org.mockito.Mockito;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import static org.mockito.Mockito.when;
/**
* This class tests the CassandraDaemonTask class.
*/
public class CassandraDaemonTaskTest {
    // JUnit rule allowing tests to set process environment variables (shared class-wide)
    @ClassRule
    public static final EnvironmentVariables ENV_VARS = new EnvironmentVariables();
    // Name used for every daemon task created by these tests
    private static final String TEST_DAEMON_NAME = "test-daemon-task-name";
    // Random configuration id; its string form doubles as the config name
    private static final UUID TEST_CONFIG_ID = UUID.randomUUID();
    public static final String TEST_CONFIG_NAME = TEST_CONFIG_ID.toString();
    // Recreated in beforeEach() before every test
    private CassandraDaemonTask.Factory testTaskFactory;
    private ExecutorConfig testExecutorConfig;
    private CassandraTaskExecutor testTaskExecutor;
    /**
     * Per-test setup: builds a task factory whose capabilities mock reports
     * named-VIP support, a default executor config (cacheFetchedUris=false),
     * and a task executor based on that config.
     */
    @Before
    public void beforeEach() throws URISyntaxException, IOException {
        Capabilities mockCapabilities = Mockito.mock(Capabilities.class);
        when(mockCapabilities.supportsNamedVips()).thenReturn(true);
        testTaskFactory = new CassandraDaemonTask.Factory(mockCapabilities);
        testExecutorConfig = ExecutorConfig.create(
                "test-cmd",
                Arrays.asList("arg0"),
                1.0,
                256,
                500,
                1000,
                "java-home",
                new URI("http://jre-location"),
                new URI("http://executor-location"),
                new URI("http://cassandra-location"),
                new URI("http://libmesos-location"),
                false);
        testTaskExecutor = CassandraTaskExecutor.create(
                "test-framework-id",
                TEST_DAEMON_NAME,
                "test-role",
                "test-principal",
                testExecutorConfig);
    }
@Test
public void testConstructCassandraDaemonTask() {
Assert.assertNotNull(testTaskFactory.create(
TEST_DAEMON_NAME,
TEST_CONFIG_NAME,
testTaskExecutor,
CassandraConfig.DEFAULT));
}
    /**
     * Updating with the identical config and executor config must yield a task
     * whose normalized TaskInfo equals the original's.
     */
    @Test
    public void testUpdateUnchangedConfig() {
        CassandraDaemonTask daemonTask = testTaskFactory.create(
                TEST_DAEMON_NAME,
                TEST_CONFIG_NAME,
                testTaskExecutor,
                CassandraConfig.DEFAULT);
        CassandraDaemonTask updatedTask = daemonTask.updateConfig(
                CassandraConfig.DEFAULT,
                testExecutorConfig,
                TEST_CONFIG_ID);
        Assert.assertEquals(normalizeCassandraTaskInfo(daemonTask), normalizeCassandraTaskInfo(updatedTask));
    }
    /**
     * Updating the CPU value should change the normalized TaskInfo, carry the new
     * CPU into the task's config, and keep all fetched URIs non-cacheable
     * (executor config was created with cacheFetchedUris=false).
     *
     * NOTE(review): newCpu is 1.0; this only exercises a change if
     * CassandraConfig.DEFAULT uses a different CPU value — confirm.
     */
    @Test
    public void testUpdateCpuConfig() {
        CassandraDaemonTask daemonTask = testTaskFactory.create(
                TEST_DAEMON_NAME,
                TEST_CONFIG_NAME,
                testTaskExecutor,
                CassandraConfig.DEFAULT);
        double newCpu = 1.0;
        CassandraConfig updatedConfig = CassandraConfig.create(
                "2.2.5",
                newCpu,
                4096,
                10240,
                VolumeRequirement.VolumeType.ROOT,
                "",
                HeapConfig.DEFAULT,
                Location.DEFAULT,
                7199,
                false,
                UUID.randomUUID().toString(),
                CassandraApplicationConfig.builder().build());
        CassandraDaemonTask updatedTask = daemonTask.updateConfig(
                updatedConfig,
                testExecutorConfig,
                TEST_CONFIG_ID);
        Assert.assertNotEquals(normalizeCassandraTaskInfo(daemonTask), normalizeCassandraTaskInfo(updatedTask));
        Assert.assertEquals(newCpu, updatedTask.getConfig().getCpus(), 0.0);
        Assert.assertTrue(allUrisAreCacheable(updatedTask.getTaskInfo().getExecutor().getCommand().getUrisList(), false));
    }
@Test
public void testUpdateMemConfig() {
CassandraDaemonTask daemonTask = testTaskFactory.create(
TEST_DAEMON_NAME,
TEST_CONFIG_NAME,
testTaskExecutor,
CassandraConfig.DEFAULT);
int newMem = 1000;
CassandraConfig updatedConfig = CassandraConfig.create(
"2.2.5",
0.2,
newMem,
10240,
VolumeRequirement.VolumeType.ROOT,
"",
HeapConfig.DEFAULT,
Location.DEFAULT,
7199,
false,
UUID.randomUUID().toString(),
CassandraApplicationConfig.builder().build());
CassandraDaemonTask updatedTask = daemonTask.updateConfig(
updatedConfig,
testExecutorConfig,
TEST_CONFIG_ID);
Assert.assertNotEquals(normalizeCassandraTaskInfo(daemonTask), normalizeCassandraTaskInfo(updatedTask));
Assert.assertEquals(newMem, updatedTask.getConfig().getMemoryMb(), 0.0);
double taskInfoDisk = getScalar(updatedTask.getTaskInfo().getResourcesList(), "mem");
Assert.assertEquals(newMem, taskInfoDisk, 0.0);
}
    /**
     * Updating the disk value changes the task's config, but the "disk" scalar on
     * the generated TaskInfo must stay at its original value — disk resources
     * cannot be resized once the task exists.
     */
    @Test
    public void testUpdateDiskConfig() {
        CassandraDaemonTask daemonTask = testTaskFactory.create(
                TEST_DAEMON_NAME,
                TEST_CONFIG_NAME,
                testTaskExecutor,
                CassandraConfig.DEFAULT);
        int newDisk = 5000;
        CassandraConfig updatedConfig = CassandraConfig.create(
                "2.2.5",
                0.2,
                4096,
                newDisk,
                VolumeRequirement.VolumeType.ROOT,
                "",
                HeapConfig.DEFAULT,
                Location.DEFAULT,
                7199,
                false,
                UUID.randomUUID().toString(),
                CassandraApplicationConfig.builder().build());
        CassandraDaemonTask updatedTask = daemonTask.updateConfig(
                updatedConfig,
                testExecutorConfig,
                TEST_CONFIG_ID);
        Assert.assertNotEquals(normalizeCassandraTaskInfo(daemonTask), normalizeCassandraTaskInfo(updatedTask));
        Assert.assertEquals(newDisk, updatedTask.getConfig().getDiskMb(), 0.0);
        double originalTaskInfoDisk = getScalar(daemonTask.getTaskInfo().getResourcesList(), "disk");
        double updatedTaskInfoDisk = getScalar(updatedTask.getTaskInfo().getResourcesList(), "disk");
        // Updating the Disk should not result in updated disk. Disk cannot be updated.
        Assert.assertEquals(originalTaskInfoDisk, updatedTaskInfoDisk, 0.0);
    }
    /**
     * Changing the cassandra artifact location in the executor config should
     * change the task and surface the new URI among the executor's (four) URIs.
     *
     * NOTE(review): the reassignment of the testTaskExecutor field below appears
     * unused — updateConfig() is called with updatedTestExecutorConfig directly.
     * Confirm whether the assignment can be removed.
     */
    @Test
    public void testUpdateCassandraLocation() throws URISyntaxException {
        CassandraDaemonTask daemonTask = testTaskFactory.create(
                TEST_DAEMON_NAME,
                TEST_CONFIG_NAME,
                testTaskExecutor,
                CassandraConfig.DEFAULT);
        ExecutorConfig updatedTestExecutorConfig = ExecutorConfig.create(
                "test-cmd",
                Arrays.asList("arg0"),
                1.0,
                256,
                500,
                1000,
                "java-home",
                new URI("http://jre-location"),
                new URI("http://executor-location"),
                new URI("http://cassandra-location-updated"),
                new URI("http://libmesos-location"),
                false);
        testTaskExecutor = CassandraTaskExecutor.create(
                "test-framework-id",
                TEST_DAEMON_NAME,
                "test-role",
                "test-principal",
                updatedTestExecutorConfig);
        CassandraDaemonTask updatedTask = daemonTask.updateConfig(
                CassandraConfig.DEFAULT,
                updatedTestExecutorConfig,
                TEST_CONFIG_ID);
        Assert.assertNotEquals(normalizeCassandraTaskInfo(daemonTask), normalizeCassandraTaskInfo(updatedTask));
        Assert.assertEquals(4, updatedTask.getExecutor().getURIs().size());
        Assert.assertTrue(updatedTask.getExecutor().getURIs().contains("http://cassandra-location-updated"));
    }
    /**
     * With publishDiscoveryInfo enabled, the TaskInfo's DiscoveryInfo should be
     * externally visible, named "&lt;cluster&gt;.&lt;task&gt;", and expose the
     * native transport port 9042 as "NativeTransport".
     */
    @Test
    public void testPublishDiscoveryInfo() {
        CassandraConfig cassandraConfig = CassandraConfig.builder().setPublishDiscoveryInfo(true).build();
        CassandraDaemonTask daemonTask = testTaskFactory.create(
                TEST_DAEMON_NAME,
                TEST_CONFIG_NAME,
                testTaskExecutor,
                cassandraConfig);
        Protos.DiscoveryInfo discovery = daemonTask.getTaskInfo().getDiscovery();
        Assert.assertEquals("Test Cluster.test-daemon-task-name", discovery.getName());
        Assert.assertEquals(Protos.DiscoveryInfo.Visibility.EXTERNAL, discovery.getVisibility());
        Assert.assertEquals(1, discovery.getPorts().getPortsCount());
        Assert.assertEquals(9042, discovery.getPorts().getPorts(0).getNumber());
        Assert.assertEquals("NativeTransport", discovery.getPorts().getPorts(0).getName());
    }
@Test
public void testDcosNamedVipDiscoveryInfo() {
    // With the default config (no publishDiscoveryInfo), the discovery entry uses
    // the bare task name and advertises the tcp protocol on port 9042.
    CassandraDaemonTask task = testTaskFactory.create(
        TEST_DAEMON_NAME,
        TEST_CONFIG_NAME,
        testTaskExecutor,
        CassandraConfig.DEFAULT);
    Protos.DiscoveryInfo info = task.getTaskInfo().getDiscovery();
    Assert.assertEquals("test-daemon-task-name", info.getName());
    Assert.assertEquals(Protos.DiscoveryInfo.Visibility.EXTERNAL, info.getVisibility());
    Assert.assertEquals(1, info.getPorts().getPortsCount());
    Assert.assertEquals(9042, info.getPorts().getPorts(0).getNumber());
    Assert.assertEquals("tcp", info.getPorts().getPorts(0).getProtocol());
}
@Test
public void testUpdateCacheFetchedUris() throws URISyntaxException {
    // Updating to an executor config with cacheFetchedUris=true must mark every
    // fetched URI in the resulting executor command as cacheable.
    CassandraDaemonTask originalTask = testTaskFactory.create(
        TEST_DAEMON_NAME,
        TEST_CONFIG_NAME,
        testTaskExecutor,
        CassandraConfig.DEFAULT);
    ExecutorConfig cachingConfig = ExecutorConfig.create(
        "test-cmd",
        Arrays.asList("arg0"),
        1.0,
        256,
        500,
        1000,
        "java-home",
        new URI("http://jre-location"),
        new URI("http://executor-location"),
        new URI("http://cassandra-location"),
        new URI("http://libmesos-location"),
        /* cacheFetchedUris */ true);
    testTaskExecutor = CassandraTaskExecutor.create(
        "test-framework-id",
        TEST_DAEMON_NAME,
        "test-role",
        "test-principal",
        cachingConfig);
    CassandraDaemonTask updatedTask = originalTask.updateConfig(
        CassandraConfig.DEFAULT,
        cachingConfig,
        TEST_CONFIG_ID);
    Assert.assertTrue(
        allUrisAreCacheable(updatedTask.getTaskInfo().getExecutor().getCommand().getUrisList(), true));
}
@Test
public void testUpdateLibmesosLocation() throws URISyntaxException, UnsupportedEncodingException {
    // Simulate the default libmesos location that would normally come from
    // parsing scheduler.yml.
    final String DEFAULT_LIBMESOS_LOCATION = "http://libmesos-default";
    ENV_VARS.set("EXECUTOR_LIBMESOS_LOCATION", DEFAULT_LIBMESOS_LOCATION);
    // Executor configs persisted in zookeeper before the libmesosLocation field
    // existed carry a null location; they must still construct/deserialize and
    // fall back to the environment default.
    ExecutorConfig legacyConfig = ExecutorConfig.create(
        "test-cmd",
        Arrays.asList("arg0"),
        1.0,
        256,
        500,
        1000,
        "java-home",
        "http://jre-location",
        "http://executor-location",
        "http://cassandra-location",
        /* libmesosLocation */ null,
        false,
        false);
    testTaskExecutor = CassandraTaskExecutor.create(
        "test-framework-id",
        TEST_DAEMON_NAME,
        "test-role",
        "test-principal",
        legacyConfig);
    CassandraDaemonTask legacyTask = testTaskFactory.create(
        TEST_DAEMON_NAME,
        TEST_CONFIG_NAME,
        testTaskExecutor,
        CassandraConfig.DEFAULT);
    Assert.assertEquals(4, legacyTask.getExecutor().getURIs().size());
    Assert.assertTrue(legacyTask.getExecutor().getURIs().contains(DEFAULT_LIBMESOS_LOCATION));
    // An explicit libmesos location in a newer config must override the default.
    ExecutorConfig overridingConfig = ExecutorConfig.create(
        "test-cmd",
        Arrays.asList("arg0"),
        1.0,
        256,
        500,
        1000,
        "java-home",
        "http://jre-location",
        "http://executor-location",
        "http://cassandra-location",
        "http://libmesos-location-new",
        false,
        false);
    testTaskExecutor = CassandraTaskExecutor.create(
        "test-framework-id",
        TEST_DAEMON_NAME,
        "test-role",
        "test-principal",
        overridingConfig);
    CassandraDaemonTask updatedTask = legacyTask.updateConfig(
        CassandraConfig.DEFAULT,
        overridingConfig,
        TEST_CONFIG_ID);
    Assert.assertTrue(updatedTask.getExecutor().getURIs().contains("http://libmesos-location-new"));
}
// Blanks out the per-launch identifiers (task id, executor id) so that two task
// infos can be compared purely on their configuration content.
private Protos.TaskInfo normalizeCassandraTaskInfo(CassandraDaemonTask daemonTask) {
    Protos.TaskInfo taskInfo = daemonTask.getTaskInfo();
    Protos.ExecutorInfo scrubbedExecutor = Protos.ExecutorInfo.newBuilder(taskInfo.getExecutor())
        .setExecutorId(Protos.ExecutorID.newBuilder().setValue(""))
        .build();
    return Protos.TaskInfo.newBuilder(taskInfo)
        .setTaskId(Protos.TaskID.newBuilder().setValue(""))
        .setExecutor(scrubbedExecutor)
        .build();
}
// Returns the scalar value of the first resource whose name matches, or null
// when no resource with that name is present in the list.
private Double getScalar(List<Protos.Resource> resources, String name) {
    for (Protos.Resource candidate : resources) {
        if (candidate.getName().equals(name)) {
            return candidate.getScalar().getValue();
        }
    }
    return null;
}
// Returns true iff every URI in the given list has its cache flag equal to the
// given cacheable value (vacuously true for an empty list).
private boolean allUrisAreCacheable(List<Protos.CommandInfo.URI> uris, boolean cacheable) {
    return uris.stream().allMatch(uri -> uri.getCache() == cacheable);
}
}
| |
/*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.infinispan.schematic.internal.document;
import static org.junit.Assert.assertNotNull;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
import org.bson.BSONObject;
import org.bson.BasicBSONCallback;
import org.bson.BasicBSONDecoder;
import org.bson.BasicBSONEncoder;
import org.bson.BasicBSONObject;
import org.bson.types.BSONTimestamp;
import org.bson.types.BasicBSONList;
import org.codehaus.jackson.JsonToken;
import org.infinispan.schematic.FixFor;
import org.infinispan.schematic.TestUtil;
import org.infinispan.schematic.document.Binary;
import org.infinispan.schematic.document.Code;
import org.infinispan.schematic.document.CodeWithScope;
import org.infinispan.schematic.document.Document;
import org.infinispan.schematic.document.Json;
import org.infinispan.schematic.document.MaxKey;
import org.infinispan.schematic.document.MinKey;
import org.infinispan.schematic.document.ObjectId;
import org.infinispan.schematic.document.Symbol;
import org.infinispan.schematic.document.Timestamp;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * Round-trip tests for the schematic BSON reader/writer: each test writes a
 * {@link Document} to BSON bytes, reads it back, and (optionally) cross-checks
 * the byte stream against the MongoDB BSON encoder/decoder.
 */
public class BsonReadingAndWritingTest {

    protected BsonReader reader;
    protected BsonWriter writer;
    protected Document input;
    protected Document output;
    // When true, the round-trip helpers dump documents and timing info to stdout.
    protected boolean print;

    @Before
    public void beforeTest() {
        reader = new BsonReader();
        writer = new BsonWriter();
        print = false;
    }

    @After
    public void afterTest() {
        reader = null;
        writer = null;
    }

    @Test
    public void shouldReadExampleBsonStream() throws IOException {
        // Canonical example from the BSON spec, encoding {"hello": "world"}:
        // "\x16\x00\x00\x00\x02hello\x00\x06\x00\x00\x00world\x00\x00"
        byte[] bytes = new byte[] {0x16, 0x00, 0x00, 0x00, 0x02, 0x68, 0x65, 0x6c, 0x6c, 0x6f, 0x00, 0x06, 0x00, 0x00, 0x00,
            0x77, 0x6f, 0x72, 0x6c, 0x64, 0x00, 0x00};
        output = reader.read(new ByteArrayInputStream(bytes));
        String json = Json.write(output);
        String expected = "{ \"hello\" : \"world\" }";
        if (print) {
            System.out.println(json);
            System.out.flush();
        }
        assert expected.equals(json);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithStringValue() {
        input = new BasicDocument("name", "Joe");
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithBooleanValue() {
        // FIX: previously this test round-tripped 3L, duplicating the Long test
        // and leaving the BSON boolean type uncovered; use an actual boolean.
        // (Boolean round-tripping is exercised indirectly by the array test below.)
        input = new BasicDocument("foo", true);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithIntValue() {
        input = new BasicDocument("foo", 3);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithLongValue() {
        input = new BasicDocument("foo", 3L);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithFloatValue() {
        input = new BasicDocument("foo", 3.0f);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithDoubleValue() {
        input = new BasicDocument("foo", 3.0d);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithDateValue() {
        input = new BasicDocument("foo", new Date());
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithTimestampValue() {
        input = new BasicDocument("foo", new Timestamp(new Date()));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithObjectId() {
        // print = true;
        // NOTE(review): Math.abs(Integer.MIN_VALUE) is still negative; in the
        // vanishingly unlikely case the truncated time hits that value this would
        // pass a negative time to ObjectId — acceptable for a test.
        int time = Math.abs((int) new Date().getTime());
        if (print) System.out.println("time value: " + time);
        input = new BasicDocument("foo", new ObjectId(time, 1, 2, 3));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithCode() {
        input = new BasicDocument("foo", new Code("bar"));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithCodeWithScope() {
        Document scope = new BasicDocument("baz", "bam", "bak", "bat");
        input = new BasicDocument("foo", new CodeWithScope("bar", scope));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithMaxKey() {
        // MaxKey has no Mongo-comparable representation here, so skip the
        // cross-implementation byte comparison.
        input = new BasicDocument("foo", MaxKey.getInstance());
        assertRoundtrip(input, false);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithMinKey() {
        input = new BasicDocument("foo", MinKey.getInstance());
        assertRoundtrip(input, false);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithSymbol() {
        input = new BasicDocument("foo", new Symbol("bar"));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithNull() {
        input = new BasicDocument("foo", null);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithBinary1() {
        byte[] data = new byte[] {0x16, 0x00, 0x00, 0x00, 0x02, 0x68, 0x65, 0x6c};
        input = new BasicDocument("foo", new Binary(data));
        assertRoundtrip(input);
    }

    @Test
    //Fix-For MODE-1575
    public void shouldRoundTripSimpleBsonObjectWithBinary2() throws Exception {
        // Round-trip a larger binary payload loaded from the "binary" classpath resource.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        InputStream is = getClass().getClassLoader().getResourceAsStream("binary");
        assertNotNull(is);
        try {
            byte[] buff = new byte[1024];
            int read;
            while ((read = is.read(buff)) != -1) {
                bos.write(buff, 0, read);
            }
        } finally {
            bos.close();
            is.close();
        }
        input = new BasicDocument("foo", new Binary(bos.toByteArray()));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithUuid() {
        input = new BasicDocument("foo", UUID.randomUUID());
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithPattern() {
        // print = true;
        input = new BasicDocument("foo", Pattern.compile("[CH]at\\s+"));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithPatternAndFlags() {
        // print = true;
        input = new BasicDocument("foo", Pattern.compile("[CH]at\\s+", Pattern.CASE_INSENSITIVE | Pattern.MULTILINE));
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripSimpleBsonObjectWithArray() {
        // Mixed-type array exercising string, symbol, int, long, double, boolean and null.
        BasicArray array = new BasicArray();
        array.addValue("value1");
        array.addValue(new Symbol("value2"));
        array.addValue(30);
        array.addValue(40L);
        array.addValue(4.33d);
        array.addValue(false);
        array.addValue(null);
        array.addValue("value2");
        input = new BasicDocument("foo", array);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripBsonObjectWithTwoFields() {
        input = new BasicDocument("name", "Joe", "age", 35);
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripBsonObjectWithThreeFields() {
        input = new BasicDocument("name", "Joe", "age", 35, "nick", "joey");
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripBsonObjectWithNestedDocument() {
        BasicDocument address = new BasicDocument("street", "100 Main", "city", "Springfield", "zip", 12345);
        input = new BasicDocument("name", "Joe", "age", 35, "address", address, "nick", "joey");
        assertRoundtrip(input);
    }

    @Test
    public void shouldRoundTripLargeModeShapeDocument() throws Exception {
        Document doc = Json.read(TestUtil.resource("json/sample-large-modeshape-doc.json"));
        // OutputStream os = new FileOutputStream("src/test/resources/json/sample-large-modeshape-doc2.json");
        // Json.writePretty(doc, os);
        // os.flush();
        // os.close();
        assertRoundtrip(doc);
    }

    @Test
    @FixFor( "MODE-2074" )
    public void shouldRoundTripBsonWithLargeStringField() throws Exception {
        //use a string which overflows the default buffer BufferCache.MINIMUM_SIZE
        final String largeString = readFile("json/sample-large-modeshape-doc3.json");
        Document document = new BasicDocument("largeString", largeString);
        assertRoundtrip(document);
    }

    @Test
    @FixFor( "MODE-2074" )
    public void shouldRoundTripBsonWithLargeStringFieldFromMultipleThreads() throws Exception {
        // Hammer the writer/reader concurrently to catch buffer-sharing bugs.
        final String largeString = readFile("json/sample-large-modeshape-doc3.json");
        int threadCount = 10;
        List<Future<Void>> results = new ArrayList<Future<Void>>();
        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        for (int i = 0; i < threadCount; i++) {
            results.add(executorService.submit(new Callable<Void>() {
                @Override
                public Void call() throws Exception {
                    Document document = new BasicDocument("largeString", largeString);
                    assertRoundtrip(document);
                    return null;
                }
            }));
        }
        for (Future<Void> result : results) {
            result.get(1, TimeUnit.SECONDS);
        }
    }

    @Test
    @FixFor( "MODE-2430" )
    public void shouldRoundTripLargeStringsSuccessively() throws Exception {
        // Write a sequence of strings just over the default buffer size, each with a
        // different fill character, to catch stale-buffer reuse between writes.
        int limit = 1048 * 8; // the default buffer size
        int iterations = 20;
        char letter = 'a';
        for (int i = 0; i < iterations; i++) {
            int size = limit + i;
            char[] chars = new char[size];
            Arrays.fill(chars, letter);
            letter = (char)((byte) letter + 1);
            String str = new String(chars);
            Document document = new BasicDocument("largeString", str);
            assertRoundtrip(document, false);
        }
    }

    /**
     * Reads a classpath resource fully into a String.
     * NOTE(review): uses the platform default charset via InputStreamReader —
     * presumably fine for these ASCII test fixtures.
     */
    protected String readFile(String filePath) throws IOException {
        InputStreamReader reader = new InputStreamReader(TestUtil.resource(filePath));
        StringBuilder stringBuilder = new StringBuilder();
        boolean error = false;
        try {
            int numRead = 0;
            char[] buffer = new char[1024];
            while ((numRead = reader.read(buffer)) > -1) {
                stringBuilder.append(buffer, 0, numRead);
            }
        } catch (IOException e) {
            error = true; // this error should be thrown, even if there is an error closing reader
            throw e;
        } catch (RuntimeException e) {
            error = true; // this error should be thrown, even if there is an error closing reader
            throw e;
        } finally {
            try {
                reader.close();
            } catch (IOException e) {
                if (!error) throw e;
            }
        }
        return stringBuilder.toString();
    }

    protected void assertRoundtrip( Document input ) {
        assertRoundtrip(input, true);
    }

    /**
     * Writes the document to BSON, reads it back, and asserts equality with the
     * original. When {@code compareToOtherImpls} is true the bytes are also
     * cross-checked against the MongoDB BSON encoder/decoder.
     */
    protected void assertRoundtrip( Document input,
                                    boolean compareToOtherImpls ) {
        assert input != null;
        Document output = writeThenRead(input, compareToOtherImpls);
        if (print) {
            System.out.println("********************************************************************************");
            System.out.println("INPUT : " + input);
            System.out.println();
            System.out.println("OUTPUT: " + output);
            System.out.println("********************************************************************************");
            System.out.flush();
        }
        Assert.assertEquals("Round trip failed", input, output);
    }

    protected Document writeThenRead( Document object,
                                      boolean compareToOtherImpls ) {
        try {
            long start = System.nanoTime();
            byte[] bytes = writer.write(object);
            long writeTime = System.nanoTime() - start;
            start = System.nanoTime();
            Document result = reader.read(new ByteArrayInputStream(bytes));
            long readTime = System.nanoTime() - start;
            if (compareToOtherImpls) {
                // Convert to MongoDB, write to bytes, and compare ...
                BSONObject mongoData = createMongoData(object);
                start = System.nanoTime();
                byte[] mongoBytes = new BasicBSONEncoder().encode(mongoData);
                long mongoWriteTime = System.nanoTime() - start;
                assertSame(bytes, mongoBytes, "BSON   ", "Mongo  ");
                // FYI: The Jackson BSON library writes several of the types incorrectly,
                // whereas the MongoDB library seems to write things per the spec.
                // // Convert to Jackson BSON, write to bytes, and compare ...
                // ByteArrayOutputStream stream2 = new ByteArrayOutputStream();
                // ObjectMapper om = new ObjectMapper(new BsonFactory());
                // Map<String, Object> jacksonData = createJacksonData(object);
                // om.writeValue(stream2, jacksonData);
                // byte[] jacksonBytes = stream2.toByteArray();
                // assertSame(bytes, jacksonBytes, "BSON   ", "Jackson");
                start = System.nanoTime();
                new BasicBSONDecoder().decode(bytes, new BasicBSONCallback());
                long mongoReadTime = System.nanoTime() - start;
                Document fromMongo = reader.read(new ByteArrayInputStream(mongoBytes));
                if (!fromMongo.equals(result)) {
                    System.out.println("from Schematic: " + result);
                    System.out.println("from Mongo:     " + fromMongo);
                    assert false : "Document read from bytes written by Mongo did not match expected document: " + result;
                }
                if (print) {
                    System.out.println("Reading with Schematic: " + percent(readTime, mongoReadTime) + " than Mongo");
                    System.out.println("Writing with Schematic: " + percent(writeTime, mongoWriteTime) + " than Mongo");
                }
            }
            return result;
        } catch (IOException e) {
            throw new AssertionError(e);
        }
    }

    protected String time( long nanos ) {
        // FIX: the original called TimeUnit.NANOSECONDS.convert(nanos, NANOSECONDS),
        // which is an identity conversion; just format the value directly.
        return nanos + "ns";
    }

    protected String percent( long nanos1,
                              long nanos2 ) {
        float percent = 100.0f * (float)(((double)nanos2 - (double)nanos1) / nanos1);
        if (percent < 0.0d) {
            return "" + -percent + "% slower";
        }
        return "" + percent + "% faster";
    }

    /** Converts a schematic Document into the equivalent MongoDB BSONObject. */
    protected BSONObject createMongoData( Document document ) {
        BSONObject obj = new BasicBSONObject();
        for (Document.Field field : document.fields()) {
            Object value = field.getValue();
            obj.put(field.getName(), createMongoData(value));
        }
        return obj;
    }

    /** Maps a single schematic value onto its MongoDB BSON counterpart. */
    protected Object createMongoData( Object value ) {
        if (value instanceof MinKey) {
            value = "MinKey";
        } else if (value instanceof MaxKey) {
            value = "MaxKey";
        } else if (value instanceof Symbol) {
            Symbol symbol = (Symbol)value;
            value = new org.bson.types.Symbol(symbol.getSymbol());
        } else if (value instanceof ObjectId) {
            ObjectId id = (ObjectId)value;
            value = new org.bson.types.ObjectId(id.getBytes());
        } else if (value instanceof Timestamp) {
            Timestamp ts = (Timestamp)value;
            value = new BSONTimestamp(ts.getTime(), ts.getInc());
        } else if (value instanceof CodeWithScope) {
            // Must precede the Code branch: CodeWithScope extends/specializes Code.
            CodeWithScope code = (CodeWithScope)value;
            value = new org.bson.types.CodeWScope(code.getCode(), createMongoData(code.getScope()));
        } else if (value instanceof Code) {
            Code code = (Code)value;
            value = new org.bson.types.Code(code.getCode());
        } else if (value instanceof Binary) {
            Binary binary = (Binary)value;
            value = new org.bson.types.Binary(binary.getBytes());
        } else if (value instanceof List) {
            List<?> values = (List<?>)value;
            BasicBSONList newValues = new BasicBSONList();
            for (Object v : values) {
                newValues.add(createMongoData(v));
            }
            value = newValues;
        } else if (value instanceof Document) {
            value = createMongoData((Document)value);
        }
        return value;
    }

    /** Converts a schematic Document into a Jackson-BSON-compatible map (currently unused; see FYI above). */
    protected Map<String, Object> createJacksonData( Document document ) {
        Map<String, Object> data = new LinkedHashMap<String, Object>();
        for (Document.Field field : document.fields()) {
            Object value = field.getValue();
            data.put(field.getName(), createJacksonData(value));
        }
        return data;
    }

    protected Object createJacksonData( Object value ) {
        // NOTE(review): MinKey/MaxKey map to the JsonToken.VALUE_STRING enum constant
        // itself, not a string — presumably intentional for the disabled Jackson
        // comparison, but verify before re-enabling it.
        if (value instanceof MinKey) {
            value = JsonToken.VALUE_STRING;
        } else if (value instanceof MaxKey) {
            value = JsonToken.VALUE_STRING;
        } else if (value instanceof Symbol) {
            value = new de.undercouch.bson4jackson.types.Symbol(((Symbol)value).getSymbol());
        } else if (value instanceof ObjectId) {
            ObjectId id = (ObjectId)value;
            value = new de.undercouch.bson4jackson.types.ObjectId(id.getTime(), id.getMachine(), id.getInc());
        } else if (value instanceof Timestamp) {
            Timestamp ts = (Timestamp)value;
            value = new de.undercouch.bson4jackson.types.Timestamp(ts.getTime(), ts.getInc());
        } else if (value instanceof CodeWithScope) {
            CodeWithScope code = (CodeWithScope)value;
            value = new de.undercouch.bson4jackson.types.JavaScript(code.getCode(), createJacksonData(code.getScope()));
        } else if (value instanceof Code) {
            Code code = (Code)value;
            value = new de.undercouch.bson4jackson.types.JavaScript(code.getCode(), null);
        } else if (value instanceof List) {
            List<?> values = (List<?>)value;
            List<Object> newValues = new ArrayList<Object>(values.size());
            for (Object v : values) {
                newValues.add(createJacksonData(v));
            }
            value = newValues;
        } else if (value instanceof Document) {
            value = createJacksonData((Document)value);
        }
        return value;
    }

    /**
     * Asserts that the two byte arrays are identical, printing both (with labels)
     * before failing when they differ.
     */
    protected void assertSame( byte[] b1,
                               byte[] b2,
                               String name1,
                               String name2 ) {
        // FIX: the original used b1.equals(b2), which for arrays is reference
        // equality and never true for two distinct arrays; compare contents.
        if (Arrays.equals(b1, b2)) return;
        int s1 = b1.length;
        int s2 = b2.length;
        String sb1 = toString(b1);
        String sb2 = toString(b2);
        if (!sb1.equals(sb2)) {
            System.out.println(name1 + " size: " + padLeft(s1, 3) + " content: " + sb1);
            System.out.println(name2 + " size: " + padLeft(s2, 3) + " content: " + sb2);
            assert false;
        }
    }

    /** Left-pads the value's string form with spaces to at least the given width. */
    protected String padLeft( Object value,
                              int width ) {
        String result = value != null ? value.toString() : "null";
        while (result.length() < width) {
            result = " " + result;
        }
        return result;
    }

    /** Renders a byte array as a row of space-separated, right-aligned signed bytes. */
    protected String toString( byte[] bytes ) {
        StringBuilder sb = new StringBuilder();
        for (byte b : bytes) {
            sb.append(padLeft((int)b, 4)).append(' ');
        }
        return sb.toString();
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.directory.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ds-2015-04-16/ListTagsForResource" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTagsForResourceResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Tags returned by the ListTagsForResource operation; lazily initialized by {@link #getTags()}. */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /** Pagination token; reserved for future use. */
    private String nextToken;

    /**
     * Returns the list of tags returned by the ListTagsForResource operation.
     * An empty internal list is created on first access, so this never returns {@code null}.
     *
     * @return the list of tags (possibly empty, never {@code null})
     */
    public java.util.List<Tag> getTags() {
        if (tags == null) {
            tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return tags;
    }

    /**
     * Replaces the tags with a copy of the given collection, or clears them when
     * the argument is {@code null}.
     *
     * @param tags the tags returned by the ListTagsForResource operation
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }
        this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * Appends the given tags to the existing list (creating it when absent).
     * Use {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)}
     * to replace the existing values instead of appending.
     *
     * @param tags the tags to append
     * @return this result, for call chaining
     */
    public ListTagsForResourceResult withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (Tag tag : tags) {
            this.tags.add(tag);
        }
        return this;
    }

    /**
     * Replaces the tags with the given collection (delegates to {@link #setTags}).
     *
     * @param tags the tags returned by the ListTagsForResource operation
     * @return this result, for call chaining
     */
    public ListTagsForResourceResult withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Sets the pagination token. Reserved for future use.
     *
     * @param nextToken reserved for future use
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token. Reserved for future use.
     *
     * @return reserved for future use
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token and returns this result. Reserved for future use.
     *
     * @param nextToken reserved for future use
     * @return this result, for call chaining
     */
    public ListTagsForResourceResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and
     * debugging. Sensitive data would be redacted with a placeholder value.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getTags() != null) {
            sb.append("Tags: ").append(getTags()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // Also rejects null; comparison goes through the getters so that an unset
        // tags field (lazily initialized to empty) compares like the original.
        if (!(obj instanceof ListTagsForResourceResult)) {
            return false;
        }
        ListTagsForResourceResult that = (ListTagsForResourceResult) obj;
        if ((that.getTags() == null) != (this.getTags() == null)) {
            return false;
        }
        if (that.getTags() != null && !that.getTags().equals(this.getTags())) {
            return false;
        }
        if ((that.getNextToken() == null) != (this.getNextToken() == null)) {
            return false;
        }
        if (that.getNextToken() != null && !that.getNextToken().equals(this.getNextToken())) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public ListTagsForResourceResult clone() {
        try {
            return (ListTagsForResourceResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl;
import static org.apache.drill.TestBuilder.listOf;
import static org.apache.drill.TestBuilder.mapOf;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import io.netty.buffer.DrillBuf;
import java.util.ArrayList;
import java.util.List;
import mockit.Injectable;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.TestBuilder;
import org.apache.drill.exec.compile.ClassTransformer;
import org.apache.drill.exec.compile.ClassTransformer.ScalarReplacementOption;
import org.apache.drill.exec.expr.fn.impl.DateUtility;
import org.apache.drill.exec.proto.UserBitShared.QueryType;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.rpc.RpcException;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.rpc.user.UserServer;
import org.apache.drill.exec.server.Drillbit;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.server.options.OptionManager;
import org.apache.drill.exec.server.options.OptionValue;
import org.apache.drill.exec.server.options.OptionValue.OptionType;
import org.apache.drill.exec.util.ByteBufUtil.HadoopWritables;
import org.apache.drill.exec.util.VectorUtil;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.VarCharVector;
import org.joda.time.DateTime;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Resources;
public class TestConvertFunctions extends BaseTestQuery {
// private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestConvertFunctions.class);
// Classpath locations of the logical/physical conversion test plan definitions.
private static final String CONVERSION_TEST_LOGICAL_PLAN = "functions/conv/conversionTestWithLogicalPlan.json";
private static final String CONVERSION_TEST_PHYSICAL_PLAN = "functions/conv/conversionTestWithPhysicalPlan.json";
// Tolerance for floating-point comparisons in the conversion tests.
private static final float DELTA = (float) 0.0001;
// "1980-01-01 01:23:45.678"
// Big-endian and little-endian byte-string encodings used with the *_EPOCH
// convert_from tests below; presumably the epoch value for the timestamp above.
private static final String DATE_TIME_BE = "\\x00\\x00\\x00\\x49\\x77\\x85\\x1f\\x8e";
private static final String DATE_TIME_LE = "\\x8e\\x1f\\x85\\x77\\x49\\x00\\x00\\x00";
// Expected values for the date/time conversion tests.
// NOTE(review): `date` is parsed with getDateTimeFormatter() while `time` uses
// getTimeFormatter() — confirm this asymmetry is intentional.
private static DateTime time = DateTime.parse("01:23:45.678", DateUtility.getTimeFormatter());
private static DateTime date = DateTime.parse("1980-01-01", DateUtility.getDateTimeFormatter());
// NOTE(review): appears unused within this chunk — verify usage elsewhere in the class.
String textFileContent;
@Test
public void test_JSON_convertTo_empty_list_drill_1416() throws Exception {
    // DRILL-1416: convert_to/convert_from of a list must round-trip, including the
    // empty-list and null-object cases. Only the long-literal suffixes were changed
    // below (lowercase 'l' -> 'L', per Java convention — 'l' reads like '1').
    String listStr = "[ 4, 6 ]";
    // convert_to(..., 'JSON') rendered as a string.
    testBuilder()
        .sqlQuery("select cast(convert_to(rl[1], 'JSON') as varchar(100)) as json_str from cp.`/store/json/input2.json`")
        .unOrdered()
        .baselineColumns("json_str")
        .baselineValues(listStr)
        .baselineValues("[ ]")
        .baselineValues(listStr)
        .baselineValues(listStr)
        .go();
    // Round-trip: convert_from(convert_to(...)) must yield the original list.
    Object listVal = listOf(4L, 6L);
    testBuilder()
        .sqlQuery("select convert_from(convert_to(rl[1], 'JSON'), 'JSON') list_col from cp.`/store/json/input2.json`")
        .unOrdered()
        .baselineColumns("list_col")
        .baselineValues(listVal)
        .baselineValues(listOf())
        .baselineValues(listVal)
        .baselineValues(listVal)
        .go();
    // Same round-trip over a file containing maps (including an empty map).
    Object mapVal1 = mapOf("f1", 4L, "f2", 6L);
    Object mapVal2 = mapOf("f1", 11L);
    testBuilder()
        .sqlQuery("select convert_from(convert_to(rl[1], 'JSON'), 'JSON') as map_col from cp.`/store/json/json_project_null_object_from_list.json`")
        .unOrdered()
        .baselineColumns("map_col")
        .baselineValues(mapVal1)
        .baselineValues(mapOf())
        .baselineValues(mapVal2)
        .baselineValues(mapVal1)
        .go();
}
@Test
public void testConvertToComplexJSON() throws Exception {
    // EXTENDEDJSON renders longs with the $numberLong wrapper; empty lists stay "[ ]".
    String expectedLongArrayJson =
        "[ {\n" +
        "  \"$numberLong\" : 4\n" +
        "}, {\n" +
        "  \"$numberLong\" : 6\n" +
        "} ]";
    String expectedEmptyArrayJson = "[ ]";
    testBuilder()
        .sqlQuery("select cast(convert_to(rl[1], 'EXTENDEDJSON') as varchar(100)) as json_str from cp.`/store/json/input2.json`")
        .unOrdered()
        .baselineColumns("json_str")
        .baselineValues(expectedLongArrayJson)
        .baselineValues(expectedEmptyArrayJson)
        .baselineValues(expectedLongArrayJson)
        .baselineValues(expectedLongArrayJson)
        .go();
}
@Test
public void testDateTime1() throws Throwable {
// Big-endian epoch bytes decoded as a time-of-day value.
verifyPhysicalPlan("(convert_from(binary_string('" + DATE_TIME_BE + "'), 'TIME_EPOCH_BE'))", time);
}
@Test
public void testDateTime2() throws Throwable {
// Little-endian epoch bytes decoded as a time-of-day value.
verifyPhysicalPlan("convert_from(binary_string('" + DATE_TIME_LE + "'), 'TIME_EPOCH')", time);
}
@Test
public void testDateTime3() throws Throwable {
// Big-endian epoch bytes decoded as a date (time-of-day portion dropped).
verifyPhysicalPlan("convert_from(binary_string('" + DATE_TIME_BE + "'), 'DATE_EPOCH_BE')", date );
}
@Test
public void testDateTime4() throws Throwable {
// Little-endian epoch bytes decoded as a date.
verifyPhysicalPlan("convert_from(binary_string('" + DATE_TIME_LE + "'), 'DATE_EPOCH')", date);
}
@Test
public void testFixedInts1() throws Throwable {
// Single byte decodes directly to TINYINT.
verifyPhysicalPlan("convert_from(binary_string('\\xAD'), 'TINYINT')", (byte) 0xAD);
}
@Test
public void testFixedInts2() throws Throwable {
// SMALLINT is little-endian: bytes FE CA decode to 0xCAFE.
verifyPhysicalPlan("convert_from(binary_string('\\xFE\\xCA'), 'SMALLINT')", (short) 0xCAFE);
}
@Test
public void testFixedInts3() throws Throwable {
// SMALLINT_BE is big-endian: bytes CA FE decode to 0xCAFE.
verifyPhysicalPlan("convert_from(binary_string('\\xCA\\xFE'), 'SMALLINT_BE')", (short) 0xCAFE);
}
@Test
public void testFixedInts4() throws Throwable {
// INT is little-endian: BE BA FE CA decodes to 0xCAFEBABE.
verifyPhysicalPlan("convert_from(binary_string('\\xBE\\xBA\\xFE\\xCA'), 'INT')", 0xCAFEBABE);
}
@Test
public void testFixedInts4SQL_from() throws Throwable {
// Same little-endian INT decode as testFixedInts4, but issued through the SQL front end.
verifySQL("select"
+ " convert_from(binary_string('\\xBE\\xBA\\xFE\\xCA'), 'INT')"
+ " from"
+ " cp.`employee.json` LIMIT 1",
0xCAFEBABE);
}
@Test
public void testFixedInts4SQL_to() throws Throwable {
// Inverse of the _from test: -889275714 == 0xCAFEBABE, encoded little-endian.
verifySQL("select"
+ " convert_to(-889275714, 'INT')"
+ " from"
+ " cp.`employee.json` LIMIT 1",
new byte[] {(byte) 0xBE, (byte) 0xBA, (byte) 0xFE, (byte) 0xCA});
}
@Test
public void testFixedInts5() throws Throwable {
// Big-endian INT decode of 0xCAFEBABE.
verifyPhysicalPlan("convert_from(binary_string('\\xCA\\xFE\\xBA\\xBE'), 'INT_BE')", 0xCAFEBABE);
}
@Test
public void testFixedInts6() throws Throwable {
// Little-endian BIGINT decode of 0xCAFEBABEDEADBEEF.
verifyPhysicalPlan("convert_from(binary_string('\\xEF\\xBE\\xAD\\xDE\\xBE\\xBA\\xFE\\xCA'), 'BIGINT')", 0xCAFEBABEDEADBEEFL);
}
@Test
public void testFixedInts7() throws Throwable {
// Big-endian BIGINT decode of 0xCAFEBABEDEADBEEF.
verifyPhysicalPlan("convert_from(binary_string('\\xCA\\xFE\\xBA\\xBE\\xDE\\xAD\\xBE\\xEF'), 'BIGINT_BE')", 0xCAFEBABEDEADBEEFL);
}
@Test
public void testFixedInts8() throws Throwable {
// Round-trip: a varchar is implicitly cast to INT, encoded big-endian, then decoded back.
verifyPhysicalPlan("convert_from(convert_to(cast(77 as varchar(2)), 'INT_BE'), 'INT_BE')", 77);
}
@Test
public void testFixedInts9() throws Throwable {
// Big-endian encoding of 77: value lands in the last byte.
verifyPhysicalPlan("convert_to(cast(77 as varchar(2)), 'INT_BE')", new byte[] {0, 0, 0, 77});
}
@Test
public void testFixedInts10() throws Throwable {
// Little-endian encoding of 77: value lands in the first byte.
verifyPhysicalPlan("convert_to(cast(77 as varchar(2)), 'INT')", new byte[] {77, 0, 0, 0});
}
@Test
public void testFixedInts11() throws Throwable {
// Big-endian BIGINT encoding of 77.
verifyPhysicalPlan("convert_to(77, 'BIGINT_BE')", new byte[] {0, 0, 0, 0, 0, 0, 0, 77});
}
@Test
public void testFixedInts12() throws Throwable {
// Long.MAX_VALUE little-endian: low bytes all 0xFF (-1 signed), high byte 0x7f last.
verifyPhysicalPlan("convert_to(9223372036854775807, 'BIGINT')", new byte[] {-1, -1, -1, -1, -1, -1, -1, 0x7f});
}
@Test
public void testFixedInts13() throws Throwable {
// Long.MIN_VALUE little-endian: only the sign bit set, in the final byte.
verifyPhysicalPlan("convert_to(-9223372036854775808, 'BIGINT')", new byte[] {0, 0, 0, 0, 0, 0, 0, (byte)0x80});
}
@Test
public void testVInts1() throws Throwable {
// Hadoop VInt: small values encode as a single byte.
verifyPhysicalPlan("convert_to(cast(0 as int), 'INT_HADOOPV')", new byte[] {0});
}
@Test
public void testVInts2() throws Throwable {
// 128 crosses the single-byte threshold: length marker -113 plus one payload byte.
verifyPhysicalPlan("convert_to(cast(128 as int), 'INT_HADOOPV')", new byte[] {-113, -128});
}
@Test
public void testVInts3() throws Throwable {
// 256 needs two payload bytes after the length marker.
verifyPhysicalPlan("convert_to(cast(256 as int), 'INT_HADOOPV')", new byte[] {-114, 1, 0});
}
@Test
public void testVInts4() throws Throwable {
// 65536 needs three payload bytes.
verifyPhysicalPlan("convert_to(cast(65536 as int), 'INT_HADOOPV')", new byte[] {-115, 1, 0, 0});
}
@Test
public void testVInts5() throws Throwable {
// 16777216 (2^24) needs four payload bytes.
verifyPhysicalPlan("convert_to(cast(16777216 as int), 'INT_HADOOPV')", new byte[] {-116, 1, 0, 0, 0});
}
@Test
public void testVInts6() throws Throwable {
// 2^32 exceeds int range, so the BIGINT variant is used: five payload bytes.
verifyPhysicalPlan("convert_to(4294967296, 'BIGINT_HADOOPV')", new byte[] {-117, 1, 0, 0, 0, 0});
}
@Test
public void testVInts7() throws Throwable {
// 2^40: six payload bytes.
verifyPhysicalPlan("convert_to(1099511627776, 'BIGINT_HADOOPV')", new byte[] {-118, 1, 0, 0, 0, 0, 0});
}
@Test
public void testVInts8() throws Throwable {
// 2^48: seven payload bytes.
verifyPhysicalPlan("convert_to(281474976710656, 'BIGINT_HADOOPV')", new byte[] {-119, 1, 0, 0, 0, 0, 0, 0});
}
@Test
public void testVInts9() throws Throwable {
// 2^56: eight payload bytes — the maximum VLong width.
verifyPhysicalPlan("convert_to(72057594037927936, 'BIGINT_HADOOPV')", new byte[] {-120, 1, 0, 0, 0, 0, 0, 0, 0});
}
@Test
public void testVInts10() throws Throwable {
// Long.MAX_VALUE encodes to the full nine-byte form.
verifyPhysicalPlan("convert_to(9223372036854775807, 'BIGINT_HADOOPV')", new byte[] {-120, 127, -1, -1, -1, -1, -1, -1, -1});
}
@Test
public void testVInts11() throws Throwable {
// Inverse of testVInts10: decode the nine-byte form back to Long.MAX_VALUE.
verifyPhysicalPlan("convert_from(binary_string('\\x88\\x7f\\xFF\\xFF\\xFF\\xFF\\xFF\\xFF\\xFF'), 'BIGINT_HADOOPV')", 9223372036854775807L);
}
@Test
public void testVInts12() throws Throwable {
// Long.MIN_VALUE: negative marker byte (-128), then the complemented payload.
verifyPhysicalPlan("convert_to(-9223372036854775808, 'BIGINT_HADOOPV')", new byte[] {-128, 127, -1, -1, -1, -1, -1, -1, -1});
}
@Test
public void testVInts13() throws Throwable {
// Inverse of testVInts12: decode back to Long.MIN_VALUE.
verifyPhysicalPlan("convert_from(binary_string('\\x80\\x7f\\xFF\\xFF\\xFF\\xFF\\xFF\\xFF\\xFF'), 'BIGINT_HADOOPV')", -9223372036854775808L);
}
@Test
public void testBool1() throws Throwable {
// Byte 0x01 decodes to true.
verifyPhysicalPlan("convert_from(binary_string('\\x01'), 'BOOLEAN_BYTE')", true);
}
@Test
public void testBool2() throws Throwable {
// Byte 0x00 decodes to false.
verifyPhysicalPlan("convert_from(binary_string('\\x00'), 'BOOLEAN_BYTE')", false);
}
@Test
public void testBool3() throws Throwable {
// true encodes to a single 0x01 byte.
verifyPhysicalPlan("convert_to(true, 'BOOLEAN_BYTE')", new byte[] {1});
}
@Test
public void testBool4() throws Throwable {
// false encodes to a single 0x00 byte.
verifyPhysicalPlan("convert_to(false, 'BOOLEAN_BYTE')", new byte[] {0});
}
@Test
public void testFloats1() throws Throwable {
// NOTE(review): this test body is empty — it verifies nothing. Either implement it
// or delete the method; an empty @Test always passes and can mask a lost assertion.
}
@Test
public void testFloats2() throws Throwable {
// Round-trip a float4 through the little-endian FLOAT codec.
// Float.valueOf replaces the deprecated `new Float(...)` boxing constructor;
// equality via Float.equals is unchanged.
verifyPhysicalPlan("convert_from(convert_to(cast(77 as float4), 'FLOAT'), 'FLOAT')", Float.valueOf(77.0f));
}
@Test
public void testFloats2be() throws Throwable {
// Big-endian variant of the float4 round-trip.
// Float.valueOf replaces the deprecated `new Float(...)` boxing constructor.
verifyPhysicalPlan("convert_from(convert_to(cast(77 as float4), 'FLOAT_BE'), 'FLOAT_BE')", Float.valueOf(77.0f));
}
@Test
public void testFloats3() throws Throwable {
// Float.MIN_VALUE (1.4e-45) is the smallest subnormal: bit pattern 0x00000001, little-endian.
verifyPhysicalPlan("convert_to(cast(1.4e-45 as float4), 'FLOAT')", new byte[] {1, 0, 0, 0});
}
@Test
public void testFloats4() throws Throwable {
// Float.MAX_VALUE (3.4028235e+38): bit pattern 0x7F7FFFFF, little-endian.
verifyPhysicalPlan("convert_to(cast(3.4028235e+38 as float4), 'FLOAT')", new byte[] {-1, -1, 127, 127});
}
@Test
public void testFloats5(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
// Round-trip a float8 through the little-endian DOUBLE codec.
// NOTE(review): the @Injectable parameters are never used in the body — confirm they
// can be dropped (they look like leftovers from an older JMockit-driven test shape).
verifyPhysicalPlan("convert_from(convert_to(cast(77 as float8), 'DOUBLE'), 'DOUBLE')", 77.0);
}
@Test
public void testFloats5be(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
// Big-endian variant of the float8 round-trip; injected mocks appear unused (see testFloats5).
verifyPhysicalPlan("convert_from(convert_to(cast(77 as float8), 'DOUBLE_BE'), 'DOUBLE_BE')", 77.0);
}
@Test
public void testFloats6(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
// 77.0 as an IEEE-754 double, little-endian byte order.
verifyPhysicalPlan("convert_to(cast(77 as float8), 'DOUBLE')", new byte[] {0, 0, 0, 0, 0, 64, 83, 64});
}
@Test
public void testFloats7(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
// Double.MIN_VALUE (4.9e-324), smallest subnormal double: bit pattern 0x1, little-endian.
verifyPhysicalPlan("convert_to(4.9e-324, 'DOUBLE')", new byte[] {1, 0, 0, 0, 0, 0, 0, 0});
}
@Test
public void testFloats8(@Injectable final DrillbitContext bitContext,
@Injectable UserServer.UserClientConnection connection) throws Throwable {
// Double.MAX_VALUE (1.7976931348623157e+308): bit pattern 0x7FEFFFFFFFFFFFFF, little-endian.
verifyPhysicalPlan("convert_to(1.7976931348623157e+308, 'DOUBLE')", new byte[] {-1, -1, -1, -1, -1, -1, -17, 127});
}
@Test
public void testUTF8() throws Throwable {
// UTF8 codec is a pass-through for ASCII: decode and encode round-trip the same bytes.
verifyPhysicalPlan("convert_from(binary_string('apache_drill'), 'UTF8')", "apache_drill");
verifyPhysicalPlan("convert_to('apache_drill', 'UTF8')", new byte[] {'a', 'p', 'a', 'c', 'h', 'e', '_', 'd', 'r', 'i', 'l', 'l'});
}
@Ignore // TODO(DRILL-2326) remove this when we get rid of the scalar replacement option test cases below
@Test
public void testBigIntVarCharReturnTripConvertLogical() throws Exception {
// Runs a canned logical plan that round-trips BIGINT<->VARCHAR conversions and
// asserts the total row count across all returned batches.
final String logicalPlan = Resources.toString(
Resources.getResource(CONVERSION_TEST_LOGICAL_PLAN), Charsets.UTF_8);
final List<QueryDataBatch> results = testLogicalWithResults(logicalPlan);
int count = 0;
final RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
for (QueryDataBatch result : results) {
count += result.getHeader().getRowCount();
loader.load(result.getHeader().getDef(), result.getData());
// Dump non-empty batches for debugging; content correctness is not asserted here.
if (loader.getRecordCount() > 0) {
VectorUtil.showVectorAccessibleContent(loader);
}
loader.clear();
result.release();
}
// assertEquals reports expected vs. actual on failure, unlike assertTrue(count == 10).
assertEquals(10, count);
}
/**
* Set up the options to test the scalar replacement retry option (see
* ClassTransformer.java). Scalar replacement rewrites bytecode to replace
* value holders (essentially boxed values) with their member variables as
* locals. There is still one pattern that doesn't work, and occasionally new
* ones are introduced. This can be used in tests that exercise failing patterns.
*
* <p>This also flushes the compiled code cache.
*
* <p>TODO this should get moved to QueryTestUtil once DRILL-2245 has been merged
*
* @param drillbit the drillbit
* @param srOption the scalar replacement option value to use
* @return the original scalar replacement option setting (so it can be restored)
*/
private static OptionValue setupScalarReplacementOption(
final Drillbit drillbit, final ScalarReplacementOption srOption) {
// Capture the current SYSTEM-level setting so the caller can restore it later.
final DrillbitContext context = drillbit.getContext();
final OptionManager options = context.getOptionManager();
final OptionValue previous = options.getOption(ClassTransformer.SCALAR_REPLACEMENT_OPTION);

// Install the requested scalar-replacement mode as a SYSTEM option.
options.setOption(OptionValue.createString(OptionType.SYSTEM,
ClassTransformer.SCALAR_REPLACEMENT_OPTION, srOption.name().toLowerCase()));

// Drop previously compiled code so the new setting actually takes effect.
context.getCompiler().flushCache();
return previous;
}
/**
* Restore the original scalar replacement option returned from
* setupScalarReplacementOption().
*
* <p>This also flushes the compiled code cache.
*
* <p>TODO this should get moved to QueryTestUtil once DRILL-2245 has been merged
*
* @param drillbit the drillbit
* @param srOption the scalar replacement option value to use
*/
private static void restoreScalarReplacementOption(final Drillbit drillbit, final OptionValue srOption) {
// Reinstate the saved option, then flush compiled code so it takes effect immediately.
final DrillbitContext context = drillbit.getContext();
context.getOptionManager().setOption(srOption);
context.getCompiler().flushCache();
}
@Test // TODO(DRILL-2326) temporary until we fix the scalar replacement bug for this case
public void testBigIntVarCharReturnTripConvertLogical_ScalarReplaceTRY() throws Exception {
// TRY mode falls back to unrewritten code on failure, so the round-trip should pass.
final OptionValue srOption = setupScalarReplacementOption(bits[0], ScalarReplacementOption.TRY);
try {
// this should work fine
testBigIntVarCharReturnTripConvertLogical();
} finally {
// restore the system option
restoreScalarReplacementOption(bits[0], srOption);
}
}
@Test // TODO(DRILL-2326) temporary until we fix the scalar replacement bug for this case
public void testBigIntVarCharReturnTripConvertLogical_ScalarReplaceON() throws Exception {
// ON mode forces scalar replacement; this case currently fails, and the test
// asserts that the failure (surfaced as an RpcException) actually occurs.
final OptionValue srOption = setupScalarReplacementOption(bits[0], ScalarReplacementOption.ON);
boolean caughtException = false;
try {
// this will fail (with a JUnit assertion) until we fix the SR bug
testBigIntVarCharReturnTripConvertLogical();
} catch(RpcException e) {
caughtException = true;
} finally {
restoreScalarReplacementOption(bits[0], srOption);
}
assertTrue(caughtException);
}
@Test // TODO(DRILL-2326) temporary until we fix the scalar replacement bug for this case
public void testBigIntVarCharReturnTripConvertLogical_ScalarReplaceOFF() throws Exception {
// OFF mode disables scalar replacement entirely, so the round-trip should pass.
final OptionValue srOption = setupScalarReplacementOption(bits[0], ScalarReplacementOption.OFF);
try {
// this should work fine
testBigIntVarCharReturnTripConvertLogical();
} finally {
// restore the system option
restoreScalarReplacementOption(bits[0], srOption);
}
}
@Test
public void testHadooopVInt() throws Exception {
// Round-trips boundary values (0, MAX, MIN) through the Hadoop variable-length
// int/long codec. (Method name typo "Hadooop" is kept: renaming would break any
// test filters referencing it.)
final int offset = 0;
final int maxLen = 9; // maximum encoded size of a Hadoop VLong
final DrillBuf buffer = getAllocator().buffer(maxLen);
try {
// assertEquals takes (expected, actual); the original had the arguments reversed,
// which produces misleading failure messages.
buffer.clear();
HadoopWritables.writeVLong(buffer, offset, maxLen, 0);
assertEquals(0, HadoopWritables.readVLong(buffer, offset, maxLen));
buffer.clear();
HadoopWritables.writeVLong(buffer, offset, maxLen, Long.MAX_VALUE);
assertEquals(Long.MAX_VALUE, HadoopWritables.readVLong(buffer, offset, maxLen));
buffer.clear();
HadoopWritables.writeVLong(buffer, offset, maxLen, Long.MIN_VALUE);
assertEquals(Long.MIN_VALUE, HadoopWritables.readVLong(buffer, offset, maxLen));
buffer.clear();
HadoopWritables.writeVInt(buffer, offset, maxLen, 0);
assertEquals(0, HadoopWritables.readVInt(buffer, offset, maxLen));
buffer.clear();
HadoopWritables.writeVInt(buffer, offset, maxLen, Integer.MAX_VALUE);
assertEquals(Integer.MAX_VALUE, HadoopWritables.readVInt(buffer, offset, maxLen));
buffer.clear();
HadoopWritables.writeVInt(buffer, offset, maxLen, Integer.MIN_VALUE);
assertEquals(Integer.MIN_VALUE, HadoopWritables.readVInt(buffer, offset, maxLen));
} finally {
// Release in finally: the original leaked the buffer when an assertion failed.
buffer.release();
}
}
// Runs `sql` through the SQL front end and asserts a single-row, single-value result.
protected <T> void verifySQL(String sql, T expectedResults) throws Throwable {
verifyResults(sql, expectedResults, getRunResult(QueryType.SQL, sql));
}
// Splices `expression` into the canned physical-plan template and checks the result.
// The quadruple-escaping survives two rounds of unescaping (JSON parse, then Antlr).
protected <T> void verifyPhysicalPlan(String expression, T expectedResults) throws Throwable {
expression = expression.replace("\\", "\\\\\\\\"); // "\\\\\\\\" => Java => "\\\\" => JsonParser => "\\" => AntlrParser "\"
// Lazily cache the plan template; it is reused across all verifyPhysicalPlan calls.
if (textFileContent == null) {
textFileContent = Resources.toString(Resources.getResource(CONVERSION_TEST_PHYSICAL_PLAN), Charsets.UTF_8);
}
String planString = textFileContent.replace("__CONVERT_EXPRESSION__", expression);
verifyResults(expression, expectedResults, getRunResult(QueryType.PHYSICAL, planString));
}
/**
 * Runs the given plan/query and flattens the first column of every returned batch
 * into an Object array (VarChar values are converted to Strings for easy comparison).
 */
protected Object[] getRunResult(QueryType queryType, String planString) throws Exception {
List<QueryDataBatch> resultList = testRunAndReturn(queryType, planString);
List<Object> res = new ArrayList<>();
RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
for (QueryDataBatch result : resultList) {
try {
if (result.getData() != null) {
loader.load(result.getHeader().getDef(), result.getData());
ValueVector v = loader.iterator().next().getValueVector();
for (int j = 0; j < v.getAccessor().getValueCount(); j++) {
if (v instanceof VarCharVector) {
// NOTE(review): new String(byte[]) uses the platform charset — presumably the
// data is ASCII/UTF-8; confirm before running on non-UTF-8 default platforms.
res.add(new String(((VarCharVector) v).getAccessor().get(j)));
} else {
res.add(v.getAccessor().getObject(j));
}
}
loader.clear();
}
} finally {
// Release every batch, including ones with null data — the original only
// released batches inside the `getData() != null` branch, leaking the rest.
result.release();
}
}
return res.toArray();
}
// Asserts exactly one non-null result and compares it to `expectedResults`,
// dispatching to element-wise array comparison when the expectation is an array.
protected <T> void verifyResults(String expression, T expectedResults, Object[] actualResults) throws Throwable {
String testName = String.format("Expression: %s.", expression);
assertEquals(testName, 1, actualResults.length);
assertNotNull(testName, actualResults[0]);
if (expectedResults.getClass().isArray()) {
assertArraysEquals(testName, expectedResults, actualResults[0]);
} else {
assertEquals(testName, expectedResults, actualResults[0]);
}
}
// Convenience overload: array comparison with no failure-message prefix.
protected void assertArraysEquals(Object expected, Object actual) {
assertArraysEquals(null, expected, actual);
}
/**
 * Compares two arrays element-wise, dispatching on component type; float/double
 * comparisons use the shared DELTA tolerance. Fails with a descriptive message if
 * the two values are not arrays of the same component type.
 */
protected void assertArraysEquals(String message, Object expected, Object actual) {
if (expected instanceof byte[] && actual instanceof byte[]) {
assertArrayEquals(message, (byte[]) expected, (byte[]) actual);
} else if (expected instanceof Object[] && actual instanceof Object[]) {
assertArrayEquals(message, (Object[]) expected, (Object[]) actual);
} else if (expected instanceof char[] && actual instanceof char[]) {
assertArrayEquals(message, (char[]) expected, (char[]) actual);
} else if (expected instanceof short[] && actual instanceof short[]) {
assertArrayEquals(message, (short[]) expected, (short[]) actual);
} else if (expected instanceof int[] && actual instanceof int[]) {
assertArrayEquals(message, (int[]) expected, (int[]) actual);
} else if (expected instanceof long[] && actual instanceof long[]) {
assertArrayEquals(message, (long[]) expected, (long[]) actual);
} else if (expected instanceof float[] && actual instanceof float[]) {
assertArrayEquals(message, (float[]) expected, (float[]) actual, DELTA);
} else if (expected instanceof double[] && actual instanceof double[]) {
assertArrayEquals(message, (double[]) expected, (double[]) actual, DELTA);
} else {
// Original bug: the format string has three %s placeholders but only two arguments
// were supplied (`message` was dropped), so this path threw
// MissingFormatArgumentException instead of a useful failure. Also guard against
// a null `expected` before calling getClass().
fail(String.format("%s: Error comparing arrays of type '%s' and '%s'",
message,
(expected == null ? "null" : expected.getClass().getName()),
(actual == null ? "null" : actual.getClass().getName())));
}
}
}
| |
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.volley;
import com.android.volley.Request.Priority;
import com.android.volley.RequestQueue.RequestFinishedListener;
import com.android.volley.mock.MockRequest;
import com.android.volley.mock.ShadowSystemClock;
import com.android.volley.toolbox.NoCache;
import com.android.volley.utils.ImmediateResponseDelivery;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.timeout;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
/**
* Integration tests for {@link RequestQueue}, that verify its behavior in conjunction with real
* dispatcher, queues and
* Requests. Network is mocked out
*/
@RunWith(RobolectricTestRunner.class) @Config(shadows = { ShadowSystemClock.class })
public class RequestQueueIntegrationTest {
// Delivers responses synchronously on the calling thread, making assertions deterministic.
private ResponseDelivery mDelivery;
// Mocked network layer; per-test stubs control latency and responses.
@Mock private Network mMockNetwork;
@Before public void setUp() throws Exception {
mDelivery = new ImmediateResponseDelivery();
initMocks(this);
}
@Test public void add_requestProcessedInCorrectOrder() throws Exception {
// Enqueue 2 requests with different cache keys, and different priorities. The second, higher priority request
// takes 20ms.
// Assert that the lower priority request is only handled after the higher priority one
// has been parsed and delivered.
MockRequest lowerPriorityReq = new MockRequest();
MockRequest higherPriorityReq = new MockRequest();
// Distinct cache keys so the queue does not coalesce the two requests.
lowerPriorityReq.setCacheKey("1");
higherPriorityReq.setCacheKey("2");
lowerPriorityReq.setPriority(Priority.LOW);
higherPriorityReq.setPriority(Priority.HIGH);
RequestFinishedListener listener = mock(RequestFinishedListener.class);
Answer<NetworkResponse> delayAnswer = new Answer<NetworkResponse>() {
@Override public NetworkResponse answer(InvocationOnMock invocationOnMock)
throws Throwable {
Thread.sleep(20);
return mock(NetworkResponse.class);
}
};
//delay only for higher request
when(mMockNetwork.performRequest(higherPriorityReq)).thenAnswer(delayAnswer);
when(mMockNetwork.performRequest(lowerPriorityReq)).thenReturn(mock(NetworkResponse.class));
// Single network dispatcher thread forces strictly sequential processing by priority.
RequestQueue queue = new RequestQueue(new NoCache(), mMockNetwork, 1, mDelivery);
queue.addRequestFinishedListener(listener);
queue.add(lowerPriorityReq);
queue.add(higherPriorityReq);
queue.start();
// you cannot do strict order verification in combination with timeouts with mockito 1.9.5 :(
// as an alternative, first verify no requests have finished, while higherPriorityReq should be processing
// NOTE(review): this verify races against the dispatcher started above — if the 20ms
// network answer completes before this line runs, the test fails spuriously.
verifyNoMoreInteractions(listener);
// verify higherPriorityReq goes through first
verify(listener, timeout(100)).onRequestFinished(higherPriorityReq);
// verify lowerPriorityReq goes last
verify(listener, timeout(10)).onRequestFinished(lowerPriorityReq);
queue.stop();
}
/**
 * Asserts that requests with same cache key are processed in order.
 *
 * Needs to be an integration test because it relies on complex interactions between various queues
 */
@Test public void add_dedupeByCacheKey() throws Exception {
// Enqueue 2 requests with the same cache key. The first request takes 20ms. Assert that the
// second request is only handled after the first one has been parsed and delivered.
Request req1 = new MockRequest();
Request req2 = new MockRequest();
RequestFinishedListener listener = mock(RequestFinishedListener.class);
Answer<NetworkResponse> delayAnswer = new Answer<NetworkResponse>() {
@Override public NetworkResponse answer(InvocationOnMock invocationOnMock)
throws Throwable {
Thread.sleep(20);
return mock(NetworkResponse.class);
}
};
//delay only for first
when(mMockNetwork.performRequest(req1)).thenAnswer(delayAnswer);
when(mMockNetwork.performRequest(req2)).thenReturn(mock(NetworkResponse.class));
// 3 dispatcher threads: dedupe (not thread starvation) must serialize the two requests.
RequestQueue queue = new RequestQueue(new NoCache(), mMockNetwork, 3, mDelivery);
queue.addRequestFinishedListener(listener);
queue.add(req1);
queue.add(req2);
queue.start();
// you cannot do strict order verification with mockito 1.9.5 :(
// as an alternative, first verify no requests have finished, then verify req1 goes through
verifyNoMoreInteractions(listener);
verify(listener, timeout(100)).onRequestFinished(req1);
verify(listener, timeout(10)).onRequestFinished(req2);
queue.stop();
}
/**
 * Verify RequestFinishedListeners are informed when requests are canceled.
 *
 * Needs to be an integration test because it relies on Request -> dispatcher -> RequestQueue
 * interaction
 */
@Test public void add_requestFinishedListenerCanceled() throws Exception {
RequestFinishedListener listener = mock(RequestFinishedListener.class);
Request request = new MockRequest();
// Slow network answer (200ms) keeps the request in flight long enough to cancel it.
Answer<NetworkResponse> delayAnswer = new Answer<NetworkResponse>() {
@Override public NetworkResponse answer(InvocationOnMock invocationOnMock)
throws Throwable {
Thread.sleep(200);
return mock(NetworkResponse.class);
}
};
RequestQueue queue = new RequestQueue(new NoCache(), mMockNetwork, 1, mDelivery);
when(mMockNetwork.performRequest(request)).thenAnswer(delayAnswer);
queue.addRequestFinishedListener(listener);
queue.start();
queue.add(request);
request.cancel();
// Cancellation must still notify finished-listeners.
verify(listener, timeout(100)).onRequestFinished(request);
queue.stop();
}
/**
 * Verify RequestFinishedListeners are informed when requests are successfully delivered.
 *
 * Needs to be an integration test because it relies on Request -> dispatcher -> RequestQueue
 * interaction
 */
@Test public void add_requestFinishedListenerSuccess() throws Exception {
NetworkResponse response = mock(NetworkResponse.class);
Request request = new MockRequest();
RequestFinishedListener listener = mock(RequestFinishedListener.class);
RequestFinishedListener listener2 = mock(RequestFinishedListener.class);
RequestQueue queue = new RequestQueue(new NoCache(), mMockNetwork, 1, mDelivery);
// Both registered listeners must be notified for the same request.
queue.addRequestFinishedListener(listener);
queue.addRequestFinishedListener(listener2);
queue.start();
queue.add(request);
verify(listener, timeout(100)).onRequestFinished(request);
verify(listener2, timeout(100)).onRequestFinished(request);
queue.stop();
}
/**
 * Verify RequestFinishedListeners are informed when a request errors.
 *
 * Needs to be an integration test because it relies on Request -> dispatcher -> RequestQueue
 * interaction
 */
@Test public void add_requestFinishedListenerError() throws Exception {
RequestFinishedListener listener = mock(RequestFinishedListener.class);
Request request = new MockRequest();
RequestQueue queue = new RequestQueue(new NoCache(), mMockNetwork, 1, mDelivery);
// Network failure path: performRequest throws, listener must still fire.
when(mMockNetwork.performRequest(request)).thenThrow(new VolleyError());
queue.addRequestFinishedListener(listener);
queue.start();
queue.add(request);
verify(listener, timeout(100)).onRequestFinished(request);
queue.stop();
}
}
| |
package org.hyperledger.indy.sdk.agent;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import org.hyperledger.indy.sdk.IndyException;
import org.hyperledger.indy.sdk.IndyJava;
import org.hyperledger.indy.sdk.LibIndy;
import org.hyperledger.indy.sdk.pool.Pool;
import org.hyperledger.indy.sdk.wallet.Wallet;
import com.sun.jna.Callback;
/**
* agent.rs API
*/
/**
* Agent related functionality.
*/
public class Agent extends IndyJava.API {
// Live connections and listeners keyed by their native handles; populated by the
// static callbacks below, concurrent because callbacks arrive on native threads.
private static Map<Integer, Agent.Connection> connections = new ConcurrentHashMap<Integer, Agent.Connection>();
private static Map<Integer, Agent.Listener> listeners = new ConcurrentHashMap<Integer, Agent.Listener>();
// Private constructor: this class exposes only static API methods.
private Agent() {
}
/*
* OBSERVERS
*/
// Observers registered per command handle, consumed (removed) when the matching
// callback fires. Note the singular name `messageObserver` is a map, like its sibling.
private static Map<Integer, AgentObservers.MessageObserver> messageObserver = new ConcurrentHashMap<Integer, AgentObservers.MessageObserver>();
private static Map<Integer, AgentObservers.ConnectionObserver> connectionObservers = new ConcurrentHashMap<Integer, AgentObservers.ConnectionObserver>();
/**
* Adds a message observer to track against the specified command handle.
*
* @param commandHandle The command handle to track against.
* @param messageObserver The message observer to track.
*/
private static void addMessageObserver(int commandHandle, AgentObservers.MessageObserver messageObserver) {
// Precondition (assert-only, so inactive unless -ea): one observer per handle.
assert(! Agent.messageObserver.containsKey(commandHandle));
Agent.messageObserver.put(commandHandle, messageObserver);
}
/**
* Removes a message observer from tracking.
*
* @param xcommand_handle The command handle the message observer is tracked against.
* @return The message observer.
*/
private static AgentObservers.MessageObserver removeMessageObserver(int xcommand_handle) {
// Detach and hand back the observer registered for this command handle;
// an absent entry indicates a protocol bug (assert-only check).
final AgentObservers.MessageObserver observer = messageObserver.remove(xcommand_handle);
assert observer != null;
return observer;
}
/**
* Adds a connection observer to track against the specified command handle.
*
* @param commandHandle The command handle to track against.
* @param connectionObserver The connection observer to track.
*/
private static void addConnectionObserver(int commandHandle, AgentObservers.ConnectionObserver connectionObserver) {
// Precondition (assert-only, so inactive unless -ea): one observer per handle.
assert(! connectionObservers.containsKey(commandHandle));
connectionObservers.put(commandHandle, connectionObserver);
}
/**
* Removes a connection observer from tracking.
*
* @param xcommand_handle The command handle the connection observer is tracked against.
* @return The connection observer.
*/
private static AgentObservers.ConnectionObserver removeConnectionObserver(int xcommand_handle) {
// Detach and hand back the observer registered for this command handle;
// an absent entry indicates a protocol bug (assert-only check).
final AgentObservers.ConnectionObserver observer = connectionObservers.remove(xcommand_handle);
assert observer != null;
return observer;
}
/*
* STATIC CALLBACKS
*/
/**
* Callback used when an outgoing connection is established.
*/
private static Callback agentConnectConnectionCb = new Callback() {

// "unchecked" added for the raw-future cast below, matching every sibling callback
// in this class; the cast is safe because agentConnect registered a
// CompletableFuture<Connection> under this command handle.
@SuppressWarnings({"unused", "unchecked"})
public void callback(int xcommand_handle, int err, int connection_handle) throws IndyException {

CompletableFuture<Connection> future = (CompletableFuture<Connection>) removeFuture(xcommand_handle);
if (! checkCallback(future, err)) return;

// Track the new connection and attach the observer registered at connect time.
assert(! connections.containsKey(connection_handle));
Agent.Connection connection = new Agent.Connection(connection_handle);
connections.put(connection_handle, connection);
connection.messageObserver = removeMessageObserver(xcommand_handle);

future.complete(connection);
}
};
/**
* Callback used when am outgoing connection receives a message.
*/
private static Callback agentConnectMessageCb = new Callback() {
@SuppressWarnings("unused")
public void callback(int xconnection_handle, int err, String message) throws IndyException {
checkCallback(err);
// Silently drop messages for unknown/already-closed connections.
Agent.Connection connection = connections.get(xconnection_handle);
if (connection == null) return;
// Forward the message to the observer supplied at connect time.
AgentObservers.MessageObserver messageObserver = connection.messageObserver;
messageObserver.onMessage(connection, message);
}
};
/**
* Callback used when a listener is ready to accept connections.
*/
private static Callback agentListenListenerCb = new Callback() {

// "unchecked" added for the raw-future cast below, matching every sibling callback
// in this class; the cast is safe because agentListen registered a
// CompletableFuture<Listener> under this command handle.
@SuppressWarnings({"unused", "unchecked"})
public void callback(int xcommand_handle, int err, int listener_handle) throws IndyException {

CompletableFuture<Listener> future = (CompletableFuture<Listener>) removeFuture(xcommand_handle);
if (! checkCallback(future, err)) return;

// Track the new listener and attach the observer registered at listen time.
assert(! listeners.containsKey(listener_handle));
Agent.Listener listener = new Agent.Listener(listener_handle);
listeners.put(listener_handle, listener);
listener.connectionObserver = removeConnectionObserver(xcommand_handle);

future.complete(listener);
}
};
/**
* Callback used when a listener receives an incoming connection.
*/
private static Callback agentListenConnectionCb = new Callback() {
@SuppressWarnings("unused")
public void callback(int xlistener_handle, int err, int connection_handle, String sender_did, String receiver_did) throws IndyException {
checkCallback(err);
// Ignore connections for unknown/closed listeners.
Agent.Listener listener = listeners.get(xlistener_handle);
if (listener == null) return;
// Register the inbound connection, then let the listener's observer decide
// which message observer should handle traffic on it.
assert(! connections.containsKey(connection_handle));
Agent.Connection connection = new Agent.Connection(connection_handle);
connections.put(connection_handle, connection);
AgentObservers.ConnectionObserver connectionObserver = listener.connectionObserver;
connection.messageObserver = connectionObserver.onConnection(listener, connection, sender_did, receiver_did);
}
};
/**
* Callback used when a listener connection receives a message.
*/
private static Callback agentListenMessageCb = new Callback() {
@SuppressWarnings("unused")
public void callback(int xconnection_handle, int err, String message) throws IndyException {
checkCallback(err);
// Silently drop messages for unknown/already-closed connections.
Agent.Connection connection = connections.get(xconnection_handle);
if (connection == null) return;
// Forward to the observer chosen when the inbound connection was accepted.
AgentObservers.MessageObserver messageObserver = connection.messageObserver;
messageObserver.onMessage(connection, message);
}
};
/**
* Callback used when an identity is added to a listener.
*/
private static Callback agentAddIdentityCb = new Callback() {
@SuppressWarnings({"unused", "unchecked"})
public void callback(int xcommand_handle, int err, int listener_handle) {
// Void-result completion: just resolve the pending future (or fail via checkCallback).
CompletableFuture<Void> future = (CompletableFuture<Void>) removeFuture(xcommand_handle);
if (! checkCallback(future, err)) return;
future.complete(null);
}
};
/**
* Callback used when an identity is removed from a listener.
*/
private static Callback agentRemoveIdentityCb = new Callback() {
@SuppressWarnings({"unused", "unchecked"})
public void callback(int xcommand_handle, int err, int listener_handle) {
// Void-result completion: just resolve the pending future (or fail via checkCallback).
CompletableFuture<Void> future = (CompletableFuture<Void>) removeFuture(xcommand_handle);
if (! checkCallback(future, err)) return;
future.complete(null);
}
};
/**
* Callback used when a message is sent.
*/
private static Callback agentSendCb = new Callback() {
@SuppressWarnings({"unused", "unchecked"})
public void callback(int xcommand_handle, int err) {
// Void-result completion: just resolve the pending future (or fail via checkCallback).
CompletableFuture<Void> future = (CompletableFuture<Void>) removeFuture(xcommand_handle);
if (! checkCallback(future, err)) return;
future.complete(null);
}
};
/**
* Callback used when a connection is closed.
*/
private static Callback agentCloseConnectionCb = new Callback() {
@SuppressWarnings({"unused", "unchecked"})
public void callback(int xcommand_handle, int err) {
// Void-result completion: just resolve the pending future (or fail via checkCallback).
CompletableFuture<Void> future = (CompletableFuture<Void>) removeFuture(xcommand_handle);
if (! checkCallback(future, err)) return;
future.complete(null);
}
};
/**
* Callback used when a listener is closed.
*/
private static Callback agentCloseListenerCb = new Callback() {
@SuppressWarnings({"unused", "unchecked"})
public void callback(int xcommand_handle, int err) {
// Void-result completion: just resolve the pending future (or fail via checkCallback).
CompletableFuture<Void> future = (CompletableFuture<Void>) removeFuture(xcommand_handle);
if (! checkCallback(future, err)) return;
future.complete(null);
}
};
/*
* STATIC METHODS
*/
/**
 * Opens an outgoing connection between the sender and receiver identities.
 *
 * @param pool The pool to use.
 * @param wallet The wallet to use.
 * @param senderDid DID of the sending identity.
 * @param receiverDid DID of the receiving identity.
 * @param messageObserver Observer notified of messages arriving on the connection.
 * @return A future resolving to the established {@link Connection}.
 * @throws IndyException Thrown if the SDK call fails.
 */
public static CompletableFuture<Connection> agentConnect(
        Pool pool,
        Wallet wallet,
        String senderDid,
        String receiverDid,
        AgentObservers.MessageObserver messageObserver) throws IndyException {

    CompletableFuture<Connection> future = new CompletableFuture<>();
    int commandHandle = addFuture(future);

    // Register the observer under the same command handle so the connection
    // callback can associate incoming messages with it.
    addMessageObserver(commandHandle, messageObserver);

    // Fail fast if the native call could not even be dispatched.
    checkResult(LibIndy.api.indy_agent_connect(
            commandHandle,
            pool.getPoolHandle(),
            wallet.getWalletHandle(),
            senderDid,
            receiverDid,
            agentConnectConnectionCb,
            agentConnectMessageCb));

    return future;
}
/**
 * Starts listening for incoming agent connections on the given endpoint.
 *
 * @param endpoint The endpoint the listener binds to.
 * @param connectionObserver Observer notified of each new incoming connection.
 * @return A future resolving to the created {@link Listener}.
 * @throws IndyException Thrown if the SDK call fails.
 */
public static CompletableFuture<Listener> agentListen(
        String endpoint,
        AgentObservers.ConnectionObserver connectionObserver) throws IndyException {

    CompletableFuture<Listener> future = new CompletableFuture<>();
    int commandHandle = addFuture(future);

    // Register the observer under the command handle so the listener callback
    // can route new connections to it.
    addConnectionObserver(commandHandle, connectionObserver);

    // Fail fast if the native call could not even be dispatched.
    checkResult(LibIndy.api.indy_agent_listen(
            commandHandle,
            endpoint,
            agentListenListenerCb,
            agentListenConnectionCb,
            agentListenMessageCb));

    return future;
}
/**
 * Registers an additional identity on an existing listener.
 *
 * @param listener The listener to add the identity to.
 * @param pool The pool to use.
 * @param wallet The wallet to use.
 * @param did DID of the identity to register.
 * @return A future that completes (with no value) when the identity is added.
 * @throws IndyException Thrown if the SDK call fails.
 */
public static CompletableFuture<Void> agentAddIdentity(
        Agent.Listener listener,
        Pool pool,
        Wallet wallet,
        String did) throws IndyException {

    CompletableFuture<Void> future = new CompletableFuture<>();
    int commandHandle = addFuture(future);

    // Fail fast if the native call could not even be dispatched.
    checkResult(LibIndy.api.indy_agent_add_identity(
            commandHandle,
            listener.getListenerHandle(),
            pool.getPoolHandle(),
            wallet.getWalletHandle(),
            did,
            agentAddIdentityCb));

    return future;
}
/**
 * Deregisters an identity from an existing listener.
 *
 * @param listener The listener to remove the identity from.
 * @param wallet The wallet to use.
 * @param did DID of the identity to deregister.
 * @return A future that completes (with no value) when the identity is removed.
 * @throws IndyException Thrown if the SDK call fails.
 */
public static CompletableFuture<Void> agentRemoveIdentity(
        Agent.Listener listener,
        Wallet wallet,
        String did) throws IndyException {

    CompletableFuture<Void> future = new CompletableFuture<>();
    int commandHandle = addFuture(future);

    // Fail fast if the native call could not even be dispatched.
    checkResult(LibIndy.api.indy_agent_remove_identity(
            commandHandle,
            listener.getListenerHandle(),
            wallet.getWalletHandle(),
            did,
            agentRemoveIdentityCb));

    return future;
}
/**
 * Sends a message over an established connection.
 *
 * @param connection The connection to send on.
 * @param message The message payload.
 * @return A future that completes (with no value) when the send finishes.
 * @throws IndyException Thrown if the SDK call fails.
 */
public static CompletableFuture<Void> agentSend(
        Agent.Connection connection,
        String message) throws IndyException {

    CompletableFuture<Void> future = new CompletableFuture<>();
    int commandHandle = addFuture(future);

    // Fail fast if the native call could not even be dispatched.
    checkResult(LibIndy.api.indy_agent_send(
            commandHandle,
            connection.getConnectionHandle(),
            message,
            agentSendCb));

    return future;
}
/**
 * Closes an established connection.
 *
 * @param connection The connection to close.
 * @return A future that completes (with no value) when the connection is closed.
 * @throws IndyException Thrown if the SDK call fails.
 */
public static CompletableFuture<Void> agentCloseConnection(
        Agent.Connection connection) throws IndyException {

    CompletableFuture<Void> future = new CompletableFuture<>();
    int commandHandle = addFuture(future);

    int handle = connection.getConnectionHandle();
    // Drop the bookkeeping entry before asking the native layer to close,
    // matching the original ordering.
    connections.remove(handle);

    // Fail fast if the native call could not even be dispatched.
    checkResult(LibIndy.api.indy_agent_close_connection(
            commandHandle,
            handle,
            agentCloseConnectionCb));

    return future;
}
/**
 * Closes a listener and stops accepting incoming connections on it.
 *
 * @param listener The listener to close.
 * @return A future that completes (with no value) when the listener is closed.
 * @throws IndyException Thrown if the SDK call fails.
 */
public static CompletableFuture<Void> agentCloseListener(
        Agent.Listener listener) throws IndyException {

    CompletableFuture<Void> future = new CompletableFuture<>();
    int commandHandle = addFuture(future);

    int handle = listener.getListenerHandle();
    // Drop the bookkeeping entry before asking the native layer to close,
    // matching the original ordering.
    listeners.remove(handle);

    // Fail fast if the native call could not even be dispatched.
    checkResult(LibIndy.api.indy_agent_close_listener(
            commandHandle,
            handle,
            agentCloseListenerCb));

    return future;
}
/*
* NESTED CLASSES WITH INSTANCE METHODS
*/
/**
 * Listens for incoming connections. Thin wrapper around a native listener
 * handle; all operations delegate to the static methods of the enclosing
 * Agent class.
 */
public static class Listener {

    // Native handle identifying this listener in the SDK.
    private final int listenerHandle;

    // NOTE(review): assigned by enclosing-class code not visible in this block;
    // do not remove even though it looks unused here.
    private AgentObservers.ConnectionObserver connectionObserver;

    // Instances are created only by the enclosing Agent class.
    private Listener(int listenerHandle) {
        this.listenerHandle = listenerHandle;
    }

    /**
     * Gets the handle of the listener.
     *
     * @return The handle of the listener.
     */
    public int getListenerHandle() {
        return this.listenerHandle;
    }

    /**
     * Adds an identity to the listener.
     *
     * @param pool The pool.
     * @param wallet The wallet.
     * @param did The DID of the identity to add.
     * @return A future that does not resolve a value.
     * @throws IndyException Thrown if an error occurs when calling the SDK.
     */
    public CompletableFuture<Void> agentAddIdentity(Pool pool, Wallet wallet, String did) throws IndyException {
        return Agent.agentAddIdentity(this, pool, wallet, did);
    }

    /**
     * Removes an identity from the listener.
     *
     * @param wallet The wallet.
     * @param did The DID of the identity to remove.
     * @return A future that does not resolve a value.
     * @throws IndyException Thrown if an error occurs when calling the SDK.
     */
    public CompletableFuture<Void> agentRemoveIdentity(Wallet wallet, String did) throws IndyException {
        return Agent.agentRemoveIdentity(this, wallet, did);
    }

    /**
     * Closes the listener.
     *
     * @return A future that does not resolve a value.
     * @throws IndyException Thrown if an error occurs when calling the SDK.
     */
    public CompletableFuture<Void> agentCloseListener() throws IndyException {
        return Agent.agentCloseListener(this);
    }
}
/**
 * A connection between two agents. Thin wrapper around a native connection
 * handle; all operations delegate to the static methods of the enclosing
 * Agent class.
 */
public static class Connection {

    // Native handle identifying this connection in the SDK.
    private final int connectionHandle;

    // NOTE(review): assigned by enclosing-class code not visible in this block;
    // do not remove even though it looks unused here.
    private AgentObservers.MessageObserver messageObserver;

    // Instances are created only by the enclosing Agent class.
    private Connection(int connectionHandle) {
        this.connectionHandle = connectionHandle;
    }

    /**
     * Gets the handle of the connection.
     *
     * @return The handle of the connection.
     */
    public int getConnectionHandle() {
        return this.connectionHandle;
    }

    /**
     * Sends a message on the connection.
     *
     * @param message The message to send.
     * @return A future that does not resolve a value.
     * @throws IndyException Thrown if an error occurs when calling the SDK.
     */
    public CompletableFuture<Void> agentSend(String message) throws IndyException {
        return Agent.agentSend(this, message);
    }

    /**
     * Closes the connection.
     *
     * @return A future that does not resolve a value.
     * @throws IndyException Thrown if an error occurs when calling the SDK.
     */
    public CompletableFuture<Void> agentCloseConnection() throws IndyException {
        return Agent.agentCloseConnection(this);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.luni.util;
import java.util.AbstractCollection;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Arrays;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
/**
*
* Reductive hash with two keys
*
*/
/**
 * "Reductive hash with two keys": a hash map addressed by a key pair
 * (key1, key2). All primary operations take both keys; the combined hash is
 * {@code key1.hashCode() + key2.hashCode()}.
 *
 * <p>Exactly one mapping whose keys are BOTH {@code null} is supported; it is
 * stored in a dedicated overflow slot at index {@code arrSize}, one past the
 * regular buckets. Passing exactly one {@code null} key is not supported and
 * throws {@code NullPointerException} (see findEntry/removeEntry/put).
 *
 * <p>Through the inherited {@link AbstractMap} view, each entry is exposed
 * under the concatenated string key {@code key1.toString() + key2.toString()}.
 *
 * <p>Not thread-safe; the iterators are fail-fast via {@code modCount}.
 *
 * <p>NOTE(review): on Java 8+ the declaration {@code V remove(Object, Object)}
 * below clashes with the default method {@code boolean Map.remove(Object,
 * Object)} (same erasure, incompatible return type), so this class no longer
 * compiles on modern JDKs. Any fix requires an interface change (e.g. renaming
 * the two-key removal method) — confirm with callers before changing it.
 */
public class TwoKeyHashMap<E, K, V> extends AbstractMap<String, V> {

    static final float DEFAULT_LOAD_FACTOR = 0.75f;

    static final int DEFAULT_INITIAL_SIZE = 16;

    // Lazily-created views cached after the first entrySet()/values() call.
    private Set<Map.Entry<String, V>> entrySet;
    private Collection<V> values;

    // Number of mappings, including the (null, null) slot when occupied.
    private int size;

    // Number of regular buckets; arr.length == arrSize + 1 because the last
    // slot is reserved for the (null, null) entry.
    private int arrSize;

    // Structural-modification counter backing fail-fast iteration.
    private int modCount;

    private Entry<E, K, V>[] arr;

    private float loadFactor;

    // Resize trigger: rehash once size exceeds arrSize * loadFactor.
    int threshold = 0;

    /**
     * Constructs an empty map with default capacity (16) and load factor (0.75).
     */
    public TwoKeyHashMap() {
        this(DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Constructs an empty map with the given capacity and default load factor.
     *
     * @param initialCapacity initial bucket count; must be >= 0
     */
    public TwoKeyHashMap(int initialCapacity) {
        this(initialCapacity, DEFAULT_LOAD_FACTOR);
    }

    /**
     * Constructs an empty map.
     *
     * @param initialCapacity initial bucket count; must be >= 0
     * @param initialLoadFactor resize trigger ratio; must be > 0
     */
    public TwoKeyHashMap(int initialCapacity, float initialLoadFactor) {
        if (initialCapacity < 0) {
            throw new IllegalArgumentException("initialCapacity should be >= 0");
        }
        if (initialLoadFactor <= 0) {
            throw new IllegalArgumentException("initialLoadFactor should be > 0");
        }
        loadFactor = initialLoadFactor;
        if (initialCapacity == Integer.MAX_VALUE) {
            // Leave room for the extra (null, null) slot allocated below.
            initialCapacity--;
        }
        arrSize = initialCapacity > 0 ? initialCapacity : 1;
        threshold = (int) (arrSize * loadFactor);
        arr = new Entry[arrSize + 1];
    }

    /**
     * Returns a collection view of the values; created lazily and cached.
     */
    public Collection<V> values() {
        if (values == null) {
            values = new ValuesCollectionImpl();
        }
        return values;
    }

    /**
     * Returns a set view of the mappings; created lazily and cached.
     */
    public Set<Map.Entry<String, V>> entrySet() {
        if (entrySet == null) {
            entrySet = new EntrySetImpl();
        }
        return entrySet;
    }

    /**
     * Removes all mappings, including the (null, null) slot.
     */
    public void clear() {
        modCount++;
        size = 0;
        Arrays.fill(arr, 0, arr.length, null);
    }

    /**
     * Removes the mapping for the given key pair.
     *
     * @param key1 first key (both may be null only together)
     * @param key2 second key
     * @return the removed value, or null if no mapping existed
     */
    public V remove(Object key1, Object key2) {
        Entry<E, K, V> e = removeEntry(key1, key2);
        return null != e ? e.value : null;
    }

    /**
     * Associates the value with the given key pair.
     *
     * @param key1 first key (both may be null only together)
     * @param key2 second key
     * @param value value to store
     * @return the previous value for the pair, or null if there was none
     */
    public V put(E key1, K key2, V value) {
        if (key1 == null && key2 == null) {
            // The (null, null) mapping lives in the dedicated overflow slot.
            int index = arrSize;
            if (arr[index] == null) {
                arr[index] = createEntry(0, null, null, value, null);
                size++;
                modCount++;
                return null;
            } else {
                V oldValue = arr[index].value;
                arr[index].value = value;
                return oldValue;
            }
        }
        // NOTE: throws NullPointerException if exactly one key is null.
        int hash = key1.hashCode() + key2.hashCode();
        int index = (hash & 0x7fffffff) % arrSize;
        Entry<E, K, V> e = arr[index];
        while (e != null) {
            if (hash == e.hash && key1.equals(e.getKey1()) && key2.equals(e.getKey2())) {
                V oldValue = e.value;
                e.value = value;
                return oldValue;
            }
            e = e.next;
        }
        // No existing entry: prepend a new one to the bucket chain.
        arr[index] = createEntry(hash, key1, key2, value, arr[index]);
        size++;
        modCount++;
        if (size > threshold) {
            rehash();
        }
        return null;
    }

    /**
     * Grows the bucket array (roughly doubling it) and redistributes all
     * entries; the (null, null) slot is carried over to the new last index.
     */
    void rehash() {
        int newArrSize = (arrSize + 1) * 2 + 1;
        if (newArrSize < 0) {
            // Overflowed int: clamp to the largest usable bucket count.
            newArrSize = Integer.MAX_VALUE - 1;
        }
        Entry<E, K, V>[] newArr = new Entry[newArrSize + 1];
        for (int i = 0; i < arr.length - 1; i++) {
            Entry<E, K, V> entry = arr[i];
            while (entry != null) {
                Entry<E, K, V> next = entry.next;
                int newIndex = (entry.hash & 0x7fffffff) % newArrSize;
                entry.next = newArr[newIndex];
                newArr[newIndex] = entry;
                entry = next;
            }
        }
        newArr[newArrSize] = arr[arrSize]; // move null entry
        arrSize = newArrSize;
        // The maximum array size is reached, increased loadFactor
        // will keep array from further growing
        if (arrSize == Integer.MAX_VALUE) {
            loadFactor *= 10;
        }
        threshold = (int) (arrSize * loadFactor);
        arr = newArr;
    }

    /**
     * Returns true if this map contains a mapping for the given key pair.
     *
     * @param key1 first key
     * @param key2 second key
     * @return true if a mapping exists
     */
    public boolean containsKey(Object key1, Object key2) {
        return findEntry(key1, key2) != null;
    }

    /**
     * Returns the value mapped to the given key pair.
     *
     * @param key1 first key
     * @param key2 second key
     * @return the mapped value, or null if absent
     */
    public V get(Object key1, Object key2) {
        Entry<E, K, V> e = findEntry(key1, key2);
        if (e != null) {
            return e.value;
        }
        return null;
    }

    /**
     * Returns true if this map contains no key-value mappings.
     */
    public boolean isEmpty() {
        return size == 0;
    }

    /**
     * Returns the number of mappings.
     */
    public int size() {
        return size;
    }

    /**
     * Creates a new entry; overridable factory used by put().
     *
     * @param hashCode combined hash of the key pair
     * @param key1 first key
     * @param key2 second key
     * @param value mapped value
     * @param next next entry in the bucket chain
     * @return the new entry
     */
    Entry<E, K, V> createEntry(int hashCode, E key1, K key2,
            V value, Entry<E, K, V> next) {
        return new Entry<E, K, V>(hashCode, key1, key2, value, next);
    }

    /**
     * Creates the entry-set iterator; overridable factory.
     */
    Iterator<Map.Entry<String, V>> createEntrySetIterator() {
        return new EntryIteratorImpl();
    }

    /**
     * Creates the values iterator; overridable factory.
     */
    Iterator<V> createValueCollectionIterator() {
        return new ValueIteratorImpl();
    }

    /**
     * Entry implementation for the TwoKeyHashMap class. Exposed via the
     * AbstractMap view under the concatenated string key.
     */
    public static class Entry<E, K, V> implements Map.Entry<String, V> {
        int hash;
        E key1;
        K key2;
        V value;
        // Next entry in the same bucket's chain.
        Entry<E, K, V> next;

        public Entry(int hash, E key1, K key2, V value, Entry<E, K, V> next) {
            this.hash = hash;
            this.key1 = key1;
            this.key2 = key2;
            this.value = value;
            this.next = next;
        }

        // NOTE(review): throws NullPointerException for the (null, null)
        // sentinel entry; iterating entrySet() and calling getKey() on that
        // entry crashes. Consider String.valueOf when this can be changed.
        public String getKey() {
            return key1.toString() + key2.toString();
        }

        public E getKey1() {
            return key1;
        }

        public K getKey2() {
            return key2;
        }

        public V getValue() {
            return value;
        }

        public V setValue(V value) {
            V oldValue = this.value;
            this.value = value;
            return oldValue;
        }

        // NOTE(review): not null-safe — when this entry's key1 is null and the
        // other's is also null, the !key1.equals(...) clause below throws
        // NullPointerException instead of comparing. A null-safe rewrite is an
        // observable behavior change, so it is only flagged here.
        public boolean equals(Object obj) {
            if (!(obj instanceof Entry)) {
                return false;
            }
            Entry<?, ?, ?> e = (Entry<?, ?, ?>) obj;
            Object getKey1 = e.getKey1();
            Object getKey2 = e.getKey2();
            Object getValue = e.getValue();
            if ((key1 == null && getKey1 != null) ||
                    (key2 == null && getKey2 != null) ||
                    (value == null && getValue != null) ||
                    !key1.equals(e.getKey1()) ||
                    !key2.equals(e.getKey2()) ||
                    !value.equals(getValue)) {
                return false;
            }
            return true;
        }

        public int hashCode() {
            int hash1 = (key1 == null ? 0 : key1.hashCode());
            int hash2 = (key2 == null ? 0 : key2.hashCode());
            return (hash1 + hash2) ^ (value == null ? 0 : value.hashCode());
        }
    }

    // Set view over the entries; all mutation delegates to the outer map.
    class EntrySetImpl extends AbstractSet<Map.Entry<String, V>> {

        public int size() {
            return size;
        }

        public void clear() {
            TwoKeyHashMap.this.clear();
        }

        public boolean isEmpty() {
            return size == 0;
        }

        public boolean contains(Object obj) {
            if (!(obj instanceof Entry)) {
                return false;
            }
            Entry<?, ?, ?> entry = (Entry<?, ?, ?>) obj;
            Entry<E, K, V> entry2 = findEntry(entry.getKey1(), entry.getKey2());
            if (entry2 == null) {
                return false;
            }
            Object value = entry.getValue();
            Object value2 = entry2.getValue();
            return value == null ? value2 == null : value.equals(value2);
        }

        public boolean remove(Object obj) {
            if (!(obj instanceof Entry)) {
                return false;
            }
            return removeEntry(((Entry) obj).getKey1(), ((Entry) obj).getKey2()) != null;
        }

        public Iterator<Map.Entry<String, V>> iterator() {
            return createEntrySetIterator();
        }
    }

    // Iterates Entries inside the Map. Fail-fast: next()/remove() throw
    // ConcurrentModificationException if the map changed outside the iterator.
    class EntryIteratorImpl implements Iterator<Map.Entry<String, V>> {
        private int startModCount;
        // True when hasNext() already advanced to an unreturned entry.
        private boolean found;
        private int curr = -1;
        private int returned_index = -1;
        private Entry<E, K, V> curr_entry;
        private Entry<E, K, V> returned_entry;

        EntryIteratorImpl() {
            startModCount = modCount;
        }

        public boolean hasNext() {
            if (found) {
                return true;
            }
            if (curr_entry != null) {
                curr_entry = curr_entry.next;
            }
            if (curr_entry == null) {
                // Skip to the next non-empty bucket; the scan includes the
                // final (null, null) slot at index arrSize.
                for (curr++; curr < arr.length && arr[curr] == null; curr++) {
                }
                if (curr < arr.length) {
                    curr_entry = arr[curr];
                }
            }
            return found = (curr_entry != null);
        }

        public Map.Entry<String, V> next() {
            if (modCount != startModCount) {
                throw new ConcurrentModificationException();
            }
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            found = false;
            returned_index = curr;
            returned_entry = curr_entry;
            return (Map.Entry<String, V>) curr_entry;
        }

        public void remove() {
            if (returned_index == -1) {
                throw new IllegalStateException();
            }
            if (modCount != startModCount) {
                throw new ConcurrentModificationException();
            }
            // Unlink the last-returned entry from its bucket chain.
            Entry<E, K, V> p = null;
            Entry<E, K, V> e = arr[returned_index];
            while (e != returned_entry) {
                p = e;
                e = e.next;
            }
            if (p != null) {
                p.next = returned_entry.next;
            } else {
                arr[returned_index] = returned_entry.next;
            }
            size--;
            // Keep this iterator valid while invalidating any others.
            modCount++;
            startModCount++;
            returned_index = -1;
        }
    }

    // Locates the entry for a key pair; the (null, null) pair maps to the
    // overflow slot. Throws NullPointerException if exactly one key is null.
    private final Entry<E, K, V> findEntry(Object key1, Object key2) {
        if (key1 == null && key2 == null) {
            return arr[arrSize];
        }
        int hash = key1.hashCode() + key2.hashCode();
        int index = (hash & 0x7fffffff) % arrSize;
        Entry<E, K, V> e = arr[index];
        while (e != null) {
            if (hash == e.hash && key1.equals(e.getKey1()) && key2.equals(e.getKey2())) {
                return e;
            }
            e = e.next;
        }
        return null;
    }

    // Removes and returns the entry for a key pair, or null if absent.
    private final Entry<E, K, V> removeEntry(Object key1, Object key2) {
        if (key1 == null && key2 == null) {
            int index = arrSize;
            if (arr[index] != null) {
                Entry<E, K, V> ret = arr[index];
                arr[index] = null;
                size--;
                modCount++;
                return ret;
            }
            return null;
        }
        int hash = key1.hashCode() + key2.hashCode();
        int index = (hash & 0x7fffffff) % arrSize;
        Entry<E, K, V> e = arr[index];
        Entry<E, K, V> prev = e;
        while (e != null) {
            if (hash == e.hash && key1.equals(e.getKey1()) && key2.equals(e.getKey2())) {
                if (prev == e) {
                    // Entry is the bucket head.
                    arr[index] = e.next;
                } else {
                    prev.next = e.next;
                }
                size--;
                modCount++;
                return e;
            }
            prev = e;
            e = e.next;
        }
        return null;
    }

    /**
     * An instance is returned by the values() call.
     */
    class ValuesCollectionImpl extends AbstractCollection<V> {

        public int size() {
            return size;
        }

        public void clear() {
            TwoKeyHashMap.this.clear();
        }

        public boolean isEmpty() {
            return size == 0;
        }

        public Iterator<V> iterator() {
            return createValueCollectionIterator();
        }

        public boolean contains(Object obj) {
            // Delegates to AbstractMap.containsValue on the outer map.
            return containsValue(obj);
        }
    }

    // Values iterator implemented on top of the entry iterator.
    class ValueIteratorImpl implements Iterator<V> {
        private EntryIteratorImpl itr;

        ValueIteratorImpl() {
            super();
            this.itr = new EntryIteratorImpl();
        }

        public V next() {
            return itr.next().getValue();
        }

        public void remove() {
            itr.remove();
        }

        public boolean hasNext() {
            return itr.hasNext();
        }
    }
}
| |
package com.example.android.sunshine;
import android.annotation.TargetApi;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Build;
import android.os.Bundle;
import android.preference.ListPreference;
import android.preference.Preference;
import android.preference.PreferenceActivity;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.view.View;
import android.widget.ImageView;
import com.example.android.sunshine.data.WeatherContract;
import com.example.android.sunshine.sync.SunshineSyncAdapter;
import com.google.android.gms.location.places.Place;
import com.google.android.gms.location.places.ui.PlacePicker;
import com.google.android.gms.maps.model.LatLng;
/**
* A {@link PreferenceActivity} that presents a set of application settings.
* <p/>
* See <a href="http://developer.android.com/design/patterns/settings.html">
* Android Design: Settings</a> for design guidelines and the <a
* href="http://developer.android.com/guide/topics/ui/settings.html">Settings
* API Guide</a> for more information on developing a Settings UI.
*/
public class SettingsActivity extends PreferenceActivity
        implements Preference.OnPreferenceChangeListener, SharedPreferences.OnSharedPreferenceChangeListener {

    private static final String LOG_TAG = SettingsActivity.class.getSimpleName();

    // Request code identifying PlacePicker results in onActivityResult.
    protected final static int PLACE_PICKER_REQUEST = 9090;

    // "Powered by Google" footer, shown while a PlacePicker location is in use.
    private ImageView mAttribution;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Add 'general' preferences, defined in the XML file
        addPreferencesFromResource(R.xml.pref_general);

        // For all preferences, attach an OnPreferenceChangeListener so the UI summary can be
        // updated when the preference changes.
        bindPreferenceSummaryToValue(findPreference(getString(R.string.pref_location_key)));
        bindPreferenceSummaryToValue(findPreference(getString(R.string.pref_units_key)));
        bindPreferenceSummaryToValue(findPreference(getString(R.string.pref_art_pack_key)));

        // If we are using a PlacePicker location, we need to show attributions.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
            mAttribution = new ImageView(this);
            mAttribution.setImageResource(R.drawable.powered_by_google_light);

            if (!Utility.isLocationLatLonAvailable(this)) {
                mAttribution.setVisibility(View.GONE);
            }

            setListFooter(mAttribution);
        }
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Registers a shared preference change listener that gets notified when preferences change.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        sp.registerOnSharedPreferenceChangeListener(this);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Unregisters the shared preference change listener (avoids leaking this Activity).
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(this);
        sp.unregisterOnSharedPreferenceChangeListener(this);
    }

    /**
     * Attaches a listener so the summary is always updated with the preference value.
     * Also fires the listener once, to initialize the summary (so it shows up before the value
     * is changed.)
     */
    private void bindPreferenceSummaryToValue(Preference preference) {
        // Set the listener to watch for value changes.
        preference.setOnPreferenceChangeListener(this);

        // Trigger the listener immediately with the preference's current value.
        onPreferenceChange(preference,
                PreferenceManager
                        .getDefaultSharedPreferences(preference.getContext())
                        .getString(preference.getKey(), ""));
    }

    /**
     * Called before a preference value is committed; mirrors the new value into the summary.
     *
     * @param preference the preference being changed
     * @param value the proposed new value
     * @return true so the change is persisted
     */
    @Override
    public boolean onPreferenceChange(Preference preference, Object value) {
        setPreferenceSummary(preference, value);
        return true;
    }

    /**
     * Updates the given preference's summary to reflect {@code value}; the
     * location preference additionally surfaces unknown/invalid status text.
     *
     * @param preference the preference whose summary is updated
     * @param value the value to display
     */
    private void setPreferenceSummary(Preference preference, Object value) {
        String stringValue = value.toString();
        String key = preference.getKey();

        if (preference instanceof ListPreference) {
            // For list preferences, look up the correct display value in
            // the preference's 'entries' list (since they have separate labels/values).
            ListPreference listPreference = (ListPreference) preference;
            int prefIndex = listPreference.findIndexOfValue(stringValue);
            if (prefIndex >= 0) {
                preference.setSummary(listPreference.getEntries()[prefIndex]);
            }
        } else if (key.equals(getString(R.string.pref_location_key))) {
            @SunshineSyncAdapter.LocationSatus int status = Utility.getLocationStatus(this);
            switch (status) {
                case SunshineSyncAdapter.LOCATION_STATUS_OK:
                    preference.setSummary(stringValue);
                    break;
                case SunshineSyncAdapter.LOCATION_STATUS_UNKNOWN:
                    preference.setSummary(getString(R.string.pref_location_unknown_description, value.toString()));
                    break;
                case SunshineSyncAdapter.LOCATION_STATUS_INVALID:
                    preference.setSummary(getString(R.string.pref_location_error_description, value.toString()));
                    break;
            }
        } else {
            // For other preferences, set the summary to the value's simple string representation.
            preference.setSummary(stringValue);
        }
    }

    /**
     * Called after a preference has been persisted; starts syncs / UI refreshes
     * appropriate to the changed key.
     *
     * @param sharedPreferences the preferences store that changed
     * @param key the key of the changed preference
     */
    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        if (key.equals(getString(R.string.pref_location_key))) {
            // We have changed the location
            // Wipe out any potential PlacePicker latlng values so that we can use this text entry.
            SharedPreferences.Editor editor = sharedPreferences.edit();
            editor.remove(getString(R.string.pref_location_latitude));
            editor.remove(getString(R.string.pref_location_longitude));
            // apply() writes asynchronously; commit() would do disk I/O on the main thread.
            editor.apply();
            Utility.resetLocationStatus(this);
            SunshineSyncAdapter.syncImmediately(this);
        } else if (key.equals(getString(R.string.pref_units_key))) {
            // Units have changed. update lists of weather entries accordingly
            getContentResolver().notifyChange(WeatherContract.WeatherEntry.CONTENT_URI, null);
        } else if (key.equals(getString(R.string.pref_location_status_key))) {
            // Our locationStatus has changed. Update the summary accordingly
            Preference locationPreference = findPreference(getString(R.string.pref_location_key));
            bindPreferenceSummaryToValue(locationPreference);
        } else if (key.equals(getString(R.string.pref_art_pack_key))) {
            // Art pack have changed. Update lists of weather entries accordingly.
            getContentResolver().notifyChange(WeatherContract.WeatherEntry.CONTENT_URI, null);
        }
    }

    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    @Override
    public Intent getParentActivityIntent() {
        Intent parentIntent = super.getParentActivityIntent();
        // Guard against a missing parent activity declaration: the original
        // dereferenced the result unconditionally and could NPE.
        return parentIntent == null ? null : parentIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Check to see if the result is from our Place Picker intent
        if (requestCode == PLACE_PICKER_REQUEST) {
            // Make sure the request was successful
            if (resultCode == RESULT_OK) {
                Place place = PlacePicker.getPlace(data, this);
                String address = place.getAddress().toString();
                LatLng latLng = place.getLatLng();

                // If the provided place doesn't have an address,
                // we'll form a display-friendly string from the latlng values.
                if (TextUtils.isEmpty(address)) {
                    address = String.format("(%.2f, %.2f)", latLng.latitude, latLng.longitude);
                }

                SharedPreferences sharedPreferences =
                        PreferenceManager.getDefaultSharedPreferences(this);
                SharedPreferences.Editor editor = sharedPreferences.edit();
                editor.putString(getString(R.string.pref_location_key), address);

                // Also store the latitude and longitude so that
                // we can use these to get a precise result from our weather service.
                // We cannot expect the weather service to understand addresses that Google formats.
                editor.putFloat(getString(R.string.pref_location_latitude), (float) latLng.latitude);
                editor.putFloat(getString(R.string.pref_location_longitude), (float) latLng.longitude);
                // apply() writes asynchronously; commit() would do disk I/O on the main thread.
                editor.apply();

                // Tell the SyncAdapter that we've changed the location,
                // so that we can update our UI with new values.
                // We need to do this manually because we are responding
                // to the PlacePicker widget result here instead of allowing the
                // LocationEditTextPreference to handle these changes and invoke our callbacks.
                Preference locationPreference = findPreference(getString(R.string.pref_location_key));
                setPreferenceSummary(locationPreference, address);
                Utility.resetLocationStatus(this);
                SunshineSyncAdapter.syncImmediately(this);
            }
        } else {
            super.onActivityResult(requestCode, resultCode, data);
        }
    }
}
| |
/*
* Copyright (c) 2010, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.scim.common.listener;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.CarbonConstants;
import org.wso2.carbon.identity.core.AbstractIdentityUserOperationEventListener;
import org.wso2.carbon.identity.core.util.IdentityCoreConstants;
import org.wso2.carbon.identity.scim.common.group.SCIMGroupHandler;
import org.wso2.carbon.identity.scim.common.utils.IdentitySCIMException;
import org.wso2.carbon.user.core.UserCoreConstants;
import org.wso2.carbon.user.core.UserStoreException;
import org.wso2.carbon.user.core.UserStoreManager;
import org.wso2.carbon.user.core.util.UserCoreUtil;
import org.wso2.charon.core.schema.SCIMConstants;
import org.wso2.charon.core.util.AttributeUtil;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
/**
* This is to perform SCIM related operation on User Operations.
* For eg: when a user is created through UserAdmin API, we need to set some SCIM specific properties
* as user attributes.
*/
public class SCIMUserOperationListener extends AbstractIdentityUserOperationEventListener {
// Logger is effectively a constant; declared final so it cannot be reassigned.
private static final Log log = LogFactory.getLog(SCIMUserOperationListener.class);
/**
 * Returns the listener's position in the user-operation event chain: an
 * explicitly configured order wins; otherwise the default of 90 is used.
 */
@Override
public int getExecutionOrderId() {
    int configuredOrder = getOrderId();
    return configuredOrder == IdentityCoreConstants.EVENT_LISTENER_ORDER_ID ? 90 : configuredOrder;
}
/**
 * No SCIM-specific pre-authentication work; always lets the listener chain continue.
 */
@Override
public boolean doPreAuthenticate(String s, Object o, UserStoreManager userStoreManager)
        throws UserStoreException {
    return true;
}
/**
 * Blocks logins for accounts whose SCIM "active" claim is explicitly false.
 * A missing claim means the account was never flagged and is treated as active.
 * Note: the {@code authenticated} flag is intentionally not consulted, matching
 * the original behavior — this hook only vetoes inactive accounts.
 *
 * @return false only when the active claim is present and parses to false
 * @throws UserStoreException if reading the claim fails
 */
@Override
public boolean doPostAuthenticate(String userName, boolean authenticated,
                                  UserStoreManager userStoreManager)
        throws UserStoreException {
    if (!isEnable()) {
        return true;
    }
    try {
        String activeAttributeValue =
                userStoreManager.getUserClaimValue(userName, SCIMConstants.ACTIVE_URI, null);
        // Original code kept a dead 'isUserActive' local and a redundant
        // if/else ladder; this is the same decision expressed directly.
        if (activeAttributeValue != null && !Boolean.parseBoolean(activeAttributeValue)) {
            log.error("Trying to login from an inactive account of user: " + userName);
            return false;
        }
        return true;
    } catch (org.wso2.carbon.user.api.UserStoreException e) {
        throw new UserStoreException(e);
    }
}
/**
 * Hands the claim map to getSCIMAttributes before the user is persisted, but
 * only when this listener is enabled and the target store is SCIM-enabled.
 */
@Override
public boolean doPreAddUser(String userName, Object credential, String[] roleList,
                            Map<String, String> claims, String profile,
                            UserStoreManager userStoreManager) throws UserStoreException {
    if (!isEnable()) {
        return true;
    }
    try {
        if (!userStoreManager.isSCIMEnabled()) {
            return true;
        }
        // NOTE(review): reassigning the 'claims' parameter only has an effect
        // if getSCIMAttributes mutates the passed map in place; if it returns
        // a fresh map, that result is silently discarded here — confirm
        // against the getSCIMAttributes implementation (not visible in this
        // chunk).
        claims = this.getSCIMAttributes(userName, claims);
    } catch (org.wso2.carbon.user.api.UserStoreException e) {
        throw new UserStoreException(e);
    }
    return true;
}
/**
 * No SCIM-specific post-add work (attributes are handled in doPreAddUser);
 * always lets the listener chain continue.
 */
@Override
public boolean doPostAddUser(String userName, Object credential, String[] roleList,
                             Map<String, String> claims, String profile,
                             UserStoreManager userStoreManager)
        throws UserStoreException {
    return true;
}
/**
 * No SCIM-specific pre-update work; always lets the listener chain continue.
 */
@Override
public boolean doPreUpdateCredential(String s, Object o, Object o1,
                                     UserStoreManager userStoreManager)
        throws UserStoreException {
    return true;
}
/**
 * A self-service credential change needs the same SCIM last-modified update as
 * an admin-driven one, so this delegates to the admin variant.
 */
@Override
public boolean doPostUpdateCredential(String userName, Object credential, UserStoreManager userStoreManager)
        throws UserStoreException {
    return doPostUpdateCredentialByAdmin(userName, credential, userStoreManager);
}
/**
 * No SCIM-specific pre-update work; always lets the listener chain continue.
 */
@Override
public boolean doPreUpdateCredentialByAdmin(String s, Object o,
                                            UserStoreManager userStoreManager)
        throws UserStoreException {
    return true;
}
/**
 * Stamps the SCIM last-modified claim after a credential update, when the
 * listener is enabled and the store is SCIM-enabled.
 *
 * NOTE(review): the nested try/catch blocks both catch
 * org.wso2.carbon.user.api.UserStoreException. If the core UserStoreException
 * thrown by the inner catch extends the api one (hierarchy not visible here),
 * the outer catch re-wraps it a second time. Flattening to a single try/catch
 * would change the wrapping, so it is only flagged, not changed.
 */
@Override
public boolean doPostUpdateCredentialByAdmin(String userName, Object credential,
                                             UserStoreManager userStoreManager)
        throws UserStoreException {
    if (!isEnable()) {
        return true;
    }
    try {
        //update last-modified-date
        try {
            if (userStoreManager.isSCIMEnabled()) {
                Date date = new Date();
                String lastModifiedDate = AttributeUtil.formatDateTime(date);
                userStoreManager.setUserClaimValue(
                        userName, SCIMConstants.META_LAST_MODIFIED_URI, lastModifiedDate, null);
            }
        } catch (org.wso2.carbon.user.api.UserStoreException e) {
            log.debug(e);
            throw new UserStoreException(
                    "Error in obtaining user store information: isSCIMEnabled", e);
        }
        return true;
    } catch (org.wso2.carbon.user.api.UserStoreException e) {
        throw new UserStoreException(e);
    }
}
/**
 * No SCIM-specific pre-delete work; always lets the listener chain continue.
 */
@Override
public boolean doPreDeleteUser(String userName, UserStoreManager userStoreManager)
        throws UserStoreException {
    return true;
}
    /** No-op post-delete hook; SCIM needs no action after user deletion. */
    @Override
    public boolean doPostDeleteUser(String s, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook before a single claim value is set. */
    @Override
    public boolean doPreSetUserClaimValue(String s, String s1, String s2, String s3,
                                          UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook after a single claim value is set. */
    @Override
    public boolean doPostSetUserClaimValue(String s, UserStoreManager userStoreManager)
            throws UserStoreException {
        //TODO: need to set last modified time.
        return true;
    }
    /** No-op hook before multiple claim values are set. */
    @Override
    public boolean doPreSetUserClaimValues(String userName, Map<String, String> claims,
                                           String profileName, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
@Override
public boolean doPostSetUserClaimValues(String userName, Map<String, String> claims,
String profileName, UserStoreManager userStoreManager)
throws UserStoreException {
if (!isEnable()) {
return true;
}
String newUserName = claims.get("urn:scim:schemas:core:1.0:userName");
if(newUserName != null && !newUserName.isEmpty()){
userName = newUserName;
}
//update last-modified-date and proceed if scim enabled.
try {
if (userStoreManager.isSCIMEnabled()) {
Date date = new Date();
String lastModifiedDate = AttributeUtil.formatDateTime(date);
userStoreManager.setUserClaimValue(
userName, SCIMConstants.META_LAST_MODIFIED_URI, lastModifiedDate, null);
}
} catch (org.wso2.carbon.user.api.UserStoreException e) {
throw new UserStoreException("Error in retrieving claim values while provisioning " +
"'update user' operation.", e);
}
return true;
}
    /** No-op hook before multiple claim values are deleted. */
    @Override
    public boolean doPreDeleteUserClaimValues(String s, String[] strings, String s1,
                                              UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook after multiple claim values are deleted. */
    @Override
    public boolean doPostDeleteUserClaimValues(String s, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook before a single claim value is deleted. */
    @Override
    public boolean doPreDeleteUserClaimValue(String s, String s1, String s2,
                                             UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook after a single claim value is deleted. */
    @Override
    public boolean doPostDeleteUserClaimValue(String s, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op pre-add-role hook; SCIM attributes are created in doPostAddRole. */
    @Override
    public boolean doPreAddRole(String s, String[] strings,
                                org.wso2.carbon.user.api.Permission[] permissions,
                                UserStoreManager userStoreManager) throws UserStoreException {
        return true;
    }
@Override
public boolean doPostAddRole(String roleName, String[] userList,
org.wso2.carbon.user.api.Permission[] permissions,
UserStoreManager userStoreManager) throws UserStoreException {
if (!isEnable()) {
return true;
}
try {
SCIMGroupHandler scimGroupHandler = new SCIMGroupHandler(userStoreManager.getTenantId());
String domainName = UserCoreUtil.getDomainName(userStoreManager.getRealmConfiguration());
if (domainName == null) {
domainName = UserCoreConstants.PRIMARY_DEFAULT_DOMAIN_NAME;
}
String roleNameWithDomain = domainName + CarbonConstants.DOMAIN_SEPARATOR + roleName;
//query role name from identity table
try {
if (!scimGroupHandler.isGroupExisting(roleNameWithDomain)) {
//if no attributes - i.e: group added via mgt console, not via SCIM endpoint
//add META
scimGroupHandler.addMandatoryAttributes(roleNameWithDomain);
}
} catch (IdentitySCIMException e) {
throw new UserStoreException("Error retrieving group information from SCIM Tables.", e);
}
return true;
} catch (org.wso2.carbon.user.api.UserStoreException e) {
throw new UserStoreException(e);
}
}
@Override
public boolean doPreDeleteRole(String roleName, UserStoreManager userStoreManager)
throws UserStoreException {
if (!isEnable()) {
return true;
}
try {
SCIMGroupHandler scimGroupHandler = new SCIMGroupHandler(userStoreManager.getTenantId());
String domainName = UserCoreUtil.getDomainName(userStoreManager.getRealmConfiguration());
if (domainName == null) {
domainName = UserCoreConstants.PRIMARY_DEFAULT_DOMAIN_NAME;
}
String roleNameWithDomain = domainName + CarbonConstants.DOMAIN_SEPARATOR + roleName;
try {
//delete group attributes - no need to check existence here,
//since it is checked in below method.
scimGroupHandler.deleteGroupAttributes(roleNameWithDomain);
} catch (IdentitySCIMException e) {
throw new UserStoreException("Error retrieving group information from SCIM Tables.", e);
}
return true;
} catch (org.wso2.carbon.user.api.UserStoreException e) {
throw new UserStoreException(e);
}
}
    /** No-op post-delete-role hook; SCIM cleanup happened in doPreDeleteRole. */
    @Override
    public boolean doPostDeleteRole(String roleName, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op pre-rename hook; SCIM tables are updated in doPostUpdateRoleName. */
    @Override
    public boolean doPreUpdateRoleName(String s, String s1, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
@Override
public boolean doPostUpdateRoleName(String roleName, String newRoleName,
UserStoreManager userStoreManager)
throws UserStoreException {
if (!isEnable()) {
return true;
}
try {
//TODO:set last update date
SCIMGroupHandler scimGroupHandler = new SCIMGroupHandler(userStoreManager.getTenantId());
String domainName = UserCoreUtil.getDomainName(userStoreManager.getRealmConfiguration());
if (domainName == null) {
domainName = UserCoreConstants.PRIMARY_DEFAULT_DOMAIN_NAME;
}
String roleNameWithDomain = domainName + CarbonConstants.DOMAIN_SEPARATOR + roleName;
String newRoleNameWithDomain = domainName + CarbonConstants.DOMAIN_SEPARATOR + newRoleName;
try {
scimGroupHandler.updateRoleName(roleNameWithDomain, newRoleNameWithDomain);
} catch (IdentitySCIMException e) {
throw new UserStoreException("Error updating group information in SCIM Tables.", e);
}
return true;
} catch (org.wso2.carbon.user.api.UserStoreException e) {
throw new UserStoreException(e);
}
}
    /** No-op hook before a role's user list is updated. */
    @Override
    public boolean doPreUpdateUserListOfRole(String s, String[] strings, String[] strings1,
                                             UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook after a role's user list is updated. */
    @Override
    public boolean doPostUpdateUserListOfRole(String roleName, String[] deletedUsers,
                                              String[] newUsers, UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook before a user's role list is updated. */
    @Override
    public boolean doPreUpdateRoleListOfUser(String s, String[] strings, String[] strings1,
                                             UserStoreManager userStoreManager)
            throws UserStoreException {
        return true;
    }
    /** No-op hook after a user's role list is updated. */
    @Override
    public boolean doPostUpdateRoleListOfUser(String s, String[] strings, String[] strings1,
                                              UserStoreManager userStoreManager)
            throws UserStoreException {
        //TODO:
        return true;
    }
public Map<String, String> getSCIMAttributes(String userName, Map<String, String> claimsMap) {
Map<String, String> attributes = null;
if (MapUtils.isNotEmpty(claimsMap)) {
attributes = claimsMap;
} else {
attributes = new HashMap<>();
}
if (!attributes.containsKey(SCIMConstants.ID_URI)) {
String id = UUID.randomUUID().toString();
attributes.put(SCIMConstants.ID_URI, id);
}
Date date = new Date();
String createdDate = AttributeUtil.formatDateTime(date);
attributes.put(SCIMConstants.META_CREATED_URI, createdDate);
attributes.put(SCIMConstants.META_LAST_MODIFIED_URI, createdDate);
attributes.put(SCIMConstants.USER_NAME_URI, userName);
return attributes;
//TODO: add other optional attributes like location etc.
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.replication.ReplicationAdmin;
import org.apache.hadoop.hbase.client.replication.ReplicationPeerConfigUtil;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.testclassification.FlakeyTests;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
import org.apache.hadoop.hbase.zookeeper.ZKWatcher;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ReplicationProtos;
/**
 * Tests per-table / per-column-family replication across three mini clusters:
 * parsing of table-CF config strings, protobuf conversion of table-CF maps,
 * and end-to-end replication scoped to selected tables and column families.
 */
@Category({FlakeyTests.class, LargeTests.class})
public class TestPerTableCFReplication {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestPerTableCFReplication.class);
  private static final Logger LOG = LoggerFactory.getLogger(TestPerTableCFReplication.class);
  private static Configuration conf1;
  private static Configuration conf2;
  private static Configuration conf3;
  private static HBaseTestingUtility utility1;
  private static HBaseTestingUtility utility2;
  private static HBaseTestingUtility utility3;
  private static final long SLEEP_TIME = 500;
  private static final int NB_RETRIES = 100;
  private static final TableName tableName = TableName.valueOf("test");
  private static final TableName tabAName = TableName.valueOf("TA");
  private static final TableName tabBName = TableName.valueOf("TB");
  private static final TableName tabCName = TableName.valueOf("TC");
  private static final byte[] famName = Bytes.toBytes("f");
  private static final byte[] f1Name = Bytes.toBytes("f1");
  private static final byte[] f2Name = Bytes.toBytes("f2");
  private static final byte[] f3Name = Bytes.toBytes("f3");
  private static final byte[] row1 = Bytes.toBytes("row1");
  private static final byte[] row2 = Bytes.toBytes("row2");
  private static final byte[] noRepfamName = Bytes.toBytes("norep");
  private static final byte[] val = Bytes.toBytes("myval");
  private static HTableDescriptor table;
  private static HTableDescriptor tabA;
  private static HTableDescriptor tabB;
  private static HTableDescriptor tabC;
  @Rule
  public TestName name = new TestName();

  /**
   * Boots one shared mini-ZK cluster plus three mini HBase clusters (distinct
   * ZK znode parents /1, /2, /3) and prepares the table descriptors.
   */
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    conf1 = HBaseConfiguration.create();
    conf1.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/1");
    // smaller block size and capacity to trigger more operations
    // and test them
    conf1.setInt("hbase.regionserver.hlog.blocksize", 1024*20);
    conf1.setInt("replication.source.size.capacity", 1024);
    conf1.setLong("replication.source.sleepforretries", 100);
    conf1.setInt("hbase.regionserver.maxlogs", 10);
    conf1.setLong("hbase.master.logcleaner.ttl", 10);
    conf1.setLong(HConstants.THREAD_WAKE_FREQUENCY, 100);
    conf1.setStrings(CoprocessorHost.USER_REGION_COPROCESSOR_CONF_KEY,
        "org.apache.hadoop.hbase.replication.TestMasterReplication$CoprocessorCounter");
    utility1 = new HBaseTestingUtility(conf1);
    utility1.startMiniZKCluster();
    MiniZooKeeperCluster miniZK = utility1.getZkCluster();
    // NOTE(review): these ZKWatcher instances are neither stored nor closed;
    // they live for the duration of the JVM. Kept as-is to preserve test
    // behavior, but they are a (benign, test-scoped) resource leak.
    new ZKWatcher(conf1, "cluster1", null, true);
    conf2 = new Configuration(conf1);
    conf2.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/2");
    conf3 = new Configuration(conf1);
    conf3.set(HConstants.ZOOKEEPER_ZNODE_PARENT, "/3");
    utility2 = new HBaseTestingUtility(conf2);
    utility2.setZkCluster(miniZK);
    // Fixed copy-paste bug: the watcher for conf2 was named "cluster3".
    new ZKWatcher(conf2, "cluster2", null, true);
    utility3 = new HBaseTestingUtility(conf3);
    utility3.setZkCluster(miniZK);
    new ZKWatcher(conf3, "cluster3", null, true);
    // "test": one replicated family 'f' and one non-replicated family 'norep'.
    table = new HTableDescriptor(tableName);
    HColumnDescriptor fam = new HColumnDescriptor(famName);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    table.addFamily(fam);
    fam = new HColumnDescriptor(noRepfamName);
    table.addFamily(fam);
    // TA/TB/TC each carry three globally-replicated families f1, f2, f3.
    tabA = new HTableDescriptor(tabAName);
    fam = new HColumnDescriptor(f1Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabA.addFamily(fam);
    fam = new HColumnDescriptor(f2Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabA.addFamily(fam);
    fam = new HColumnDescriptor(f3Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabA.addFamily(fam);
    tabB = new HTableDescriptor(tabBName);
    fam = new HColumnDescriptor(f1Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabB.addFamily(fam);
    fam = new HColumnDescriptor(f2Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabB.addFamily(fam);
    fam = new HColumnDescriptor(f3Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabB.addFamily(fam);
    tabC = new HTableDescriptor(tabCName);
    fam = new HColumnDescriptor(f1Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabC.addFamily(fam);
    fam = new HColumnDescriptor(f2Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabC.addFamily(fam);
    fam = new HColumnDescriptor(f3Name);
    fam.setScope(HConstants.REPLICATION_SCOPE_GLOBAL);
    tabC.addFamily(fam);
    utility1.startMiniCluster();
    utility2.startMiniCluster();
    utility3.startMiniCluster();
  }

  /** Shuts the three mini clusters down in reverse start order. */
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    utility3.shutdownMiniCluster();
    utility2.shutdownMiniCluster();
    utility1.shutdownMiniCluster();
  }

  /** Verifies parsing of the "tab1;tab2:cf1;tab3:cf1,cf3" config format. */
  @Test
  public void testParseTableCFsFromConfig() {
    Map<TableName, List<String>> tabCFsMap = null;
    // 1. null or empty string, result should be null
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig(null);
    assertEquals(null, tabCFsMap);
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig("");
    assertEquals(null, tabCFsMap);
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig("   ");
    assertEquals(null, tabCFsMap);
    final TableName tableName1 = TableName.valueOf(name.getMethodName() + "1");
    final TableName tableName2 = TableName.valueOf(name.getMethodName() + "2");
    final TableName tableName3 = TableName.valueOf(name.getMethodName() + "3");
    // 2. single table: "tableName1" / "tableName2:cf1" / "tableName3:cf1,cf3"
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig(tableName1.getNameAsString());
    assertEquals(1, tabCFsMap.size()); // only one table
    assertTrue(tabCFsMap.containsKey(tableName1)); // its table name is "tableName1"
    assertFalse(tabCFsMap.containsKey(tableName2)); // not other table
    assertEquals(null, tabCFsMap.get(tableName1)); // null cf-list,
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig(tableName2 + ":cf1");
    assertEquals(1, tabCFsMap.size()); // only one table
    assertTrue(tabCFsMap.containsKey(tableName2)); // its table name is "tableName2"
    assertFalse(tabCFsMap.containsKey(tableName1)); // not other table
    assertEquals(1, tabCFsMap.get(tableName2).size()); // cf-list contains only 1 cf
    assertEquals("cf1", tabCFsMap.get(tableName2).get(0));// the only cf is "cf1"
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig(tableName3 + " : cf1 , cf3");
    assertEquals(1, tabCFsMap.size()); // only one table
    assertTrue(tabCFsMap.containsKey(tableName3)); // its table name is "tableName2"
    assertFalse(tabCFsMap.containsKey(tableName1)); // not other table
    assertEquals(2, tabCFsMap.get(tableName3).size()); // cf-list contains 2 cf
    assertTrue(tabCFsMap.get(tableName3).contains("cf1"));// contains "cf1"
    assertTrue(tabCFsMap.get(tableName3).contains("cf3"));// contains "cf3"
    // 3. multiple tables: "tableName1 ; tableName2:cf1 ; tableName3:cf1,cf3"
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig(tableName1 + " ; " + tableName2
        + ":cf1 ; " + tableName3 + ":cf1,cf3");
    // 3.1 contains 3 tables : "tableName1", "tableName2" and "tableName3"
    assertEquals(3, tabCFsMap.size());
    assertTrue(tabCFsMap.containsKey(tableName1));
    assertTrue(tabCFsMap.containsKey(tableName2));
    assertTrue(tabCFsMap.containsKey(tableName3));
    // 3.2 table "tab1" : null cf-list
    assertEquals(null, tabCFsMap.get(tableName1));
    // 3.3 table "tab2" : cf-list contains a single cf "cf1"
    assertEquals(1, tabCFsMap.get(tableName2).size());
    assertEquals("cf1", tabCFsMap.get(tableName2).get(0));
    // 3.4 table "tab3" : cf-list contains "cf1" and "cf3"
    assertEquals(2, tabCFsMap.get(tableName3).size());
    assertTrue(tabCFsMap.get(tableName3).contains("cf1"));
    assertTrue(tabCFsMap.get(tableName3).contains("cf3"));
    // 4. contiguous or additional ";"(table delimiter) or ","(cf delimiter) can be tolerated
    // still use the example of multiple tables: "tableName1 ; tableName2:cf1 ; tableName3:cf1,cf3"
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig(
      tableName1 + " ; ; " + tableName2 + ":cf1 ; " + tableName3 + ":cf1,,cf3 ;");
    // 4.1 contains 3 tables : "tableName1", "tableName2" and "tableName3"
    assertEquals(3, tabCFsMap.size());
    assertTrue(tabCFsMap.containsKey(tableName1));
    assertTrue(tabCFsMap.containsKey(tableName2));
    assertTrue(tabCFsMap.containsKey(tableName3));
    // 4.2 table "tab1" : null cf-list
    assertEquals(null, tabCFsMap.get(tableName1));
    // 4.3 table "tab2" : cf-list contains a single cf "cf1"
    assertEquals(1, tabCFsMap.get(tableName2).size());
    assertEquals("cf1", tabCFsMap.get(tableName2).get(0));
    // 4.4 table "tab3" : cf-list contains "cf1" and "cf3"
    assertEquals(2, tabCFsMap.get(tableName3).size());
    assertTrue(tabCFsMap.get(tableName3).contains("cf1"));
    assertTrue(tabCFsMap.get(tableName3).contains("cf3"));
    // 5. invalid format "tableName1:tt:cf1 ; tableName2::cf1 ; tableName3:cf1,cf3"
    //    "tableName1:tt:cf1" and "tableName2::cf1" are invalid and will be ignored totally
    tabCFsMap = ReplicationPeerConfigUtil.parseTableCFsFromConfig(
      tableName1 + ":tt:cf1 ; " + tableName2 + "::cf1 ; " + tableName3 + ":cf1,cf3");
    // 5.1 no "tableName1" and "tableName2", only "tableName3"
    assertEquals(1, tabCFsMap.size()); // only one table
    assertFalse(tabCFsMap.containsKey(tableName1));
    assertFalse(tabCFsMap.containsKey(tableName2));
    assertTrue(tabCFsMap.containsKey(tableName3));
    // 5.2 table "tableName3" : cf-list contains "cf1" and "cf3"
    assertEquals(2, tabCFsMap.get(tableName3).size());
    assertTrue(tabCFsMap.get(tableName3).contains("cf1"));
    assertTrue(tabCFsMap.get(tableName3).contains("cf3"));
  }

  /** Round-trips table-CF maps through the protobuf TableCF representation. */
  @Test
  public void testTableCFsHelperConverter() {
    ReplicationProtos.TableCF[] tableCFs = null;
    Map<TableName, List<String>> tabCFsMap = null;
    // 1. null or empty string, result should be null
    assertNull(ReplicationPeerConfigUtil.convert(tabCFsMap));
    tabCFsMap = new HashMap<>();
    tableCFs = ReplicationPeerConfigUtil.convert(tabCFsMap);
    assertEquals(0, tableCFs.length);
    final TableName tableName1 = TableName.valueOf(name.getMethodName() + "1");
    final TableName tableName2 = TableName.valueOf(name.getMethodName() + "2");
    final TableName tableName3 = TableName.valueOf(name.getMethodName() + "3");
    // 2. single table: "tab1" / "tab2:cf1" / "tab3:cf1,cf3"
    tabCFsMap.clear();
    tabCFsMap.put(tableName1, null);
    tableCFs = ReplicationPeerConfigUtil.convert(tabCFsMap);
    assertEquals(1, tableCFs.length); // only one table
    assertEquals(tableName1.toString(),
        tableCFs[0].getTableName().getQualifier().toStringUtf8());
    assertEquals(0, tableCFs[0].getFamiliesCount());
    tabCFsMap.clear();
    tabCFsMap.put(tableName2, new ArrayList<>());
    tabCFsMap.get(tableName2).add("cf1");
    tableCFs = ReplicationPeerConfigUtil.convert(tabCFsMap);
    assertEquals(1, tableCFs.length); // only one table
    assertEquals(tableName2.toString(),
        tableCFs[0].getTableName().getQualifier().toStringUtf8());
    assertEquals(1, tableCFs[0].getFamiliesCount());
    assertEquals("cf1", tableCFs[0].getFamilies(0).toStringUtf8());
    tabCFsMap.clear();
    tabCFsMap.put(tableName3, new ArrayList<>());
    tabCFsMap.get(tableName3).add("cf1");
    tabCFsMap.get(tableName3).add("cf3");
    tableCFs = ReplicationPeerConfigUtil.convert(tabCFsMap);
    assertEquals(1, tableCFs.length);
    assertEquals(tableName3.toString(),
        tableCFs[0].getTableName().getQualifier().toStringUtf8());
    assertEquals(2, tableCFs[0].getFamiliesCount());
    assertEquals("cf1", tableCFs[0].getFamilies(0).toStringUtf8());
    assertEquals("cf3", tableCFs[0].getFamilies(1).toStringUtf8());
    tabCFsMap.clear();
    tabCFsMap.put(tableName1, null);
    tabCFsMap.put(tableName2, new ArrayList<>());
    tabCFsMap.get(tableName2).add("cf1");
    tabCFsMap.put(tableName3, new ArrayList<>());
    tabCFsMap.get(tableName3).add("cf1");
    tabCFsMap.get(tableName3).add("cf3");
    tableCFs = ReplicationPeerConfigUtil.convert(tabCFsMap);
    assertEquals(3, tableCFs.length);
    assertNotNull(ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName1.toString()));
    assertNotNull(ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName2.toString()));
    assertNotNull(ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName3.toString()));
    assertEquals(0,
      ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName1.toString()).getFamiliesCount());
    assertEquals(1, ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName2.toString())
        .getFamiliesCount());
    assertEquals("cf1", ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName2.toString())
        .getFamilies(0).toStringUtf8());
    assertEquals(2, ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName3.toString())
        .getFamiliesCount());
    assertEquals("cf1", ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName3.toString())
        .getFamilies(0).toStringUtf8());
    assertEquals("cf3", ReplicationPeerConfigUtil.getTableCF(tableCFs, tableName3.toString())
        .getFamilies(1).toStringUtf8());
    tabCFsMap = ReplicationPeerConfigUtil.convert2Map(tableCFs);
    assertEquals(3, tabCFsMap.size());
    assertTrue(tabCFsMap.containsKey(tableName1));
    assertTrue(tabCFsMap.containsKey(tableName2));
    assertTrue(tabCFsMap.containsKey(tableName3));
    // 3.2 table "tab1" : null cf-list
    assertEquals(null, tabCFsMap.get(tableName1));
    // 3.3 table "tab2" : cf-list contains a single cf "cf1"
    assertEquals(1, tabCFsMap.get(tableName2).size());
    assertEquals("cf1", tabCFsMap.get(tableName2).get(0));
    // 3.4 table "tab3" : cf-list contains "cf1" and "cf3"
    assertEquals(2, tabCFsMap.get(tableName3).size());
    assertTrue(tabCFsMap.get(tableName3).contains("cf1"));
    assertTrue(tabCFsMap.get(tableName3).contains("cf3"));
  }

  /**
   * End-to-end: cluster1 replicates to cluster2 and cluster3 with different
   * table-CF scopes, first as configured at addPeer time, then after the
   * scopes are changed via setPeerTableCFs.
   */
  @Test
  public void testPerTableCFReplication() throws Exception {
    LOG.info("testPerTableCFReplication");
    ReplicationAdmin replicationAdmin = new ReplicationAdmin(conf1);
    Connection connection1 = ConnectionFactory.createConnection(conf1);
    Connection connection2 = ConnectionFactory.createConnection(conf2);
    Connection connection3 = ConnectionFactory.createConnection(conf3);
    try {
      Admin admin1 = connection1.getAdmin();
      Admin admin2 = connection2.getAdmin();
      Admin admin3 = connection3.getAdmin();
      admin1.createTable(tabA);
      admin1.createTable(tabB);
      admin1.createTable(tabC);
      admin2.createTable(tabA);
      admin2.createTable(tabB);
      admin2.createTable(tabC);
      admin3.createTable(tabA);
      admin3.createTable(tabB);
      admin3.createTable(tabC);
      Table htab1A = connection1.getTable(tabAName);
      Table htab2A = connection2.getTable(tabAName);
      Table htab3A = connection3.getTable(tabAName);
      Table htab1B = connection1.getTable(tabBName);
      Table htab2B = connection2.getTable(tabBName);
      Table htab3B = connection3.getTable(tabBName);
      Table htab1C = connection1.getTable(tabCName);
      Table htab2C = connection2.getTable(tabCName);
      Table htab3C = connection3.getTable(tabCName);
      // A. add cluster2/cluster3 as peers to cluster1
      ReplicationPeerConfig rpc2 = new ReplicationPeerConfig();
      rpc2.setClusterKey(utility2.getClusterKey());
      rpc2.setReplicateAllUserTables(false);
      Map<TableName, List<String>> tableCFs = new HashMap<>();
      tableCFs.put(tabCName, null);
      tableCFs.put(tabBName, new ArrayList<>());
      tableCFs.get(tabBName).add("f1");
      tableCFs.get(tabBName).add("f3");
      replicationAdmin.addPeer("2", rpc2, tableCFs);
      ReplicationPeerConfig rpc3 = new ReplicationPeerConfig();
      rpc3.setClusterKey(utility3.getClusterKey());
      rpc3.setReplicateAllUserTables(false);
      tableCFs.clear();
      tableCFs.put(tabAName, null);
      tableCFs.put(tabBName, new ArrayList<>());
      tableCFs.get(tabBName).add("f1");
      tableCFs.get(tabBName).add("f2");
      replicationAdmin.addPeer("3", rpc3, tableCFs);
      // A1. tableA can only replicated to cluster3
      putAndWaitWithFamily(row1, f1Name, htab1A, htab3A);
      ensureRowNotReplicated(row1, f1Name, htab2A);
      deleteAndWaitWithFamily(row1, f1Name, htab1A, htab3A);
      putAndWaitWithFamily(row1, f2Name, htab1A, htab3A);
      ensureRowNotReplicated(row1, f2Name, htab2A);
      deleteAndWaitWithFamily(row1, f2Name, htab1A, htab3A);
      putAndWaitWithFamily(row1, f3Name, htab1A, htab3A);
      ensureRowNotReplicated(row1, f3Name, htab2A);
      deleteAndWaitWithFamily(row1, f3Name, htab1A, htab3A);
      // A2. cf 'f1' of tableB can replicated to both cluster2 and cluster3
      putAndWaitWithFamily(row1, f1Name, htab1B, htab2B, htab3B);
      deleteAndWaitWithFamily(row1, f1Name, htab1B, htab2B, htab3B);
      //  cf 'f2' of tableB can only replicated to cluster3
      putAndWaitWithFamily(row1, f2Name, htab1B, htab3B);
      ensureRowNotReplicated(row1, f2Name, htab2B);
      deleteAndWaitWithFamily(row1, f2Name, htab1B, htab3B);
      //  cf 'f3' of tableB can only replicated to cluster2
      putAndWaitWithFamily(row1, f3Name, htab1B, htab2B);
      ensureRowNotReplicated(row1, f3Name, htab3B);
      deleteAndWaitWithFamily(row1, f3Name, htab1B, htab2B);
      // A3. tableC can only replicated to cluster2
      putAndWaitWithFamily(row1, f1Name, htab1C, htab2C);
      ensureRowNotReplicated(row1, f1Name, htab3C);
      deleteAndWaitWithFamily(row1, f1Name, htab1C, htab2C);
      putAndWaitWithFamily(row1, f2Name, htab1C, htab2C);
      ensureRowNotReplicated(row1, f2Name, htab3C);
      deleteAndWaitWithFamily(row1, f2Name, htab1C, htab2C);
      putAndWaitWithFamily(row1, f3Name, htab1C, htab2C);
      ensureRowNotReplicated(row1, f3Name, htab3C);
      deleteAndWaitWithFamily(row1, f3Name, htab1C, htab2C);
      // B. change peers' replicable table-cf config
      tableCFs.clear();
      tableCFs.put(tabAName, new ArrayList<>());
      tableCFs.get(tabAName).add("f1");
      tableCFs.get(tabAName).add("f2");
      tableCFs.put(tabCName, new ArrayList<>());
      tableCFs.get(tabCName).add("f2");
      tableCFs.get(tabCName).add("f3");
      replicationAdmin.setPeerTableCFs("2", tableCFs);
      tableCFs.clear();
      tableCFs.put(tabBName, null);
      tableCFs.put(tabCName, new ArrayList<>());
      tableCFs.get(tabCName).add("f3");
      replicationAdmin.setPeerTableCFs("3", tableCFs);
      // B1. cf 'f1' of tableA can only replicated to cluster2
      putAndWaitWithFamily(row2, f1Name, htab1A, htab2A);
      ensureRowNotReplicated(row2, f1Name, htab3A);
      deleteAndWaitWithFamily(row2, f1Name, htab1A, htab2A);
      //     cf 'f2' of tableA can only replicated to cluster2
      putAndWaitWithFamily(row2, f2Name, htab1A, htab2A);
      ensureRowNotReplicated(row2, f2Name, htab3A);
      deleteAndWaitWithFamily(row2, f2Name, htab1A, htab2A);
      //     cf 'f3' of tableA isn't replicable to either cluster2 or cluster3
      putAndWaitWithFamily(row2, f3Name, htab1A);
      ensureRowNotReplicated(row2, f3Name, htab2A, htab3A);
      deleteAndWaitWithFamily(row2, f3Name, htab1A);
      // B2. tableB can only replicated to cluster3
      putAndWaitWithFamily(row2, f1Name, htab1B, htab3B);
      ensureRowNotReplicated(row2, f1Name, htab2B);
      deleteAndWaitWithFamily(row2, f1Name, htab1B, htab3B);
      putAndWaitWithFamily(row2, f2Name, htab1B, htab3B);
      ensureRowNotReplicated(row2, f2Name, htab2B);
      deleteAndWaitWithFamily(row2, f2Name, htab1B, htab3B);
      putAndWaitWithFamily(row2, f3Name, htab1B, htab3B);
      ensureRowNotReplicated(row2, f3Name, htab2B);
      deleteAndWaitWithFamily(row2, f3Name, htab1B, htab3B);
      // B3. cf 'f1' of tableC non-replicable to either cluster
      putAndWaitWithFamily(row2, f1Name, htab1C);
      ensureRowNotReplicated(row2, f1Name, htab2C, htab3C);
      deleteAndWaitWithFamily(row2, f1Name, htab1C);
      //     cf 'f2' of tableC can only replicated to cluster2
      putAndWaitWithFamily(row2, f2Name, htab1C, htab2C);
      ensureRowNotReplicated(row2, f2Name, htab3C);
      deleteAndWaitWithFamily(row2, f2Name, htab1C, htab2C);
      //     cf 'f3' of tableC can replicated to cluster2 and cluster3
      putAndWaitWithFamily(row2, f3Name, htab1C, htab2C, htab3C);
      deleteAndWaitWithFamily(row2, f3Name, htab1C, htab2C, htab3C);
    } finally {
      // Close the admin as well (was previously leaked).
      replicationAdmin.close();
      connection1.close();
      connection2.close();
      connection3.close();
    }
  }

  /** Asserts the row/family is absent from every given table. */
  private void ensureRowNotReplicated(byte[] row, byte[] fam, Table... tables) throws IOException {
    Get get = new Get(row);
    get.addFamily(fam);
    for (Table table : tables) {
      Result res = table.get(get);
      assertEquals(0, res.size());
    }
  }

  /** Deletes the row/family on source and waits until it vanishes on all targets. */
  private void deleteAndWaitWithFamily(byte[] row, byte[] fam,
      Table source, Table... targets)
  throws Exception {
    Delete del = new Delete(row);
    del.addFamily(fam);
    source.delete(del);
    Get get = new Get(row);
    get.addFamily(fam);
    for (int i = 0; i < NB_RETRIES; i++) {
      if (i==NB_RETRIES-1) {
        fail("Waited too much time for del replication");
      }
      boolean removedFromAll = true;
      for (Table target : targets) {
        Result res = target.get(get);
        if (res.size() >= 1) {
          LOG.info("Row not deleted");
          removedFromAll = false;
          break;
        }
      }
      if (removedFromAll) {
        break;
      } else {
        Thread.sleep(SLEEP_TIME);
      }
    }
  }

  /** Puts the row/family on source and waits until it appears on all targets. */
  private void putAndWaitWithFamily(byte[] row, byte[] fam,
      Table source, Table... targets)
  throws Exception {
    Put put = new Put(row);
    put.addColumn(fam, row, val);
    source.put(put);
    Get get = new Get(row);
    get.addFamily(fam);
    for (int i = 0; i < NB_RETRIES; i++) {
      if (i==NB_RETRIES-1) {
        fail("Waited too much time for put replication");
      }
      boolean replicatedToAll = true;
      for (Table target : targets) {
        Result res = target.get(get);
        if (res.isEmpty()) {
          LOG.info("Row not available");
          replicatedToAll = false;
          break;
        } else {
          assertEquals(1, res.size());
          assertArrayEquals(val, res.value());
        }
      }
      if (replicatedToAll) {
        break;
      } else {
        Thread.sleep(SLEEP_TIME);
      }
    }
  }
}
| |
package org.strategoxt.lang;
import org.spoofax.interpreter.terms.IStrategoTerm;
/**
 * An abstract strategy class. Inheritor classes should implement one
 * of the invoke() overloads or invokeDynamic() as appropriate.
 *
 * @see DynamicStrategy
 *      An abstract strategy class where only invokeDynamic() needs to
 *      be implemented.
 *
 * @author Lennart Kats <lennart add lclnet.nl>
 */
public abstract class Strategy {

    // Lazily-initialized display name; defaults to the class's simple name
    // (see getName()). Transient: the cached name need not be serialized.
    private transient String name;

    /**
     * Invoke this strategy using a dynamic number of arguments.
     *
     * Implementing classes should override this method
     * and the corresponding <code>invoke()</code> method
     * (if applicable).
     *
     * @param context The evaluation context.
     * @param current The current term the strategy is applied to.
     * @param s       Strategy arguments (0..5 are dispatched statically).
     * @param t       Term arguments (0..5 are dispatched statically).
     * @return The resulting term, or null in case of failure.
     */
    public IStrategoTerm invokeDynamic(Context context, IStrategoTerm current,
            Strategy[] s, IStrategoTerm[] t) {
        // Dispatch to the statically-typed invoke() overload matching the two
        // argument counts. Anything beyond 5 strategy or 5 term arguments has
        // no overload and falls through to the exception below.
        switch (s.length) {
            case 0:
                switch (t.length) {
                    case 0: return invoke(context, current);
                    case 1: return invoke(context, current, t[0]);
                    case 2: return invoke(context, current, t[0], t[1]);
                    case 3: return invoke(context, current, t[0], t[1], t[2]);
                    case 4: return invoke(context, current, t[0], t[1], t[2], t[3]);
                    case 5: return invoke(context, current, t[0], t[1], t[2], t[3], t[4]);
                }
                break;
            case 1:
                switch (t.length) {
                    case 0: return invoke(context, current, s[0]);
                    case 1: return invoke(context, current, s[0], t[0]);
                    case 2: return invoke(context, current, s[0], t[0], t[1]);
                    case 3: return invoke(context, current, s[0], t[0], t[1], t[2]);
                    case 4: return invoke(context, current, s[0], t[0], t[1], t[2], t[3]);
                    case 5: return invoke(context, current, s[0], t[0], t[1], t[2], t[3], t[4]);
                }
                break;
            case 2:
                switch (t.length) {
                    case 0: return invoke(context, current, s[0], s[1]);
                    case 1: return invoke(context, current, s[0], s[1], t[0]);
                    case 2: return invoke(context, current, s[0], s[1], t[0], t[1]);
                    case 3: return invoke(context, current, s[0], s[1], t[0], t[1], t[2]);
                    case 4: return invoke(context, current, s[0], s[1], t[0], t[1], t[2], t[3]);
                    case 5: return invoke(context, current, s[0], s[1], t[0], t[1], t[2], t[3], t[4]);
                }
                break;
            case 3:
                switch (t.length) {
                    case 0: return invoke(context, current, s[0], s[1], s[2]);
                    case 1: return invoke(context, current, s[0], s[1], s[2], t[0]);
                    case 2: return invoke(context, current, s[0], s[1], s[2], t[0], t[1]);
                    case 3: return invoke(context, current, s[0], s[1], s[2], t[0], t[1], t[2]);
                    case 4: return invoke(context, current, s[0], s[1], s[2], t[0], t[1], t[2], t[3]);
                    case 5: return invoke(context, current, s[0], s[1], s[2], t[0], t[1], t[2], t[3], t[4]);
                }
                break;
            case 4:
                switch (t.length) {
                    case 0: return invoke(context, current, s[0], s[1], s[2], s[3]);
                    case 1: return invoke(context, current, s[0], s[1], s[2], s[3], t[0]);
                    case 2: return invoke(context, current, s[0], s[1], s[2], s[3], t[0], t[1]);
                    case 3: return invoke(context, current, s[0], s[1], s[2], s[3], t[0], t[1], t[2]);
                    case 4: return invoke(context, current, s[0], s[1], s[2], s[3], t[0], t[1], t[2], t[3]);
                    case 5: return invoke(context, current, s[0], s[1], s[2], s[3], t[0], t[1], t[2], t[3], t[4]);
                }
                break;
            case 5:
                switch (t.length) {
                    case 0: return invoke(context, current, s[0], s[1], s[2], s[3], s[4]);
                    case 1: return invoke(context, current, s[0], s[1], s[2], s[3], s[4], t[0]);
                    case 2: return invoke(context, current, s[0], s[1], s[2], s[3], s[4], t[0], t[1]);
                    case 3: return invoke(context, current, s[0], s[1], s[2], s[3], s[4], t[0], t[1], t[2]);
                    case 4: return invoke(context, current, s[0], s[1], s[2], s[3], s[4], t[0], t[1], t[2], t[3]);
                    case 5: return invoke(context, current, s[0], s[1], s[2], s[3], s[4], t[0], t[1], t[2], t[3], t[4]);
                }
        }
        // No matching overload for this arity combination.
        throw new IllegalArgumentException(s, t);
    }

    // Default implementations of all statically-typed invoke() overloads.
    // Each one fails with a descriptive IllegalArgumentException; subclasses
    // override exactly the overload(s) matching their declared arity.

    public IStrategoTerm invoke(Context context, IStrategoTerm current) {
        throw new IllegalArgumentException();
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, IStrategoTerm t1) {
        throw new IllegalArgumentException(t1);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, IStrategoTerm t1, IStrategoTerm t2) {
        throw new IllegalArgumentException(t1, t2);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3) {
        throw new IllegalArgumentException(t1, t2, t3);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4) {
        throw new IllegalArgumentException(t1, t2, t3, t4);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4, IStrategoTerm t5) {
        throw new IllegalArgumentException(t1, t2, t3, t4, t5);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1) {
        throw new IllegalArgumentException(s1);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, IStrategoTerm t1) {
        throw new IllegalArgumentException(s1, t1);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, IStrategoTerm t1, IStrategoTerm t2) {
        throw new IllegalArgumentException(s1, t1, t2);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3) {
        throw new IllegalArgumentException(s1, t1, t2, t3);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4) {
        throw new IllegalArgumentException(s1, t1, t2, t3, t4);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4, IStrategoTerm t5) {
        throw new IllegalArgumentException(s1, t1, t2, t3, t4, t5);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2) {
        throw new IllegalArgumentException(s1, s2);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, IStrategoTerm t1) {
        throw new IllegalArgumentException(s1, s2, t1);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, IStrategoTerm t1, IStrategoTerm t2) {
        throw new IllegalArgumentException(s1, s2, t1, t2);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3) {
        throw new IllegalArgumentException(s1, s2, t1, t2, t3);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4) {
        throw new IllegalArgumentException(s1, s2, t1, t2, t3, t4);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4, IStrategoTerm t5) {
        throw new IllegalArgumentException(s1, s2, t1, t2, t3, t4, t5);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3) {
        throw new IllegalArgumentException(s1, s2, s3);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, IStrategoTerm t1) {
        throw new IllegalArgumentException(s1, s2, s3, t1);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, IStrategoTerm t1, IStrategoTerm t2) {
        throw new IllegalArgumentException(s1, s2, s3, t1, t2);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3) {
        throw new IllegalArgumentException(s1, s2, s3, t1, t2, t3);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4) {
        throw new IllegalArgumentException(s1, s2, s3, t1, t2, t3, t4);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4, IStrategoTerm t5) {
        throw new IllegalArgumentException(s1, s2, s3, t1, t2, t3, t4, t5);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4) {
        throw new IllegalArgumentException(s1, s2, s3, s4);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, IStrategoTerm t1) {
        throw new IllegalArgumentException(s1, s2, s3, s4, t1);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, IStrategoTerm t1, IStrategoTerm t2) {
        throw new IllegalArgumentException(s1, s2, s3, s4, t1, t2);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3) {
        throw new IllegalArgumentException(s1, s2, s3, s4, t1, t2, t3);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4) {
        throw new IllegalArgumentException(s1, s2, s3, s4, t1, t2, t3, t4);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4, IStrategoTerm t5) {
        throw new IllegalArgumentException(s1, s2, s3, s4, t1, t2, t3, t4, t5);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, Strategy s5) {
        throw new IllegalArgumentException(s1, s2, s3, s4, s5);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, Strategy s5, IStrategoTerm t1) {
        throw new IllegalArgumentException(s1, s2, s3, s4, s5, t1);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, Strategy s5, IStrategoTerm t1, IStrategoTerm t2) {
        throw new IllegalArgumentException(s1, s2, s3, s4, s5, t1, t2);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, Strategy s5, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3) {
        throw new IllegalArgumentException(s1, s2, s3, s4, s5, t1, t2, t3);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, Strategy s5, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4) {
        throw new IllegalArgumentException(s1, s2, s3, s4, s5, t1, t2, t3, t4);
    }

    public IStrategoTerm invoke(Context context, IStrategoTerm current, Strategy s1, Strategy s2, Strategy s3, Strategy s4, Strategy s5, IStrategoTerm t1, IStrategoTerm t2, IStrategoTerm t3, IStrategoTerm t4, IStrategoTerm t5) {
        throw new IllegalArgumentException(s1, s2, s3, s4, s5, t1, t2, t3, t4, t5);
    }

    /**
     * Returns this strategy's display name, lazily defaulting to the
     * class's simple name on first call.
     */
    public String getName() {
        if(name == null) {
            name = getClass().getSimpleName();
        }
        return name;
    }

    @Override
    public String toString() {
        return getName();
    }

    /**
     * Renders an argument list in Stratego surface syntax, e.g.
     * "(s1, s2 | t1, t2)": strategy arguments first, then a '|' before
     * the first term argument.
     */
    private static String argsToText(Object... args) {
        if (args.length == 0)
            return "(none)";
        StringBuilder result = new StringBuilder("(");
        boolean printedBar = false;
        // A leading term argument means there are no strategy arguments at all.
        if (args[0] instanceof IStrategoTerm) {
            result.append('|');
            printedBar = true;
        }
        result.append(args[0]);
        for (int i = 1; i < args.length; i++) {
            Object arg = args[i];
            // Emit the strategy/term separator exactly once, at the first term.
            if (!printedBar && arg instanceof IStrategoTerm) {
                result.append(" |");
                printedBar = true;
            } else {
                result.append(", ");
            }
            result.append(arg);
        }
        result.append(")");
        return result.toString();
    }

    // Concatenates two arrays (strategy args followed by term args) for
    // error reporting in invokeDynamic().
    private static Object[] concat(Object[] l1, Object[] l2) {
        Object[] results = new Object[l1.length + l2.length];
        System.arraycopy(l1, 0, results, 0, l1.length);
        System.arraycopy(l2, 0, results, l1.length, l2.length);
        return results;
    }

    /**
     * Thrown to indicate that a strategy has been passed an illegal or
     * inappropriate argument or arguments.
     */
    // Intentionally a non-static inner class: the message includes the
    // enclosing strategy's getName().
    private class IllegalArgumentException extends java.lang.IllegalArgumentException {

        private static final long serialVersionUID = 121251125123L;

        public IllegalArgumentException(Object... args) {
            super("Illegal arguments for " + getName() + " " + argsToText(args));
        }

        public IllegalArgumentException(Object[] s, Object[] t) {
            this(concat(s, t));
        }
    }
}
| |
/*
* Copyright 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.plaidapp.ui.widget;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ObjectAnimator;
import android.content.Context;
import android.content.res.TypedArray;
import android.support.v4.view.MotionEventCompat;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.view.animation.AnimationUtils;
import android.widget.FrameLayout;
import java.util.ArrayList;
import java.util.List;
import io.plaidapp.R;
import io.plaidapp.util.ViewOffsetHelper;
/**
* A {@link FrameLayout} which can be dragged downward to be dismissed (either directly or via a
* specified nested scrolling child). It expects to contain a single child view and exposes a
* listener interface to react to it's dismissal.
*
* View dragging has the benefit of reporting it's velocity allowing us to respond to flings etc
* but does not allow children to scroll. Nested scrolling allows child views to scroll (duh)
* but does not report velocity. We combine both to get the best experience we can with the APIs.
*/
public class BottomSheet extends FrameLayout {

    // configurable attributes
    // Drag distance (px) beyond which releasing dismisses; MAX_VALUE until set via XML.
    private int dragDismissDistance = Integer.MAX_VALUE;
    private boolean hasScrollingChild = false;
    private int scrollingChildId = -1;

    // child views & helpers
    private View dragView;
    private View scrollingChild;
    private ViewDragHelper viewDragHelper;
    private ViewOffsetHelper dragViewOffsetHelper;

    // state
    // Minimum fling velocity (px/s) that triggers a dismiss on release.
    private final int FLING_VELOCITY;
    private List<Listener> listeners;
    private boolean isDismissing;
    // The drag view's resting bounds, captured on layout.
    private int dragViewLeft;
    private int dragViewTop;
    private int dragViewBottom;
    private boolean lastNestedScrollWasDownward;

    public BottomSheet(Context context) {
        this(context, null, 0);
    }

    public BottomSheet(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public BottomSheet(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        FLING_VELOCITY = ViewConfiguration.get(context).getScaledMinimumFlingVelocity();
        // Read optional XML attributes: a nested-scrolling child id and a
        // custom dismiss distance.
        final TypedArray a =
                getContext().obtainStyledAttributes(attrs, R.styleable.BottomSheet, 0, 0);
        if (a.hasValue(R.styleable.BottomSheet_scrollingChild)) {
            hasScrollingChild = true;
            scrollingChildId = a.getResourceId(R.styleable.BottomSheet_scrollingChild,
                    scrollingChildId);
        }
        if (a.hasValue(R.styleable.BottomSheet_dragDismissDistance)) {
            dragDismissDistance = a.getDimensionPixelSize(
                    R.styleable.BottomSheet_dragDismissDistance, dragDismissDistance);
        }
        a.recycle();
    }

    public void addListener(Listener listener) {
        // Lazily created so the common no-listener case allocates nothing.
        if (listeners == null) {
            listeners = new ArrayList<>();
        }
        listeners.add(listener);
    }

    /** Callbacks for drag progress and eventual dismissal of the sheet. */
    public interface Listener {
        void onDragDismissed();
        void onDrag(int top);
    }

    // Settles the captured view down to the sheet's bottom edge.
    // NOTE(review): settleCapturedViewAt() is only valid while the helper has a
    // captured view (i.e. during/after Callback processing) — confirm callers.
    public void doDismiss() {
        viewDragHelper.settleCapturedViewAt(dragViewLeft, dragViewBottom);
    }

    @Override
    public void addView(View child, int index, ViewGroup.LayoutParams params) {
        // Enforce the single-child contract; the sole child is what we drag.
        if (dragView != null) {
            throw new UnsupportedOperationException("BottomSheet must only have 1 child view");
        }
        dragView = child;
        dragViewOffsetHelper = new ViewOffsetHelper(dragView);
        if (hasScrollingChild) {
            scrollingChild = dragView.findViewById(scrollingChildId);
        }
        super.addView(child, index, params);
    }

    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        viewDragHelper = ViewDragHelper.create(this, dragHelperCallbacks);
    }

    @Override
    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
        super.onLayout(changed, left, top, right, bottom);
        // Capture the drag view's resting position so drags/settles have a
        // reference frame.
        if (dragView != null && dragView.isLaidOut()) {
            dragViewLeft = dragView.getLeft();
            dragViewTop = dragView.getTop();
            dragViewBottom = dragView.getBottom();
            dragViewOffsetHelper.onViewLayout();
        }
    }

    @Override
    public boolean onInterceptTouchEvent(MotionEvent ev) {
        final int action = MotionEventCompat.getActionMasked(ev);
        if (action == MotionEvent.ACTION_CANCEL || action == MotionEvent.ACTION_UP) {
            viewDragHelper.cancel();
            return false;
        }
        // Only intercept when the touch is over this (visible) sheet and the
        // drag helper wants the gesture.
        return isDraggableViewUnder((int) ev.getX(), (int) ev.getY())
                && (viewDragHelper.shouldInterceptTouchEvent(ev) || super.onInterceptTouchEvent(ev));
    }

    @Override
    public boolean onTouchEvent(MotionEvent ev) {
        viewDragHelper.processTouchEvent(ev);
        if (viewDragHelper.getCapturedView() == null) {
            return super.onTouchEvent(ev);
        }
        return true;
    }

    @Override
    public void computeScroll() {
        // Keep invalidating while the drag helper animates a settle.
        if (viewDragHelper.continueSettling(true)) {
            ViewCompat.postInvalidateOnAnimation(this);
        }
    }

    @Override
    public boolean onStartNestedScroll(View child, View target, int nestedScrollAxes) {
        // Only participate in vertical nested scrolling.
        return (nestedScrollAxes & View.SCROLL_AXIS_VERTICAL) != 0;
    }

    @Override
    public void onNestedScroll(View target, int dxConsumed, int dyConsumed,
                               int dxUnconsumed, int dyUnconsumed) {
        // if scrolling downward, use any unconsumed (i.e. not used by the scrolling child)
        // to drag the sheet downward
        lastNestedScrollWasDownward = dyUnconsumed < 0;
        if (lastNestedScrollWasDownward) {
            // NOTE(review): this offsets the view directly (not via the drag
            // helper), so onViewPositionChanged/onDrag is not reported here —
            // confirm that is intended.
            dragView.offsetTopAndBottom(-dyUnconsumed);
        }
    }

    @Override
    public void onNestedPreScroll(View target, int dx, int dy, int[] consumed) {
        // if scrolling upward & the sheet has been dragged downward
        // then drag back into place before allowing scrolls
        if (dy > 0) {
            final int dragDisplacement = dragView.getTop() - dragViewTop;
            if (dragDisplacement > 0) {
                // Consume only as much of the scroll as is needed to return
                // the sheet to its resting top.
                final int consume = Math.min(dragDisplacement, dy);
                dragView.offsetTopAndBottom(-consume);
                consumed[1] = consume;
                lastNestedScrollWasDownward = false;
            }
        }
    }

    @Override
    public void onStopNestedScroll(View child) {
        final int dragDisplacement = dragView.getTop() - dragViewTop;
        if (dragDisplacement == 0) return;

        // check if we should perform a dismiss or settle back into place
        final boolean dismiss =
                lastNestedScrollWasDownward && dragDisplacement >= dragDismissDistance;

        // animate either back into place or to bottom
        ObjectAnimator settleAnim = ObjectAnimator.ofInt(dragViewOffsetHelper,
                ViewOffsetHelper.OFFSET_Y,
                dragView.getTop(),
                dismiss ? dragViewBottom : dragViewTop);
        settleAnim.setDuration(200L);
        settleAnim.setInterpolator(AnimationUtils.loadInterpolator(getContext(),
                android.R.interpolator.fast_out_slow_in));
        if (dismiss) {
            settleAnim.addListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    dispatchDismissCallback();
                }
            });
        }
        settleAnim.start();
    }

    protected void dispatchDismissCallback() {
        if (listeners != null && listeners.size() > 0) {
            for (Listener listener : listeners) {
                listener.onDragDismissed();
            }
        }
    }

    protected void dispatchDragCallback() {
        if (listeners != null && listeners.size() > 0) {
            for (Listener listener : listeners) {
                // NOTE(review): this reports the sheet's own getTop(), not
                // dragView.getTop(); Listener.onDrag's parameter is named
                // 'top' — confirm which view's top was intended.
                listener.onDrag(getTop());
            }
        }
    }

    private boolean isDraggableViewUnder(int x, int y) {
        return getVisibility() == VISIBLE && viewDragHelper.isViewUnder(this, x, y);
    }

    private ViewDragHelper.Callback dragHelperCallbacks = new ViewDragHelper.Callback() {

        @Override
        public boolean tryCaptureView(View child, int pointerId) {
            // if we have a scrolling child and it can scroll then don't drag, it'll be handled
            // by nested scrolling
            boolean childCanScroll = scrollingChild != null
                    && (scrollingChild.canScrollVertically(1)
                    || scrollingChild.canScrollVertically(-1));
            return !childCanScroll;
        }

        @Override
        public int clampViewPositionVertical(View child, int top, int dy) {
            // Constrain dragging between the resting top and the bottom edge.
            return Math.min(Math.max(top, dragViewTop), dragViewBottom);
        }

        @Override
        public int clampViewPositionHorizontal(View child, int left, int dx) {
            // No horizontal dragging allowed.
            return dragViewLeft;
        }

        @Override
        public int getViewVerticalDragRange(View child) {
            return dragViewBottom - dragViewTop;
        }

        @Override
        public void onViewPositionChanged(View child, int left, int top, int dx, int dy) {
            dispatchDragCallback();
        }

        @Override
        public void onViewReleased(View releasedChild, float velocityX, float velocityY) {
            // A fast downward fling dismisses; otherwise settle back.
            if (velocityY >= FLING_VELOCITY) {
                isDismissing = true;
                doDismiss();
            } else {
                // settle back into position
                viewDragHelper.settleCapturedViewAt(dragViewLeft, dragViewTop);
            }
            ViewCompat.postInvalidateOnAnimation(BottomSheet.this);
        }

        @Override
        public void onViewDragStateChanged(int state) {
            // Fire the dismiss callback once the dismissal settle completes.
            if (isDismissing && state == ViewDragHelper.STATE_IDLE) {
                isDismissing = false;
                dispatchDismissCallback();
            }
        }
    };
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.hbase;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.security.PrivilegedAction;
import java.util.List;
import java.util.Map;

import org.apache.camel.Consumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.component.hbase.mapping.CellMappingStrategyFactory;
import org.apache.camel.component.hbase.model.HBaseCell;
import org.apache.camel.component.hbase.model.HBaseRow;
import org.apache.camel.impl.DefaultEndpoint;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.spi.UriPath;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.security.UserGroupInformation;
/**
* For reading/writing from/to an HBase store (Hadoop database).
*/
@UriEndpoint(firstVersion = "2.10.0", scheme = "hbase", title = "HBase", syntax = "hbase:tableName", consumerClass = HBaseConsumer.class, label = "hadoop")
@UriEndpoint(firstVersion = "2.10.0", scheme = "hbase", title = "HBase", syntax = "hbase:tableName", consumerClass = HBaseConsumer.class, label = "hadoop")
public class HBaseEndpoint extends DefaultEndpoint {

    private Configuration configuration;
    // Shared connection supplied by the component; this endpoint does not own/close it.
    private final Connection connection;
    private HBaseAdmin admin;
    @UriPath(description = "The name of the table") @Metadata(required = "true")
    private final String tableName;
    @UriParam(label = "producer", defaultValue = "100")
    private int maxResults = 100;
    @UriParam
    private List<Filter> filters;
    @UriParam(label = "consumer", enums = "CamelHBasePut,CamelHBaseGet,CamelHBaseScan,CamelHBaseDelete")
    private String operation;
    @UriParam(label = "consumer", defaultValue = "true")
    private boolean remove = true;
    @UriParam(enums = "header,body")
    private String mappingStrategyName;
    @UriParam
    private String mappingStrategyClassName;
    @UriParam
    private CellMappingStrategyFactory cellMappingStrategyFactory = new CellMappingStrategyFactory();
    @UriParam(label = "consumer")
    private HBaseRemoveHandler removeHandler = new HBaseDeleteHandler();
    @UriParam
    private HBaseRow rowModel;
    @UriParam(label = "consumer")
    private int maxMessagesPerPoll;
    @UriParam
    private UserGroupInformation userGroupInformation;
    @UriParam(prefix = "row.", multiValue = true)
    private Map<String, Object> rowMapping;

    /**
     * Cached byte form of the table name, kept for performance so the name is
     * not re-encoded on every {@link #getTable()} call.
     */
    private byte[] tableNameBytes;

    public HBaseEndpoint(String uri, HBaseComponent component, Connection connection, String tableName) {
        super(uri, component);
        this.tableName = tableName;
        this.connection = connection;
        if (this.tableName == null) {
            throw new IllegalArgumentException("Table name can not be null");
        } else {
            // Use an explicit charset rather than the platform default so the
            // encoded name is identical on every JVM.
            tableNameBytes = tableName.getBytes(StandardCharsets.UTF_8);
        }
    }

    public Producer createProducer() throws Exception {
        return new HBaseProducer(this);
    }

    public Consumer createConsumer(Processor processor) throws Exception {
        HBaseConsumer consumer = new HBaseConsumer(this, processor);
        configureConsumer(consumer);
        consumer.setMaxMessagesPerPoll(maxMessagesPerPoll);
        return consumer;
    }

    public boolean isSingleton() {
        return true;
    }

    public Configuration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(Configuration configuration) {
        this.configuration = configuration;
    }

    public HBaseAdmin getAdmin() {
        return admin;
    }

    public void setAdmin(HBaseAdmin admin) {
        this.admin = admin;
    }

    public int getMaxResults() {
        return maxResults;
    }

    /**
     * The maximum number of rows to scan.
     */
    public void setMaxResults(int maxResults) {
        this.maxResults = maxResults;
    }

    public List<Filter> getFilters() {
        return filters;
    }

    /**
     * A list of filters to use.
     */
    public void setFilters(List<Filter> filters) {
        this.filters = filters;
    }

    public String getOperation() {
        return operation;
    }

    /**
     * The HBase operation to perform
     */
    public void setOperation(String operation) {
        this.operation = operation;
    }

    public CellMappingStrategyFactory getCellMappingStrategyFactory() {
        return cellMappingStrategyFactory;
    }

    /**
     * To use a custom CellMappingStrategyFactory that is responsible for mapping cells.
     */
    public void setCellMappingStrategyFactory(CellMappingStrategyFactory cellMappingStrategyFactory) {
        this.cellMappingStrategyFactory = cellMappingStrategyFactory;
    }

    public String getMappingStrategyName() {
        return mappingStrategyName;
    }

    /**
     * The strategy to use for mapping Camel messages to HBase columns. Supported values: header, or body.
     */
    public void setMappingStrategyName(String mappingStrategyName) {
        this.mappingStrategyName = mappingStrategyName;
    }

    public String getMappingStrategyClassName() {
        return mappingStrategyClassName;
    }

    /**
     * The class name of a custom mapping strategy implementation.
     */
    public void setMappingStrategyClassName(String mappingStrategyClassName) {
        this.mappingStrategyClassName = mappingStrategyClassName;
    }

    public HBaseRow getRowModel() {
        return rowModel;
    }

    /**
     * An instance of org.apache.camel.component.hbase.model.HBaseRow which describes how each row should be modeled
     */
    public void setRowModel(HBaseRow rowModel) {
        this.rowModel = rowModel;
    }

    public boolean isRemove() {
        return remove;
    }

    /**
     * If the option is true, Camel HBase Consumer will remove the rows which it processes.
     */
    public void setRemove(boolean remove) {
        this.remove = remove;
    }

    public HBaseRemoveHandler getRemoveHandler() {
        return removeHandler;
    }

    /**
     * To use a custom HBaseRemoveHandler that is executed when a row is to be removed.
     */
    public void setRemoveHandler(HBaseRemoveHandler removeHandler) {
        this.removeHandler = removeHandler;
    }

    public int getMaxMessagesPerPoll() {
        return maxMessagesPerPoll;
    }

    /**
     * Gets the maximum number of messages as a limit to poll at each polling.
     * <p/>
     * Is default unlimited, but use 0 or negative number to disable it as unlimited.
     */
    public void setMaxMessagesPerPoll(int maxMessagesPerPoll) {
        this.maxMessagesPerPoll = maxMessagesPerPoll;
    }

    public UserGroupInformation getUserGroupInformation() {
        return userGroupInformation;
    }

    /**
     * Defines privileges to communicate with HBase such as using kerberos.
     */
    public void setUserGroupInformation(UserGroupInformation userGroupInformation) {
        this.userGroupInformation = userGroupInformation;
    }

    public Map<String, Object> getRowMapping() {
        return rowMapping;
    }

    /**
     * To map the key/values from the Map to a {@link HBaseRow}.
     * <p/>
     * The following keys is supported:
     * <ul>
     * <li>rowId - The id of the row. This has limited use as the row usually changes per Exchange.</li>
     * <li>rowType - The type to covert row id to. Supported operations: CamelHBaseScan.</li>
     * <li>family - The column family. Supports a number suffix for referring to more than one columns.</li>
     * <li>qualifier - The column qualifier. Supports a number suffix for referring to more than one columns.</li>
     * <li>value - The value. Supports a number suffix for referring to more than one columns</li>
     * <li>valueType - The value type. Supports a number suffix for referring to more than one columns. Supported operations: CamelHBaseGet, and CamelHBaseScan.</li>
     * </ul>
     */
    public void setRowMapping(Map<String, Object> rowMapping) {
        this.rowMapping = rowMapping;
    }

    @Override
    protected void doStart() throws Exception {
        super.doStart();
        // Build the row model from the URI "row." options unless one was set explicitly.
        if (rowModel == null && rowMapping != null) {
            rowModel = createRowModel(rowMapping);
        }
    }

    @Override
    protected void doStop() throws Exception {
        super.doStop();
    }

    /**
     * Gets connection to the table (secured or not, depends on the object initialization)
     * please remember to close the table after use
     * @return table, remember to close!
     */
    public Table getTable() throws IOException {
        if (userGroupInformation != null) {
            // Perform the lookup under the configured identity (e.g. kerberos).
            return userGroupInformation.doAs(new PrivilegedAction<Table>() {
                @Override
                public Table run() {
                    try {
                        return connection.getTable(TableName.valueOf(tableNameBytes));
                    } catch (IOException e) {
                        // PrivilegedAction cannot throw checked exceptions; rewrap.
                        throw new RuntimeException(e);
                    }
                }
            });
        } else {
            return connection.getTable(TableName.valueOf(tableNameBytes));
        }
    }

    /**
     * Creates an {@link HBaseRow} model from the specified endpoint parameters.
     */
    private HBaseRow createRowModel(Map<String, Object> parameters) {
        HBaseRow rowModel = new HBaseRow();
        if (parameters.containsKey(HBaseAttribute.HBASE_ROW_TYPE.asOption())) {
            String rowType = String.valueOf(parameters.remove(HBaseAttribute.HBASE_ROW_TYPE.asOption()));
            if (rowType != null && !rowType.isEmpty()) {
                rowModel.setRowType(getCamelContext().getClassResolver().resolveClass(rowType));
            }
        }
        // Cells are numbered family1/qualifier1/..., family2/...; stop at the
        // first index with no family+qualifier pair.
        for (int i = 1; parameters.get(HBaseAttribute.HBASE_FAMILY.asOption(i)) != null
                && parameters.get(HBaseAttribute.HBASE_QUALIFIER.asOption(i)) != null; i++) {
            HBaseCell cellModel = new HBaseCell();
            cellModel.setFamily(String.valueOf(parameters.remove(HBaseAttribute.HBASE_FAMILY.asOption(i))));
            cellModel.setQualifier(String.valueOf(parameters.remove(HBaseAttribute.HBASE_QUALIFIER.asOption(i))));
            cellModel.setValue(String.valueOf(parameters.remove(HBaseAttribute.HBASE_VALUE.asOption(i))));
            if (parameters.containsKey(HBaseAttribute.HBASE_VALUE_TYPE.asOption(i))) {
                String valueType = String.valueOf(parameters.remove(HBaseAttribute.HBASE_VALUE_TYPE.asOption(i)));
                if (valueType != null && !valueType.isEmpty()) {
                    // Fix: the resolved class is this cell's value type; the
                    // previous code wrongly overwrote the row model's row type.
                    cellModel.setValueType(getCamelContext().getClassResolver().resolveClass(valueType));
                }
            }
            rowModel.getCells().add(cellModel);
        }
        return rowModel;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.cache.affinity;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import org.apache.ignite.cache.CacheMode;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.processors.affinity.GridAffinityAssignmentCache;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheProcessor;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.testframework.GridStringLogger;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.IgniteSystemProperties.IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD;
import static org.apache.ignite.internal.IgniteNodeAttributes.ATTR_IGNITE_INSTANCE_NAME;
/**
 * Tests of partitions distribution logging.
 *
 * Tests based on using of affinity function which provides an even distribution of partitions between nodes.
 *
 * @see EvenDistributionAffinityFunction
 */
public class AffinityDistributionLoggingTest extends GridCommonAbstractTest {
    /** Prefix of the warning logged when the local distribution deviates from the ideal one. */
    private static final String LOG_MESSAGE_PREFIX = "Local node affinity assignment distribution is not ideal ";

    /** Partitions number. */
    private int parts = 0;

    /** Nodes number. */
    private int nodes = 0;

    /** Backups number. */
    private int backups = 0;

    /** For storing original value of system property. */
    private String tempProp;

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        super.beforeTestsStarted();

        // Remember the user's threshold so it can be restored once this test class finishes.
        tempProp = System.getProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD);
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        super.afterTestsStopped();

        // Restore the original threshold only if one was set before the tests started.
        if (tempProp != null)
            System.setProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, tempProp);
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        super.afterTest();

        // Each test sets its own threshold; clear it so tests stay independent.
        System.clearProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD);

        stopAllGrids();
    }

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        CacheConfiguration cacheCfg = defaultCacheConfiguration();

        cacheCfg.setCacheMode(CacheMode.PARTITIONED);
        cacheCfg.setBackups(backups);
        // The affinity function below distributes partitions evenly, so whether the
        // warning fires is controlled purely by parts/nodes/backups and the threshold.
        cacheCfg.setAffinity(new EvenDistributionAffinityFunction(parts));

        cfg.setCacheConfiguration(cacheCfg);

        return cfg;
    }

    /**
     * @throws Exception In case of an error.
     */
    public void test2PartitionsIdealDistributionIsNotLogged() throws Exception {
        System.setProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, "0");

        nodes = 2;
        parts = 2;
        backups = 1;

        String testsLog = runAndGetExchangeLog(false);

        assertFalse(testsLog.contains(LOG_MESSAGE_PREFIX));
    }

    /**
     * @throws Exception In case of an error.
     */
    public void test120PartitionsIdeadDistributionIsNotLogged() throws Exception {
        System.setProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, "0.0");

        nodes = 3;
        parts = 120;
        backups = 2;

        String testsLog = runAndGetExchangeLog(false);

        assertFalse(testsLog.contains(LOG_MESSAGE_PREFIX));
    }

    /**
     * @throws Exception In case of an error.
     */
    public void test5PartitionsNotIdealDistributionIsLogged() throws Exception {
        System.setProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, "50.0");

        nodes = 4;
        parts = 5;
        backups = 3;

        String testsLog = runAndGetExchangeLog(false);

        assertTrue(testsLog.contains(LOG_MESSAGE_PREFIX));
    }

    /**
     * @throws Exception In case of an error.
     */
    public void test5PartitionsNotIdealDistributionSuppressedLoggingOnClientNode() throws Exception {
        System.setProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, "0.0");

        nodes = 4;
        parts = 5;
        backups = 3;

        String testsLog = runAndGetExchangeLog(true);

        assertFalse(testsLog.contains(LOG_MESSAGE_PREFIX));
    }

    /**
     * @throws Exception In case of an error.
     */
    public void test7PartitionsNotIdealDistributionSuppressedLogging() throws Exception {
        System.setProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, "50.0");

        nodes = 3;
        parts = 7;
        backups = 0;

        String testsLog = runAndGetExchangeLog(false);

        assertFalse(testsLog.contains(LOG_MESSAGE_PREFIX));
    }

    /**
     * @throws Exception In case of an error.
     */
    public void test5PartitionsNotIdealDistributionSuppressedLogging() throws Exception {
        System.setProperty(IGNITE_PART_DISTRIBUTION_WARN_THRESHOLD, "65");

        nodes = 4;
        parts = 5;
        backups = 3;

        String testsLog = runAndGetExchangeLog(false);

        assertFalse(testsLog.contains(LOG_MESSAGE_PREFIX));
    }

    /**
     * Starts a specified number of Ignite nodes and log partition node exchange during a last node's startup.
     *
     * @param testClientNode Whether it is necessary to get exchange log from the client node.
     * @return Log of latest partition map exchange.
     * @throws Exception In case of an error.
     */
    private String runAndGetExchangeLog(boolean testClientNode) throws Exception {
        assert nodes > 1;

        // Start all but the last node, then capture the log produced while the last one joins.
        IgniteEx ignite = (IgniteEx)startGrids(nodes - 1);

        awaitPartitionMapExchange();

        GridCacheProcessor proc = ignite.context().cache();

        GridCacheContext cctx = proc.context().cacheContext(CU.cacheId(DEFAULT_CACHE_NAME));

        final GridStringLogger log = new GridStringLogger(false, this.log);

        // Swap the logger inside the affinity assignment cache so the warning (if any) lands in 'log'.
        GridAffinityAssignmentCache aff = GridTestUtils.getFieldValue(cctx.affinity(), "aff");

        GridTestUtils.setFieldValue(aff, "log", log);

        if (testClientNode) {
            IgniteConfiguration cfg = getConfiguration("client");

            cfg.setClientMode(true);

            startGrid(cfg);
        }
        else
            startGrid(nodes);

        awaitPartitionMapExchange();

        return log.toString();
    }

    /**
     * Affinity function for a partitioned cache which provides even distribution partitions between nodes in cluster.
     */
    private static class EvenDistributionAffinityFunction implements AffinityFunction {
        /** */
        private static final long serialVersionUID = 0L;

        /** Partitions number. */
        private int parts;

        /**
         * @param parts Number of partitions for one cache.
         */
        private EvenDistributionAffinityFunction(int parts) {
            this.parts = parts;
        }

        /** {@inheritDoc} */
        @Override public int partitions() {
            return parts;
        }

        /** {@inheritDoc} */
        @Override public int partition(Object key) {
            // floorMod guarantees a result in [0, parts) even for negative hash codes;
            // a plain '%' would return a negative (invalid) partition for such keys.
            return Math.floorMod(key.hashCode(), parts);
        }

        /** {@inheritDoc} */
        @Override public List<List<ClusterNode>> assignPartitions(AffinityFunctionContext affCtx) {
            List<ClusterNode> nodes = new ArrayList<>(affCtx.currentTopologySnapshot());

            // Sort by instance name so the assignment is deterministic across topology snapshots.
            nodes.sort(Comparator.comparing(o -> o.<String>attribute(ATTR_IGNITE_INSTANCE_NAME)));

            List<List<ClusterNode>> res = new ArrayList<>(parts);

            for (int i = 0; i < parts; i++) {
                // LinkedHashSet keeps the primary first and drops duplicate backups on small topologies.
                Set<ClusterNode> n0 = new LinkedHashSet<>();

                n0.add(nodes.get(i % nodes.size()));

                for (int j = 1; j <= affCtx.backups(); j++)
                    n0.add(nodes.get((i + j) % nodes.size()));

                res.add(new ArrayList<>(n0));
            }

            return res;
        }

        /** {@inheritDoc} */
        @Override public void removeNode(UUID nodeId) {
            // No-op.
        }

        /** {@inheritDoc} */
        @Override public void reset() {
            // No-op.
        }
    }
}
| |
package com.example.jamsession;
import java.util.ArrayList;
import java.util.Iterator;
import org.json.JSONException;
import org.json.JSONObject;
import org.json.JSONTokener;
import Objects.Jam;
import Objects.User;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.Intent;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.app.ActionBarActivity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import com.example.jamsession.audiorecorder.AudioRecorder;
import com.loopj.android.http.AsyncHttpClient;
import com.loopj.android.http.AsyncHttpResponseHandler;
import com.loopj.android.http.RequestParams;
/**
 * Activity that shows two lists of jam updates fetched from the server: the
 * current user's new updates and all updates. Tapping a list entry launches
 * the {@link AudioRecorder} so the user can collaborate on that jam.
 */
public class UpdateList extends ActionBarActivity
{
    /** Endpoint returning only the current user's new updates. */
    private static final String GET_UPDATES_URL = "http://floating-taiga-3502.herokuapp.com/getUpdates";

    /** Endpoint returning all updates. */
    private static final String GET_ALL_UPDATES_URL = "http://floating-taiga-3502.herokuapp.com/getAllUpdates";

    /** User whose updates are displayed; built from the "userID" intent extra. */
    private User currentUser;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_update_list);
        if (savedInstanceState == null)
        {
            getSupportFragmentManager().beginTransaction()
                    .add(R.id.container, new PlaceholderFragment()).commit();
        }
        //Get list of updates from the server
        Intent intent = getIntent();
        Bundle extras = intent.getExtras();
        currentUser = new User(extras.getInt("userID"));
        updateJams();
        getAllJams();
    }

    /**
     * Click handler variant: refreshes both lists.
     *
     * @param v the view that triggered the refresh (unused).
     */
    public void updateJams(View v)
    {
        updateJams();
        getAllJams();
    }

    /** Fetches the current user's new updates and populates the "new updates" list. */
    public void updateJams()
    {
        System.out.println("Update Jams");
        fetchJams(GET_UPDATES_URL, true);
    }

    /** Fetches all updates and populates the "all updates" list. */
    public void getAllJams()
    {
        System.out.println("Update All Jams");
        fetchJams(GET_ALL_UPDATES_URL, false);
    }

    /**
     * Issues an asynchronous GET for the given endpoint and, on success, parses the
     * response and hands the jams to the appropriate list adapter.
     *
     * @param url            server endpoint to query.
     * @param newUpdatesOnly true to populate the "new updates" list, false for "all updates".
     */
    private void fetchJams(String url, final boolean newUpdatesOnly)
    {
        if (!isOnline())
            return;
        AsyncHttpClient client = new AsyncHttpClient();
        RequestParams params = new RequestParams();
        params.put("userID", Integer.toString(currentUser.getId()));
        client.get(url, params, new AsyncHttpResponseHandler()
        {
            @SuppressLint("NewApi")
            @Override
            public void onSuccess(String response)
            {
                try {
                    ArrayList<Jam> jams = parseJams(response);
                    if (newUpdatesOnly)
                        setNewUpdateListAdapter(jams);
                    else
                        setAllUpdateListAdapter(jams);
                } catch (JSONException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Parses the server response (a JSON object whose values are serialized jams)
     * into a list of {@link Jam} objects with their owning {@link User} attached.
     *
     * @param response raw JSON response body.
     * @return parsed jams, in the iteration order of the response's keys.
     * @throws JSONException if the response is not the expected JSON structure.
     */
    private ArrayList<Jam> parseJams(String response) throws JSONException
    {
        JSONObject json = new JSONObject(new JSONTokener(response));
        ArrayList<Jam> jams = new ArrayList<Jam>();
        Iterator<String> itr = json.keys();
        while (itr.hasNext())
        {
            String key = itr.next(); //key for the next json object
            JSONObject jamJSON = new JSONObject(new JSONTokener(json.get(key).toString()));
            //parse through jam json
            Jam jam = new Jam((int) jamJSON.get("id"), (int) jamJSON.get("ttl"), (int) jamJSON.get("user_id"));
            User user = new User((int) jamJSON.get("user_id"));
            user.setName((String) jamJSON.get("user_name"));
            jam.setUser(user);
            jams.add(jam);
        }
        return jams;
    }

    /**
     * @return true when an available, connected network is present.
     */
    public boolean isOnline()
    {
        ConnectivityManager cm = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo ni = cm.getActiveNetworkInfo();
        return ni != null && ni.isAvailable() && ni.isConnected();
    }

    /** Populates the "new updates" list view with the given jams. */
    public void setNewUpdateListAdapter(ArrayList<Jam> jams)
    {
        System.out.println("Set Jam List Adapter" + jams.toString());
        attachJamList(R.id.newUpdatesListView, jams);
    }

    /** Populates the "all updates" list view with the given jams. */
    public void setAllUpdateListAdapter(ArrayList<Jam> jams)
    {
        System.out.println("Set All List Adapter" + jams.toString());
        attachJamList(R.id.allUpdateListView, jams);
    }

    /**
     * Binds the jams to the given list view and wires item taps to {@link #collaborate(Jam)}.
     *
     * @param listViewId resource id of the target {@link ListView}.
     * @param jams       jams to display.
     */
    private void attachJamList(int listViewId, ArrayList<Jam> jams)
    {
        ArrayAdapter<Jam> adapter = new ArrayAdapter<Jam>(this.getApplicationContext(),
                android.R.layout.simple_list_item_1, jams);
        final ListView listview = (ListView) findViewById(listViewId);
        listview.setAdapter(adapter);
        listview.setOnItemClickListener(new OnItemClickListener()
        {
            public void onItemClick(AdapterView<?> parent, View view, int position, long id)
            {
                //When one of the things on the list is clicked, create a new audioRecorder that will add to a jam
                Jam jam = (Jam) listview.getItemAtPosition(position);
                collaborate(jam);
            }
        });
    }

    /**
     * Launches the {@link AudioRecorder} activity to record a contribution to the given jam.
     *
     * @param jam jam to collaborate on.
     */
    public void collaborate(Jam jam)
    {
        Intent intent = new Intent(this.getBaseContext(), AudioRecorder.class);
        //add data to this intent to specify that this is for a new jam
        intent.putExtra("isJam", true);
        intent.putExtra("userID", currentUser.getId());
        intent.putExtra("jamID", jam.getId());
        startActivity(intent);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.update_list, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();
        if (id == R.id.action_settings) {
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * A placeholder fragment containing a simple view.
     */
    public static class PlaceholderFragment extends Fragment {
        public PlaceholderFragment() {
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container,
                Bundle savedInstanceState) {
            View rootView = inflater.inflate(R.layout.fragment_update_list,
                    container, false);
            return rootView;
        }
    }
}
| |
package com.owera.xaps.dbi;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.owera.xaps.dbi.JobFlag.JobServiceWindow;
import com.owera.xaps.dbi.JobFlag.JobType;
import com.owera.xaps.dbi.util.MapWrapper;
/**
 * Represents a provisioning job: which units (via a group) it targets, how it behaves
 * (flags), when it must stop (stop rules), and its run-time counters/status once started.
 *
 * Setters validate their input by default; validation can be relaxed via
 * {@link #validateInput(boolean)}, in which case invalid values are silently corrected.
 */
public class Job {
    /** Stop-rule syntax: a type letter (u/c/a/n/t), a number, and an optional "/max" part. */
    private static final Pattern pattern = Pattern.compile("(u|c|a|n|t)(\\d+)/?(\\d*)");

    public static String ANY_UNIT_IN_GROUP = "ANY-UNIT-IN-GROUP";

    /**
     * A single stop rule parsed from its textual form, e.g. "n100" (stop after 100 units)
     * or "c5/100" (stop when 5 of the last 100 units had confirmed failures).
     */
    public class StopRule {
        public static final int ANY_FAILURE_TYPE = 0;
        public static final int CONFIRMED_FAILURE_TYPE = 1;
        public static final int UNCONFIRMED_FAILURE_TYPE = 2;
        public static final int COUNT_TYPE = 3;
        public static final int TIMEOUT_TYPE = 4;

        /** Optional window size (the "/max" part); null for absolute rules. */
        private Integer numberMax;
        /** The rule's threshold (first number). */
        private long numberLimit;
        /** One of the *_TYPE constants above. */
        private int ruleType;
        /** Original textual form, kept for toString(). */
        private String ruleStr;

        /**
         * Parses a stop rule from its textual form.
         *
         * @param ruleStr rule text matching {@code (u|c|a|n|t)(\d+)/?(\d*)}.
         * @throws IllegalArgumentException if the text does not match the pattern or the
         *         numbers are out of range.
         */
        public StopRule(String ruleStr) {
            this.ruleStr = ruleStr;
            Matcher m = pattern.matcher(ruleStr);
            if (m.matches()) {
                String type = m.group(1);
                if (type.equals("u"))
                    ruleType = UNCONFIRMED_FAILURE_TYPE;
                else if (type.equals("c"))
                    ruleType = CONFIRMED_FAILURE_TYPE;
                else if (type.equals("a"))
                    ruleType = ANY_FAILURE_TYPE;
                else if (type.equals("n"))
                    ruleType = COUNT_TYPE;
                else if (type.equals("t"))
                    ruleType = TIMEOUT_TYPE;
                numberLimit = Integer.parseInt(m.group(2));
                String numberMaxStr = m.group(3);
                if (numberMaxStr != null && !numberMaxStr.trim().equals("")) {
                    numberMax = Integer.parseInt(numberMaxStr);
                    if (numberLimit >= numberMax)
                        throw new IllegalArgumentException("The first number must be less than the second (rule: " + ruleStr + ")");
                    if (numberMax < 3 || numberMax > 10000)
                        throw new IllegalArgumentException("The last number must be between 3 and 10000 (rule: " + ruleStr + ")");
                    if (numberLimit < 2)
                        throw new IllegalArgumentException("The first number must be between 2 and 10000 (rule: " + ruleStr + ")");
                } else if (numberLimit < 1)
                    throw new IllegalArgumentException("The number must be greater than 0 (rule: " + ruleStr + ")");
            } else {
                throw new IllegalArgumentException("The rule " + ruleStr + " does not match the regexp pattern " + pattern.toString());
            }
        }

        public Integer getNumberMax() {
            return numberMax;
        }

        public long getNumberLimit() {
            return numberLimit;
        }

        public int getRuleType() {
            return ruleType;
        }

        @Override
        public String toString() {
            return ruleStr;
        }
    }

    private Unittype unittype;
    private Integer id;
    private String name;
    private JobFlag flags;
    private String oldName;
    private String description;
    private Group group;
    private int unconfirmedTimeout;
    /** Serialized (comma-separated) form of the stop rules, as given to setStopRules(String). */
    private String sRules;
    private List<StopRule> stopRules;
    private File file;
    private Job dependency;
    private Integer repeatCount;
    private Integer repeatInterval;
    // private Profile moveToProfile; -- obsolete, move to a profile should be fixed with a script

    /* The field is calculated based on other jobs */
    private List<Job> children;

    /*
     * Fields which are set as the job has changed to
     * status STARTED
     */
    private JobStatus status = JobStatus.READY; // Initial Job status
    private Date startTimestamp;
    private Date endTimestamp;
    private Map<String, JobParameter> defaultParameters;
    private int completedNoFailures;
    private int completedHadFailures;
    private int confirmedFailed;
    private int unconfirmedFailed;

    /* These counters are calculated each time rules are set,
     * and they represents absolute stop rules (not fractional
     * rules).
     */
    private long timeoutTms = Long.MAX_VALUE;
    private long maxFailureAny = Integer.MAX_VALUE;
    private long maxFailureConfirmed = Integer.MAX_VALUE;
    private long maxFailureUnconfirmed = Integer.MAX_VALUE;
    private long maxCount = Integer.MAX_VALUE;

    /* The nextPII is calculated during JobLogic.checkNew to
     * find which job to run first (or rather to set the
     * nextPII correctly to the CPE. This is not kept in
     * the database, but set during the session.
     */
    private Long nextPII;

    /** When false, setters correct invalid input instead of throwing. */
    private boolean validateInput = true;

    public Job() {
    }

    public Job(Unittype unittype, String name, JobFlag flags, String description, Group group, int unconfirmedTimeout, String stopRules, File file, Job dependency, Integer repeatCount,
            Integer repeatInterval) {
        setUnittype(unittype);
        setName(name);
        setFlags(flags);
        setDescription(description);
        setGroup(group);
        setUnconfirmedTimeout(unconfirmedTimeout);
        setStopRules(stopRules);
        setFile(file);
        setDependency(dependency);
        setRepeatCount(repeatCount);
        setRepeatInterval(repeatInterval);
    }

    /** Re-runs every setter on the current field values so their validation rules apply. */
    public void validate() {
        setUnittype(unittype);
        setName(name);
        setFlags(flags);
        setDescription(description);
        setGroup(group);
        setUnconfirmedTimeout(unconfirmedTimeout);
        setStopRules(stopRules);
        setFile(file);
        setDependency(dependency);
        setRepeatCount(repeatCount);
        setRepeatInterval(repeatInterval);
    }

    /* GET methods */
    public Unittype getUnittype() {
        return unittype;
    }

    public Integer getId() {
        return id;
    }

    public String getName() {
        return name;
    }

    protected String getOldName() {
        return oldName;
    }

    public JobFlag getFlags() {
        return flags;
    }

    public String getDescription() {
        return description;
    }

    public Group getGroup() {
        return group;
    }

    public int getUnconfirmedTimeout() {
        return unconfirmedTimeout;
    }

    public List<StopRule> getStopRules() {
        if (stopRules == null)
            stopRules = new ArrayList<StopRule>();
        return stopRules;
    }

    public String getStopRulesSerialized() {
        return sRules;
    }

    public File getFile() {
        return file;
    }

    public Job getDependency() {
        return dependency;
    }

    public Integer getRepeatCount() {
        return repeatCount;
    }

    public Integer getRepeatInterval() {
        if (repeatCount != null && repeatInterval == null)
            return 86400; // Make sure a default value is returned in case a repeat-counter exists
        return repeatInterval;
    }

    /* SET methods */
    public void setUnittype(Unittype unittype) {
        if (unittype == null)
            throw new IllegalArgumentException("Job unittype cannot be null");
        this.unittype = unittype;
    }

    protected void setId(Integer id) {
        this.id = id;
    }

    /**
     * Sets the job name (trimmed); the previous name is kept in oldName so renames
     * can be detected. A null name is ignored.
     */
    public void setName(String n) {
        String name = n;
        if (name != null) {
            name = name.trim();
            this.oldName = this.name;
            this.name = name;
        }
    }

    public void setFlags(JobFlag flags) {
        if (validateInput && flags == null)
            throw new IllegalArgumentException("Job Type/ServiceWindow cannot be null");
        if (flags == null)
            flags = new JobFlag(JobType.CONFIG, JobServiceWindow.REGULAR);
        this.flags = flags;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public void setGroup(Group group) {
        if (validateInput && group == null)
            throw new IllegalArgumentException("Job group cannot be null");
        this.group = group;
    }

    /** Sets the unconfirmed timeout (seconds); values below 60 are rejected or clamped to 60. */
    public void setUnconfirmedTimeout(int unconfirmedTimeout) {
        if (validateInput && unconfirmedTimeout < 60)
            throw new IllegalArgumentException("Cannot set unconfirmed timeout to less than 60 (sec)");
        if (unconfirmedTimeout < 60)
            unconfirmedTimeout = 60;
        this.unconfirmedTimeout = unconfirmedTimeout;
    }

    public void setStopRules(List<StopRule> stopRules) {
        this.stopRules = stopRules;
    }

    /**
     * Parses a comma-separated list of stop rules and recalculates the absolute
     * (non-fractional) stop counters derived from them. Parse errors are swallowed
     * when validation is disabled.
     *
     * @param sRules comma-separated rule list, or null for no rules.
     * @throws IllegalArgumentException if a rule is invalid and validation is enabled.
     */
    public void setStopRules(String sRules) {
        List<StopRule> tmpList = new ArrayList<StopRule>();
        try {
            if (sRules != null) {
                String[] fRuleArr = sRules.split(",");
                long maxCount = Long.MAX_VALUE;
                long maxTimeoutTms = Long.MAX_VALUE;
                long maxFailureAny = Long.MAX_VALUE;
                long maxFailureConf = Long.MAX_VALUE;
                long maxFailureUnconf = Long.MAX_VALUE;
                for (String fRule : fRuleArr) {
                    StopRule stopRule = new StopRule(fRule.trim());
                    tmpList.add(stopRule);
                    // Only absolute rules (no "/max" window) contribute to the counters below;
                    // for each type keep the strictest (lowest) limit.
                    if (stopRule.getNumberMax() == null) {
                        if (stopRule.getRuleType() == StopRule.COUNT_TYPE && stopRule.getNumberLimit() < maxCount)
                            maxCount = stopRule.getNumberLimit();
                        else if (stopRule.getRuleType() == StopRule.TIMEOUT_TYPE && stopRule.getNumberLimit() < maxTimeoutTms)
                            maxTimeoutTms = stopRule.getNumberLimit();
                        else if (stopRule.getRuleType() == StopRule.ANY_FAILURE_TYPE && stopRule.getNumberLimit() < maxFailureAny)
                            maxFailureAny = stopRule.getNumberLimit();
                        else if (stopRule.getRuleType() == StopRule.CONFIRMED_FAILURE_TYPE && stopRule.getNumberLimit() < maxFailureConf)
                            maxFailureConf = stopRule.getNumberLimit();
                        else if (stopRule.getRuleType() == StopRule.UNCONFIRMED_FAILURE_TYPE && stopRule.getNumberLimit() < maxFailureUnconf)
                            maxFailureUnconf = stopRule.getNumberLimit();
                    }
                }
                this.maxCount = maxCount;
                this.timeoutTms = maxTimeoutTms;
                this.maxFailureAny = maxFailureAny;
                this.maxFailureConfirmed = maxFailureConf;
                this.maxFailureUnconfirmed = maxFailureUnconf;
            }
        } catch (IllegalArgumentException iae) {
            if (validateInput)
                throw iae;
        }
        this.sRules = sRules;
        this.stopRules = tmpList;
    }

    /**
     * Sets the job file. Job types that require a file (software/script) must get one;
     * other types must not.
     */
    public void setFile(File file) {
        if (validateInput) {
            if (this.getFlags().getType().requireFile() && file == null)
                throw new IllegalArgumentException("Job with jobtype " + this.getFlags().getType() + " requires a file (software/script)");
            else if (!this.getFlags().getType().requireFile() && file != null)
                throw new IllegalArgumentException("Job with jobtype " + this.getFlags().getType() + " cannot specify a file");
        }
        this.file = file;
    }

    /** Replaces the dependency job, keeping both jobs' parent/child links consistent. */
    public void setDependency(Job newdep) {
        if (dependency != null)
            dependency.removeChild(this);
        dependency = newdep;
        if (dependency != null)
            dependency.addChild(this);
    }

    /** Sets the repeat count; negative values are rejected or corrected to 0. */
    public void setRepeatCount(Integer repeat) {
        if (validateInput && repeat != null && repeat < 0)
            throw new IllegalArgumentException("Job Repeat Count cannot be less than 0");
        else if (repeat != null && repeat < 0)
            repeat = 0; // sanitize the local value (previously the field was zeroed and then overwritten with the negative input)
        this.repeatCount = repeat;
    }

    /** Sets the repeat interval (seconds); negative values are rejected or defaulted to 24h. */
    public void setRepeatInterval(Integer repeatInterval) {
        if (validateInput && repeatInterval != null && repeatInterval < 0)
            throw new IllegalArgumentException("Job Repeat Interval cannot be less than 0");
        if (repeatInterval != null && repeatInterval < 0)
            repeatInterval = 86400; // do not allow negative interval - set it default to 24h
        this.repeatInterval = repeatInterval;
    }

    /* MISC Get-Methods for persistent fields set after Job has started */
    public int getCompletedHadFailures() {
        return completedHadFailures;
    }

    public int getCompletedNoFailures() {
        return completedNoFailures;
    }

    public int getConfirmedFailed() {
        return confirmedFailed;
    }

    public Map<String, JobParameter> getDefaultParameters() {
        if (defaultParameters == null) {
            MapWrapper<JobParameter> mw = new MapWrapper<JobParameter>(XAPS.isStrictOrder());
            defaultParameters = mw.getMap();
        }
        return defaultParameters;
    }

    public Date getEndTimestamp() {
        return endTimestamp;
    }

    public Date getStartTimestamp() {
        return startTimestamp;
    }

    public JobStatus getStatus() {
        return status;
    }

    public int getUnconfirmedFailed() {
        return unconfirmedFailed;
    }

    /* MISC Set-Methods for persistent fields set after Job has started */
    public void setCompletedHadFailures(int completedHadFailures) {
        this.completedHadFailures = completedHadFailures;
    }

    public void setCompletedNoFailures(int completedNoFailures) {
        this.completedNoFailures = completedNoFailures;
    }

    public void setConfirmedFailed(int confirmedFailed) {
        this.confirmedFailed = confirmedFailed;
    }

    protected void setDefaultParameters(TreeMap<String, JobParameter> defaultParameters) {
        this.defaultParameters = defaultParameters;
    }

    public void setEndTimestamp(Date endTimestamp) {
        this.endTimestamp = endTimestamp;
    }

    public void setNextPII(Long nextPII) {
        this.nextPII = nextPII;
    }

    public void setStartTimestamp(Date startTimestamp) {
        this.startTimestamp = startTimestamp;
    }

    public void setStatus(JobStatus status) {
        this.status = status;
    }

    public void setUnconfirmedFailed(int unconfirmedFailed) {
        this.unconfirmedFailed = unconfirmedFailed;
    }

    /* Job children manipulation and retrieval */
    protected void addChild(Job child) {
        if (children == null)
            children = new ArrayList<Job>();
        if (!this.children.contains(child))
            this.children.add(child);
    }

    protected void removeChild(Job child) {
        if (children != null)
            children.remove(child);
    }

    /** @return all descendants of this job (children, grandchildren, ...), depth-first. */
    public List<Job> getAllChildren() {
        return getAllChildrenRec(this);
    }

    private List<Job> getAllChildrenRec(Job j) {
        List<Job> groups = new ArrayList<Job>();
        for (Job childrenJob : j.getChildren()) {
            groups.add(childrenJob);
            groups.addAll(getAllChildrenRec(childrenJob));
        }
        return groups;
    }

    public List<Job> getChildren() {
        if (children == null)
            children = new ArrayList<Job>();
        return children;
    }

    /* Various GET/SET methods of non-persistent, run-time job-related fields */
    public long getMaxFailureAny() {
        return maxFailureAny;
    }

    public long getMaxCount() {
        return maxCount;
    }

    public long getMaxFailureConfirmed() {
        return maxFailureConfirmed;
    }

    public long getMaxFailureUnconfirmed() {
        return maxFailureUnconfirmed;
    }

    public Long getNextPII() {
        return nextPII;
    }

    public long getTimeoutTms() {
        return timeoutTms;
    }

    @Override
    public String toString() {
        return name + " (" + id + ") [Status:" + status + "]";
    }

    /** Enables/disables strict input validation in the setters. */
    protected void validateInput(boolean validateInput) {
        this.validateInput = validateInput;
    }
}
| |
package org.apache.ode.bpel.runtime;
import java.util.List;
import java.util.Set;
import javax.xml.namespace.QName;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.engine.BpelEngineImpl;
import org.apache.ode.bpel.engine.FragmentCompositionResponse;
import org.apache.ode.bpel.engine.fc.excp.FragmentCompositionException;
import org.apache.ode.bpel.extensions.events.ActivityExecuting;
import org.apache.ode.bpel.o.OActivity;
import org.apache.ode.bpel.o.OBase;
import org.apache.ode.bpel.o.OFragmentEntry;
import org.apache.ode.bpel.o.OFragmentExit;
import org.apache.ode.bpel.o.OFragmentScope;
import org.apache.ode.bpel.o.OFragmentSequence;
import org.apache.ode.bpel.o.OProcess;
import org.apache.ode.bpel.runtime.channels.FragmentCompositionChannel;
import org.apache.ode.bpel.runtime.channels.FragmentCompositionChannelListener;
import org.apache.ode.bpel.runtime.channels.FragmentCompositionResponseChannel;
import org.apache.ode.bpel.runtime.channels.FragmentEntryMappedChannel;
import org.apache.ode.bpel.runtime.channels.ParentScopeChannel;
import org.apache.ode.bpel.runtime.channels.TerminationChannel;
import org.apache.ode.bpel.util.fc.FragmentCompositionUtil;
import org.apache.ode.fc.dao.FCManagementDAO;
import org.apache.ode.utils.fc.Mapping;
/**
*
* @author Alex Hummel
*
*/
public class FRAGMENTSEQUENCE extends SEQUENCE {
private static final long serialVersionUID = 1L;
private OFragmentSequence _ofSequence;
private static final Log __log = LogFactory.getLog(FRAGMENTSEQUENCE.class);
/**
 * Creates the fragment sequence for its first activation.
 *
 * @param self       activity metadata; {@code self.o} must be an {@link OFragmentSequence}.
 * @param scopeFrame enclosing scope frame.
 * @param linkFrame  enclosing link frame.
 */
FRAGMENTSEQUENCE(ActivityInfo self, ScopeFrame scopeFrame,
        LinkFrame linkFrame) {
    super(self, scopeFrame, linkFrame);
    // Narrow the generic activity model to the fragment-sequence model this class needs.
    _ofSequence = (OFragmentSequence) _self.o;
}
/**
 * Creates a continuation of the sequence with the not-yet-executed children.
 *
 * @param self          activity metadata; {@code self.o} must be an {@link OFragmentSequence}.
 * @param scopeFrame    enclosing scope frame.
 * @param linkFrame     enclosing link frame.
 * @param remaining     child activities that still have to be executed.
 * @param compensations compensation handlers accumulated so far.
 * @param firstT        whether this is the first activation of the sequence.
 */
FRAGMENTSEQUENCE(ActivityInfo self, ScopeFrame scopeFrame,
        LinkFrame linkFrame, List<OActivity> remaining,
        Set<CompensationHandler> compensations, Boolean firstT) {
    super(self, scopeFrame, linkFrame, remaining, compensations, firstT);
    // Narrow the generic activity model to the fragment-sequence model this class needs.
    _ofSequence = (OFragmentSequence) _self.o;
}
/**
 * Executes one step of the fragment sequence. On the first activation it fires an
 * ActivityExecuting event and registers a fragment-composition channel for this
 * sequence with the FC management DAO. It then schedules the next remaining child;
 * if that child is a fragment entry or exit, a COMPOSER is additionally instantiated
 * so composition requests on the registered channel can be handled.
 */
public void execute() {
    FCManagementDAO fcManagementDAO = getBpelRuntimeContext()
            .getBpelProcess().getEngine().getFCManagementDAO();
    Long instanceId = getBpelRuntimeContext().getPid();
    if (_firstTime) {
        // Event Activity_Executing
        ActivityExecuting evt = new ActivityExecuting(_self.o.name, _self.o.getId(),
                _self.o.getXpath(), _self.aId, sFrame.oscope.getXpath(),
                sFrame.scopeInstanceId, process_name, process_ID,
                _self.o.getArt(), false);
        getBpelRuntimeContext().getBpelProcess().getEngine().fireEvent(evt);
        // Register a composition channel for this sequence so composition requests
        // can later be routed to the COMPOSER (which re-imports the exported channel).
        FragmentCompositionChannel compChannel = newChannel(
                FragmentCompositionChannel.class, "created in FSequence");
        fcManagementDAO.addGlueResponseChannel(instanceId,
                _ofSequence.getId(), compChannel.export());
    }
    ActivityInfo info = null;
    OActivity child = _remaining.get(0);
    if (child instanceof OFragmentExit || child instanceof OFragmentEntry) {
        // Fragment boundary ahead: start listening for glue/wire requests.
        instance(new COMPOSER());
    }
    info = new ActivityInfo(genMonotonic(), child,
            newChannel(TerminationChannel.class), newChannel(
                    ParentScopeChannel.class, "sequence"
                            + FRAGMENTSEQUENCE.this._self.toString()));
    instance(createChild(info, _scopeFrame, _linkFrame));
    final ActivityInfo childInfo = info;
    instance(new ACTIVE(childInfo));
}
/**
 * Creates the continuation instance of this sequence with the remaining children.
 *
 * NOTE(review): the {@code self}, {@code scopeFrame} and {@code linkFrame} parameters
 * are ignored in favor of this instance's {@code _self}/{@code _scopeFrame}/{@code _linkFrame}
 * fields — confirm this is intentional rather than a copy-paste slip.
 */
protected void createInstance(ActivityInfo self, ScopeFrame scopeFrame,
        LinkFrame linkFrame, List<OActivity> remaining,
        Set<CompensationHandler> compensations, Boolean firstT) {
    instance(new FRAGMENTSEQUENCE(_self, _scopeFrame, _linkFrame,
            remaining, compensations, false));
}
private class COMPOSER extends BpelJacobRunnable {
private static final long serialVersionUID = 1L;
private FragmentCompositionChannel compChannel;
/**
 * Re-imports the composition channel that was registered for this sequence
 * (keyed by process-instance id and the sequence's model id) from the FC
 * management DAO, so this runnable can listen for composition requests on it.
 */
public COMPOSER() {
    FCManagementDAO fcManagementDAO = getBpelRuntimeContext()
            .getBpelProcess().getEngine().getFCManagementDAO();
    Long instanceId = getBpelRuntimeContext().getPid();
    // Look up the exported channel string stored earlier (see execute()'s first-time branch).
    String channel = fcManagementDAO.getChannel(instanceId,
            _self.o.getId());
    compChannel = importChannel(channel,
            FragmentCompositionChannel.class);
    // @hahnml: Set the OBase id
    oId = FRAGMENTSEQUENCE.this.oId;
}
public void run() {
object(false, new FragmentCompositionChannelListener(compChannel) {
private static final long serialVersionUID = -8140057051098543899L;
public void glue(QName newFragmentName,
FragmentCompositionResponse response) {
if (FragmentCompositionUtil.isGlueAllowed(_ofSequence
.getOwner())) {
BpelEngineImpl engine = FragmentCompositionUtil
.getEngine(FRAGMENTSEQUENCE.this);
OProcess toGlue = engine.getProcess(newFragmentName)
.getOProcessFromCBP();
OFragmentScope scope = FragmentCompositionUtil
.getScopeToGlue(_ofSequence, toGlue);
if (scope != null) {
glueOTree(scope);
FragmentCompositionUtil.merge(engine,
getBpelRuntimeContext().getBpelProcess(),
engine.getProcess(newFragmentName), toGlue,
_ofSequence);
__log.info("Fragment "
+ newFragmentName.getLocalPart()
+ " is glued into " + _self.o);
// instantiation happens in wireAndMap
// to prevent execution of fragmentEntry before
// fragmentExit is finished
instance(new COMPOSER());
response.returnValue(true);
} else {
instance(new COMPOSER());
response.returnValue(false);
}
}
}
public void wireAndMap(int fragmentExitId, int fragmentEntryId,
Mapping[] mappings, FragmentCompositionResponse response) {
try {
ElementMappingUtil.wireAndMap(FRAGMENTSEQUENCE.this,
fragmentExitId, fragmentEntryId, mappings);
getBpelRuntimeContext().getBpelProcess()
.serializeCompiledProcess(
_ofSequence.getOwner());
OBase exit = _ofSequence.getOwner().getChild(
fragmentExitId);
if (exit != null && exit instanceof OFragmentExit) {
Long instanceId = getBpelRuntimeContext().getPid();
FCManagementDAO fcManagementDAO = getBpelRuntimeContext()
.getBpelProcess().getEngine()
.getFCManagementDAO();
String channel = fcManagementDAO.getChannel(
instanceId, fragmentExitId);
if (channel != null) {
FragmentCompositionResponseChannel responseChannel = importChannel(
channel,
FragmentCompositionResponseChannel.class);
responseChannel.fragmentCompositionCompleted();
fcManagementDAO.removeChannel(instanceId,
exit.getId());
} else {
throw new FragmentCompositionException(
"FragmentCompositionResponseChannel could not be found!");
}
}
response.returnValue(true);
} catch (FragmentCompositionException e) {
instance(new COMPOSER());
response.throwException(e);
}
}
            // Grafts the scope of a glued fragment into this sequence's
            // compiled process model (OModel).
            // NOTE(review): the mutation order appears deliberate - the old
            // owner's nodes are re-rooted before adoption; do not reorder.
            private void glueOTree(final OFragmentScope scope) {
                OProcess oldOwner = scope.activity.getOwner();
                // Re-root every node of the fragment's model onto our process.
                scope.activity.getOwner()
                        .setNewRoot(_ofSequence.getOwner());
                _ofSequence.getOwner().adoptChildren(oldOwner);
                // Queue the scope for execution and link it into the sequence.
                _remaining.add(scope);
                _ofSequence.sequence.add(scope);
                scope.setParent(_ofSequence);
                // The glued scope is no longer a standalone process scope.
                scope.processScope = false;
                _ofSequence.getOwner().incrementGluedFragmentsCount();
            }
public void ignoreFragmentExit(int fragmentExitId,
FragmentCompositionResponse response) {
OBase exit = _ofSequence.getOwner()
.getChild(fragmentExitId);
if (exit != null && exit instanceof OFragmentExit) {
OFragmentExit fragmentExit = (OFragmentExit) exit;
boolean exitWasAlreadyIgnored = fragmentExit.ignoredExit;
fragmentExit.danglingExit = false;
fragmentExit.ignoredExit = true;
getBpelRuntimeContext().getBpelProcess()
.serializeCompiledProcess(_self.o.getOwner());
Long instanceId = getBpelRuntimeContext().getPid();
FCManagementDAO fcManagementDAO = getBpelRuntimeContext()
.getBpelProcess().getEngine()
.getFCManagementDAO();
String channel = fcManagementDAO.getChannel(instanceId,
fragmentExit.getId());
if (channel != null) {
FragmentCompositionResponseChannel respChannel = importChannel(
channel,
FragmentCompositionResponseChannel.class);
respChannel.fragmentCompositionCompleted();
fcManagementDAO.removeChannel(instanceId,
fragmentExit.getId());
if (!exitWasAlreadyIgnored) {
_ofSequence.getOwner()
.incrementGluedFragmentsCount();
}
response.returnValue(true);
} else {
instance(new COMPOSER());
response.throwException(new FragmentCompositionException(
"FragmentCompositionResponseChannel could not be found!"));
}
} else {
response.returnValue(false);
}
}
public void ignoreFragmentEntry(int fragmentEntryId,
FragmentCompositionResponse response) {
OBase entry = _ofSequence.getOwner().getChild(
fragmentEntryId);
if (entry != null && entry instanceof OFragmentEntry) {
OFragmentEntry fragmentEntry = (OFragmentEntry) entry;
fragmentEntry.danglingEntry = false;
fragmentEntry.ignoredEntry = true;
getBpelRuntimeContext().getBpelProcess()
.serializeCompiledProcess(_self.o.getOwner());
Long instanceId = getBpelRuntimeContext().getPid();
FCManagementDAO fcManagementDAO = getBpelRuntimeContext()
.getBpelProcess().getEngine()
.getFCManagementDAO();
String channel = fcManagementDAO.getChannel(instanceId,
fragmentEntry.getId());
if (channel != null) {
FragmentEntryMappedChannel respChannel = importChannel(
channel, FragmentEntryMappedChannel.class);
respChannel.ignoreEntry();
fcManagementDAO.removeChannel(instanceId,
fragmentEntry.getId());
response.returnValue(true);
} else {
instance(new COMPOSER());
response.throwException(new FragmentCompositionException(
"FragmentCompositionResponseChannel could not be found!"));
}
} else {
response.returnValue(false);
}
}
});
}
}
public String toString() {
return "<T:Act:FragmentSequence:" + _ofSequence.name + ">";
}
protected void onActivityComplete() {
FCManagementDAO fcManagementDAO = getBpelRuntimeContext()
.getBpelProcess().getEngine().getFCManagementDAO();
Long instanceId = getBpelRuntimeContext().getPid();
fcManagementDAO.removeChannel(instanceId, _self.o.getId());
}
}
| |
/****************************************************************
* Licensed to the AOS Community (AOS) under one or more *
* contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The AOS licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package io.aos.reflect.clazz;
import java.io.BufferedReader;
import java.io.FileReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Member;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Extract information about a class. This class was used to generate the quick
* reference for the NIO book for O'Reilly. It's kind of ugly and there are a
* lot of hacks in here. The most obvious one is the wrapping of <emphasis> tags
* around class and member names in the toString() output. With a little work
* this could be made into a generic class introspector, but it currently has
* some warts on it.
*
*/
public class ClassInfo {
private static final String SRC_DIR = "/usr/local/java/java1.4/src";
private Package packg;
private Class thisClass;
private Class superClass;
private Class declaringClass;
private Class[] interfaces;
private Field[] fields;
private Constructor[] constructors;
private Method[] methods;
private Class[] classes;
private ClassInfo[] internalClasses;
private int mods = Modifier.PUBLIC;
public ClassInfo(String className) throws Exception {
this(Class.forName(className));
}
public ClassInfo(String className, boolean prot) throws Exception {
this(Class.forName(className), (prot) ? Modifier.PUBLIC
| Modifier.PROTECTED : Modifier.PUBLIC);
}
public ClassInfo(Class clas) throws Exception {
this(clas, Modifier.PUBLIC);
}
public ClassInfo(Class clas, int mods) throws Exception {
thisClass = clas;
packg = clas.getPackage();
superClass = clas.getSuperclass();
interfaces = clas.getInterfaces();
declaringClass = clas.getDeclaringClass();
fields = trimFields(clas.getDeclaredFields(), mods);
constructors = trimConstructors(clas.getConstructors(), mods);
methods = trimMethods(clas.getDeclaredMethods(), mods);
classes = trimClasses(clas.getDeclaredClasses(), mods);
if (classes.length == 0) {
internalClasses = new ClassInfo[0];
} else {
List list = new LinkedList();
for (int i = 0; i < classes.length; i++) {
list.add(new ClassInfo(classes[i]));
}
internalClasses = new ClassInfo[list.size()];
list.toArray(internalClasses);
}
}
public static void main(String... argv) throws Exception {
for (int i = 0; i < argv.length; i++) {
ClassInfo ci = new ClassInfo(argv[i]);
System.out.println(new ClassInfo(argv[i]).toString());
}
}
public boolean isInterface() {
return (thisClass.isInterface());
}
public Class getSuperClass() {
if ((superClass == null) || (superClass == Object.class)) {
return (null);
}
return (superClass);
}
// ---------------------------------------------------------------
// Given an array of Class or Member, return a new array containing
// only those elements with the matching modifier bits set.
// This is to filter out private and protected methods for example.
private Field[] trimFields(Field[] item, int mods) {
List list = new LinkedList();
for (int i = 0; i < item.length; i++) {
if ((item[i].getModifiers() & mods) != 0) {
list.add(item[i]);
}
}
Field[] trimmed = new Field[list.size()];
list.toArray(trimmed);
Arrays.sort(trimmed, comper);
return (trimmed);
}
private Constructor[] trimConstructors(Constructor[] item, int mods) {
List list = new LinkedList();
for (int i = 0; i < item.length; i++) {
if ((item[i].getModifiers() & mods) != 0) {
list.add(item[i]);
}
}
Constructor[] trimmed = new Constructor[list.size()];
list.toArray(trimmed);
Arrays.sort(trimmed, comper);
return (trimmed);
}
private Method[] trimMethods(Method[] item, int mods) {
List list = new LinkedList();
for (int i = 0; i < item.length; i++) {
if ((item[i].getModifiers() & mods) != 0) {
list.add(item[i]);
}
}
Method[] trimmed = new Method[list.size()];
list.toArray(trimmed);
Arrays.sort(trimmed, comper);
return (trimmed);
}
private Class[] trimClasses(Class[] item, int mods) {
List list = new LinkedList();
for (int i = 0; i < item.length; i++) {
if ((item[i].getModifiers() & mods) != 0) {
list.add(item[i]);
}
}
Class[] trimmed = new Class[list.size()];
list.toArray(trimmed);
Arrays.sort(trimmed, comper);
return (trimmed);
}
private Comper comper = new Comper();
private class Comper implements Comparator {
public int compare(Object o1, Object o2) {
if (o1 instanceof Class) {
Class c1 = (Class) o1;
Class c2 = (Class) o2;
String n1 = c1.getName();
String n2 = c2.getName();
if (c1.isArray()) {
n1 = c1.getComponentType().getName();
}
if (c2.isArray()) {
n2 = c2.getComponentType().getName();
}
return (n1.compareTo(n2));
}
Member m1 = (Member) o1;
Member m2 = (Member) o2;
String n1 = m1.getName();
String n2 = m2.getName();
if ((!n1.equals(n2)) || (m1 instanceof Field)) {
return (n1.compareTo(n2));
}
if (m1 instanceof Constructor) {
return (compareArgs(((Constructor) m1).getParameterTypes(),
((Constructor) m2).getParameterTypes()));
}
return (compareArgs(((Method) m1).getParameterTypes(),
((Method) m2).getParameterTypes()));
}
private int compareArgs(Class[] p1, Class[] p2) {
int n = p1.length - p2.length;
if (n != 0) {
return (n);
}
for (int i = 0; i < p1.length; i++) {
n = comper.compare(p1[i], p2[i]);
if (n != 0) {
return (n);
}
}
return (0);
}
public boolean equals(Object o1) {
return (false);
}
}
// ---------------------------------------------------------------
public String getSimpleClassName(String name) {
String rep = name.replace('$', '.');
int n = rep.lastIndexOf(".");
if (n == -1) {
return (rep);
}
return (rep.substring(n + 1));
}
public String getSimpleClassName(Class clas) {
return (getSimpleClassName(clas.getName()));
}
public String getClassName(String name) {
String pkgname = packg.getName();
if (name.startsWith("java.lang")) {
return (name.substring("java.lang".length() + 1));
}
if (name.startsWith(pkgname)) {
String simple = name.substring(pkgname.length() + 1);
// is the package name the whole prefix?
if (simple.indexOf(".") == -1) {
return (simple.replace('$', '.'));
}
}
return (name.replace('$', '.'));
}
public String getClassName(Class clas) {
return (getClassName(clas.getName()));
}
public String getClassName() {
return (getClassName(thisClass));
}
public String getModifierNames(int mods) {
if (thisClass.isInterface()) {
mods &= ~Modifier.ABSTRACT;
}
return (Modifier.toString(mods));
}
public String getModifierNames() {
return (getModifierNames(thisClass.getModifiers()));
}
private void indent(StringBuffer sb, String indent, int level) {
for (int i = 0; i < level; i++) {
sb.append(indent);
}
}
// ---------------------------------------------------------------
public String toString() {
return (toString("\t"));
}
public String toString(String indent) {
return (toString(indent, true));
}
public String toString(String indent, boolean listPackage) {
StringBuffer sb = new StringBuffer();
stringify(sb, indent, 0, listPackage);
return (sb.toString());
}
void stringify(StringBuffer sb, String indent, int indentLevel,
boolean listPackage) {
if (listPackage) {
indent(sb, indent, indentLevel);
sb.append("package ").append(packg.getName());
sb.append("\n\n");
}
indent(sb, indent, indentLevel);
sb.append(getModifierNames());
if (thisClass.isInterface()) {
sb.append(" ");
} else {
sb.append(" class ");
}
// HACK! This should be better parameterized
sb.append("<emphasis>");
sb.append(getClassName());
sb.append("</emphasis>");
Class sc = getSuperClass();
if (sc != null) {
sb.append("\n");
indent(sb, indent, indentLevel);
sb.append(indent).append("extends ");
sb.append(getClassName(sc));
}
if (interfaces.length > 0) {
sb.append("\n");
indent(sb, indent, indentLevel);
sb.append(indent);
if (isInterface()) {
sb.append("extends ");
} else {
sb.append("implements ");
}
for (int i = 0; i < interfaces.length; i++) {
if (i != 0) {
sb.append(", ");
}
sb.append(getClassName(interfaces[i]));
}
}
sb.append("\n");
indent(sb, indent, indentLevel);
sb.append("{");
sb.append("\n");
boolean needsep = false;
if (fields.length > 0) {
printMembers(sb, fields, indent, indentLevel + 1);
needsep = true;
}
if (constructors.length > 0) {
if (needsep)
sb.append("\n");
printMembers(sb, constructors, indent, indentLevel + 1);
needsep = true;
}
if (methods.length > 0) {
if (needsep)
sb.append("\n");
printMembers(sb, methods, indent, indentLevel + 1);
needsep = true;
}
if (internalClasses.length > 0) {
if (needsep)
sb.append("\n");
for (int i = 0; i < internalClasses.length; i++) {
if (!Modifier.isPublic(classes[i].getModifiers())) {
continue;
}
if (i != 0)
sb.append("\n");
internalClasses[i].stringify(sb, indent, indentLevel + 1, false);
// nl is suppressed at end of classes
sb.append("\n");
}
}
indent(sb, indent, indentLevel);
sb.append("}");
}
private void printType(StringBuffer sb, Class type) {
if (type.isArray()) {
sb.append(getClassName(type.getComponentType()));
sb.append(" []");
} else {
sb.append(getClassName(type));
}
}
private void printMembers(StringBuffer sb, Member[] members, String indent,
int indentLevel) {
for (int i = 0; i < members.length; i++) {
Member member = members[i];
int mods = member.getModifiers();
if (Modifier.isPrivate(mods)) {
continue;
}
if ((!Modifier.isPublic(mods)) && (!Modifier.isProtected(mods))) {
continue; // package private
}
indent(sb, indent, indentLevel);
sb.append(getModifierNames(mods));
if (member instanceof Field) {
Field field = (Field) member;
sb.append(" ");
printType(sb, field.getType());
}
if (member instanceof Method) {
Method method = (Method) member;
sb.append(" ");
printType(sb, method.getReturnType());
}
sb.append(" ");
sb.append("<emphasis>");
sb.append(getClassName(member.getName()));
sb.append("</emphasis>");
if ((member instanceof Constructor) || (member instanceof Method)) {
printArgs(sb, member);
printExcept(sb, member, indent, indentLevel);
}
if (Modifier.isAbstract(mods)) {
sb.append(";");
}
sb.append("\n");
}
}
private void printArgs(StringBuffer sb, Member member) {
Class[] paramTypes = null;
if (member instanceof Constructor) {
Constructor c = (Constructor) member;
paramTypes = c.getParameterTypes();
}
if (member instanceof Method) {
Method m = (Method) member;
paramTypes = m.getParameterTypes();
}
if ((paramTypes == null) || (paramTypes.length == 0)) {
sb.append("()");
return;
}
sb.append(" (");
String[] names = getArgNames(member, paramTypes);
for (int i = 0; i < paramTypes.length; i++) {
Class p = paramTypes[i];
if (i != 0) {
sb.append(", ");
}
printType(sb, p);
if (names == null) {
sb.append(" XXX");
} else {
sb.append(" ").append(names[i]);
}
}
sb.append(")");
}
private void printExcept(StringBuffer sb, Member member, String indent,
int indentLevel) {
Class[] exs = null;
if (member instanceof Constructor) {
exs = ((Constructor) member).getExceptionTypes();
}
if (member instanceof Method) {
exs = ((Method) member).getExceptionTypes();
}
if (exs.length == 0) {
return;
}
sb.append("\n");
indent(sb, indent, indentLevel + 1);
sb.append("throws ");
for (int i = 0; i < exs.length; i++) {
if (i != 0) {
sb.append(", ");
}
sb.append(getClassName(exs[i]));
}
}
// ---------------------------------------------------------------
private String[] zeroStringArray = new String[0];
private String[] getArgNames(Member member, Class[] params) {
if (params.length == 0) {
return (zeroStringArray);
}
CharSequence cs = null;
try {
// This is inefficient, the file is loaded for
// each constructor/method. The source file should
// be loaded in the constructor and re-used.
cs = loadFile(srcFileName(thisClass.getName()));
} catch (Exception e) {
System.out.println("Can't open file: " + e);
return (null);
}
boolean isMethod = (member instanceof Method);
StringBuffer sb = new StringBuffer();
String s;
sb.append("(?ms)^\\s*");
s = getModifierNames(member.getModifiers());
sb.append(s.replaceAll("\\s+", "\\\\s+"));
sb.append("\\s+");
if (isMethod) {
Method method = (Method) member;
appendRegexType(sb, method.getReturnType());
sb.append("\\s+");
}
sb.append(getSimpleClassName(member.getName()));
sb.append("\\s*\\(");
for (int i = 0; i < params.length; i++) {
if (i != 0) {
sb.append(",\\s*");
}
sb.append("\\s*\\w*?\\s*");
Class p = params[i];
appendRegexType(sb, p);
sb.append("\\s+");
sb.append("(\\w+)\\s*");
}
sb.append("\\)");
String regex = sb.toString();
Pattern pat = Pattern.compile(regex);
Matcher matcher = pat.matcher(cs);
if (!matcher.find()) {
System.out.println(getSimpleClassName(thisClass) + ": no match='" + regex
+ "'");
return (null);
}
int count = matcher.groupCount();
String[] names = new String[count];
for (int i = 0; i < count; i++) {
names[i] = matcher.group(i + 1);
}
return (names);
}
private void appendRegexType(StringBuffer sb, Class t) {
if (t.isArray()) {
sb.append(getSimpleClassName(t.getComponentType()));
sb.append("\\s*\\[\\]\\s*");
} else {
sb.append(getSimpleClassName(t));
}
}
private String srcFileName(String className) {
return (SRC_DIR + "/" + className.replaceAll("\\.", "/") + ".java");
}
private CharSequence loadFile(String name) throws Exception {
BufferedReader in = new BufferedReader(new FileReader(name));
StringBuffer sb = new StringBuffer();
String s;
while ((s = in.readLine()) != null) {
sb.append(s);
sb.append("\n");
}
in.close();
return sb;
}
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.execution.wsl;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.configurations.GeneralCommandLine;
import com.intellij.execution.process.ProcessOutput;
import com.intellij.execution.util.ExecUtil;
import com.intellij.openapi.application.Experiments;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.impl.ProjectJdkImpl;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.NullableLazyValue;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.impl.wsl.WslConstants;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.LinkOption;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.regex.Pattern;
import static com.intellij.openapi.util.NullableLazyValue.lazyNullable;
/**
* Class for working with WSL after Fall Creators Update
* https://blogs.msdn.microsoft.com/commandline/2017/10/11/whats-new-in-wsl-in-windows-10-fall-creators-update/
* - multiple linuxes
 * - file system is unavailable from windows (for now at least)
*/
public final class WSLUtil {
  public static final Logger LOG = Logger.getInstance("#com.intellij.execution.wsl");

  /** Legacy SDK home prefix replaced by {@link WslConstants#UNC_PREFIX} in {@link #fixWslPrefix}. */
  private static final String WSL_PATH_TO_REMOVE = "wsl://";

  /**
   * @deprecated use {@link WslDistributionManager#getInstalledDistributions()} instead.
   * Method will be removed after we check statistics and make sure versions before 1903 aren't used.
   */
  @Deprecated(forRemoval = true)
  @NotNull
  public static List<WSLDistribution> getAvailableDistributions() {
    if (!isSystemCompatible()) return Collections.emptyList();
    final Path executableRoot = getExecutableRootPath();
    if (executableRoot == null) return Collections.emptyList();
    Collection<WslDistributionDescriptor> descriptors = WSLDistributionService.getInstance().getDescriptors();
    final List<WSLDistribution> result = new ArrayList<>(descriptors.size() + 1 /* LEGACY_WSL */);
    for (WslDistributionDescriptor descriptor : descriptors) {
      String executablePathStr = descriptor.getExecutablePath();
      if (executablePathStr != null) {
        Path executablePath = Paths.get(executablePathStr);
        // Relative descriptor paths are resolved against %LOCALAPPDATA%\Microsoft\WindowsApps.
        if (!executablePath.isAbsolute()) {
          executablePath = executableRoot.resolve(executablePath);
        }
        // NOFOLLOW_LINKS: store launchers are app-execution aliases (reparse points),
        // which may not resolve as ordinary files when links are followed.
        if (Files.exists(executablePath, LinkOption.NOFOLLOW_LINKS)) {
          result.add(new WSLDistribution(descriptor, executablePath));
        }
      }
    }
    // add legacy WSL if it's available and enabled
    if (Experiments.getInstance().isFeatureEnabled("wsl.legacy.distribution")) {
      ContainerUtil.addIfNotNull(result, WSLDistributionLegacy.getInstance());
    }
    return result;
  }

  /**
   * @return root for WSL executable or null if unavailable
   */
  @Nullable
  private static Path getExecutableRootPath() {
    // System.getenv(String) avoids materializing the whole environment map
    // and is case-insensitive on Windows, matching env-var semantics there.
    String localAppDataPath = System.getenv("LOCALAPPDATA");
    return StringUtil.isEmpty(localAppDataPath) ? null : Paths.get(localAppDataPath, "Microsoft\\WindowsApps");
  }

  /**
   * @return instance of WSL distribution or null if it's unavailable
   * @deprecated Use {@link WslDistributionManager#getOrCreateDistributionByMsId(String)}
   */
  @Nullable
  @Deprecated
  public static WSLDistribution getDistributionByMsId(@Nullable String name) {
    if (name == null) {
      return null;
    }
    for (WSLDistribution distribution : getAvailableDistributions()) {
      if (name.equals(distribution.getMsId())) {
        return distribution;
      }
    }
    return null;
  }

  /** @return true if this OS can host WSL at all (Windows 10 or newer) */
  public static boolean isSystemCompatible() {
    return SystemInfo.isWin10OrNewer;
  }

  /**
   * @param wslPath a path in WSL file system, e.g. "/mnt/c/Users/file.txt" or "/c/Users/file.txt"
   * @param mntRoot a directory where fixed drives will be mounted. Default is "/mnt/" - {@link WSLDistribution#DEFAULT_WSL_MNT_ROOT}).
   *                See https://docs.microsoft.com/ru-ru/windows/wsl/wsl-config#configuration-options
   * @return Windows-dependent path to the file, pointed by {@code wslPath} in WSL or null if the path is unmappable.
   * For example, {@code getWindowsPath("/mnt/c/Users/file.txt", "/mnt/") returns "C:\Users\file.txt"}
   */
  @Nullable
  public static String getWindowsPath(@NotNull String wslPath, @NotNull String mntRoot) {
    if (!wslPath.startsWith(mntRoot)) {
      return null;
    }
    // The single character right after the mount root must be a drive letter...
    int driveLetterIndex = mntRoot.length();
    if (driveLetterIndex >= wslPath.length() || !Character.isLetter(wslPath.charAt(driveLetterIndex))) {
      return null;
    }
    // ...followed either by nothing (bare drive) or by a path separator.
    int slashIndex = driveLetterIndex + 1;
    if (slashIndex < wslPath.length() && wslPath.charAt(slashIndex) != '/') {
      return null;
    }
    return FileUtil.toSystemDependentName(Character.toUpperCase(wslPath.charAt(driveLetterIndex)) + ":" + wslPath.substring(slashIndex));
  }

  /**
   * @param distribution the distribution whose WSL version should be determined
   * @return version if it can be determined or -1 instead
   */
  static int getWslVersion(@NotNull WSLDistribution distribution) {
    // Prefer the authoritative "wsl.exe" listing; fall back to the uname
    // heuristic on installations that predate the CLI support.
    int version = getVersionFromWslCli(distribution);
    if (version < 0) {
      version = getVersionByUname(distribution);
    }
    return version;
  }

  /** Looks the distribution up in the "wsl.exe -l -v" output; -1 when absent or on error. */
  private static int getVersionFromWslCli(@NotNull WSLDistribution distribution) {
    try {
      final List<WslDistributionAndVersion> versions = WslDistributionManager.getInstance().loadInstalledDistributionsWithVersions();
      final WslDistributionAndVersion distributionAndVersion =
        ContainerUtil.find(versions, version -> version.getDistributionName().equals(distribution.getMsId()));
      if (distributionAndVersion != null) {
        return distributionAndVersion.getVersion();
      }
      LOG.warn("WSL distribution '" + distribution.getMsId() + "' not found");
    }
    catch (IOException | IllegalStateException e) {
      LOG.warn("Failed to calculate version for " + distribution.getMsId() + ": " + e.getMessage());
    }
    return -1;
  }

  // To be removed when old WSL installations (without wsl.exe) are gone.
  private static int getVersionByUname(@NotNull WSLDistribution distribution) {
    try {
      // WSL 1 kernels report "Microsoft" in their version string; WSL 2 do not.
      ProcessOutput output = distribution.executeOnWsl(WSLDistribution.DEFAULT_TIMEOUT, "uname", "-v");
      if (output.checkSuccess(LOG)) {
        return output.getStdout().contains("Microsoft") ? 1 : 2;
      }
    }
    catch (ExecutionException e) {
      LOG.warn(e);
    }
    return -1;
  }

  /** Resolves an internal descriptor id to its Microsoft id; echoes the input when unknown. */
  public static @NotNull @NlsSafe String getMsId(@NotNull @NlsSafe String msOrInternalId) {
    WslDistributionDescriptor descriptor = ContainerUtil.find(WSLDistributionService.getInstance().getDescriptors(),
                                                              d -> d.getId().equals(msOrInternalId));
    return descriptor != null ? descriptor.getMsId() : msOrInternalId;
  }

  /** Which optional flags this machine's "wsl.exe" understands, probed from its --help output. */
  static class WSLToolFlags {
    public final boolean isQuietFlagAvailable;
    public final boolean isVerboseFlagAvailable;

    WSLToolFlags(boolean isQuietAvailable, boolean isVerboseAvailable) {
      isQuietFlagAvailable = isQuietAvailable;
      isVerboseFlagAvailable = isVerboseAvailable;
    }
  }

  // Probing spawns a process, so the result is computed lazily and cached.
  private static final NullableLazyValue<WSLToolFlags> WSL_TOOL_FLAGS = lazyNullable(WSLUtil::getWSLToolFlagsInternal);

  public static @Nullable WSLToolFlags getWSLToolFlags() {
    return WSL_TOOL_FLAGS.getValue();
  }

  private static final Pattern QUIET = Pattern.compile("\\s--quiet,?\\b");
  private static final Pattern VERBOSE = Pattern.compile("\\s--verbose,?\\b");

  private static @Nullable WSLToolFlags getWSLToolFlagsInternal() {
    final Path wslExe = WSLDistribution.findWslExe();
    if (wslExe == null) return null;
    // wsl.exe prints UTF-16LE, not the console code page.
    final GeneralCommandLine commandLine = new GeneralCommandLine(wslExe.toString(), "--help").
      withCharset(StandardCharsets.UTF_16LE);
    try {
      final ProcessOutput output = ExecUtil.execAndGetOutput(commandLine, 5000);
      if (output.isTimeout()) return null;
      // intentionally do no check "wsl --help" exit code because it returns -1
      final String stdout = output.getStdout();
      return new WSLToolFlags(QUIET.matcher(stdout).find(),
                              VERBOSE.matcher(stdout).find());
    }
    catch (Exception e) {
      LOG.warn(e);
      return null;
    }
  }

  /**
   * Change old (wsl://) prefix to the new one (\\wsl$\)
   *
   * @deprecated remove after everyone migrates to the new prefix
   */
  @Deprecated(forRemoval = true)
  public static void fixWslPrefix(@NotNull Sdk sdk) {
    if (sdk instanceof ProjectJdkImpl) {
      var path = sdk.getHomePath();
      if (path != null && path.startsWith(WSL_PATH_TO_REMOVE)) {
        ((ProjectJdkImpl)sdk).setHomePath(WslConstants.UNC_PREFIX + path.substring(WSL_PATH_TO_REMOVE.length()));
      }
    }
  }
}
| |
/*
* Copyright (c) 2007-present, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.threeten.extra.chrono;
import java.io.Serializable;
import java.time.Clock;
import java.time.DateTimeException;
import java.time.Instant;
import java.time.ZoneId;
import java.time.chrono.ChronoLocalDateTime;
import java.time.chrono.ChronoZonedDateTime;
import java.time.chrono.Chronology;
import java.time.chrono.Era;
import java.time.format.ResolverStyle;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalField;
import java.util.Arrays;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
* The Coptic calendar system.
* <p>
* This chronology defines the rules of the Coptic calendar system.
* This calendar system is primarily used in Christian Egypt.
* Dates are aligned such that {@code 0001-01-01 (Coptic)} is {@code 0284-08-29 (ISO)}.
* <p>
* The fields are defined as follows:
* <ul>
* <li>era - There are two eras, the current 'Era of the Martyrs' (AM) and the previous era (BEFORE_AM).
* <li>year-of-era - The year-of-era for the current era increases uniformly from the epoch at year one.
* For the previous era the year increases from one as time goes backwards.
* <li>proleptic-year - The proleptic year is the same as the year-of-era for the
* current era. For the previous era, years have zero, then negative values.
* <li>month-of-year - There are 13 months in a Coptic year, numbered from 1 to 13.
* <li>day-of-month - There are 30 days in each of the first 12 Coptic months, numbered 1 to 30.
* The 13th month has 5 days, or 6 in a leap year, numbered 1 to 5 or 1 to 6.
* <li>day-of-year - There are 365 days in a standard Coptic year and 366 in a leap year.
* The days are numbered from 1 to 365 or 1 to 366.
* <li>leap-year - Leap years occur every 4 years.
* </ul>
*
* <h3>Implementation Requirements</h3>
* This class is immutable and thread-safe.
*/
public final class CopticChronology
extends AbstractNileChronology
implements Serializable {
    /**
     * Singleton instance for the Coptic chronology.
     */
    public static final CopticChronology INSTANCE = new CopticChronology();
    /**
     * Serialization version.
     */
    private static final long serialVersionUID = 7291205177830286973L;
    /**
     * Constructor that is public only to satisfy the {@code ServiceLoader};
     * this class is conceptually a singleton and application code must not
     * call it.
     * @deprecated Use the singleton {@link #INSTANCE} instead.
     */
    @Deprecated
    public CopticChronology() {
    }
    /**
     * Resolve singleton.
     * <p>
     * Ensures that deserialization yields the shared {@link #INSTANCE}
     * rather than a duplicate chronology object.
     *
     * @return the singleton instance, not null
     */
    private Object readResolve() {
        return INSTANCE;
    }
//-----------------------------------------------------------------------
/**
* Gets the ID of the chronology - 'Coptic'.
* <p>
* The ID uniquely identifies the {@code Chronology}.
* It can be used to lookup the {@code Chronology} using {@link Chronology#of(String)}.
*
* @return the chronology ID - 'Coptic'
* @see #getCalendarType()
*/
@Override
public String getId() {
return "Coptic";
}
/**
* Gets the calendar type of the underlying calendar system - 'coptic'.
* <p>
* The calendar type is an identifier defined by the
* <em>Unicode Locale Data Markup Language (LDML)</em> specification.
* It can be used to lookup the {@code Chronology} using {@link Chronology#of(String)}.
* It can also be used as part of a locale, accessible via
* {@link Locale#getUnicodeLocaleType(String)} with the key 'ca'.
*
* @return the calendar system type - 'coptic'
* @see #getId()
*/
@Override
public String getCalendarType() {
return "coptic";
}
//-----------------------------------------------------------------------
/**
* Obtains a local date in Coptic calendar system from the
* era, year-of-era, month-of-year and day-of-month fields.
*
* @param era the Coptic era, not null
* @param yearOfEra the year-of-era
* @param month the month-of-year
* @param dayOfMonth the day-of-month
* @return the Coptic local date, not null
* @throws DateTimeException if unable to create the date
* @throws ClassCastException if the {@code era} is not a {@code CopticEra}
*/
@Override
public CopticDate date(Era era, int yearOfEra, int month, int dayOfMonth) {
return date(prolepticYear(era, yearOfEra), month, dayOfMonth);
}
/**
* Obtains a local date in Coptic calendar system from the
* proleptic-year, month-of-year and day-of-month fields.
*
* @param prolepticYear the proleptic-year
* @param month the month-of-year
* @param dayOfMonth the day-of-month
* @return the Coptic local date, not null
* @throws DateTimeException if unable to create the date
*/
@Override
public CopticDate date(int prolepticYear, int month, int dayOfMonth) {
return CopticDate.of(prolepticYear, month, dayOfMonth);
}
/**
 * Creates a Coptic date from an era, year-of-era and day-of-year.
 *
 * @param era  the Coptic era, not null
 * @param yearOfEra  the year within the era
 * @param dayOfYear  the day-of-year
 * @return the matching Coptic date, not null
 * @throws DateTimeException if the date cannot be created
 * @throws ClassCastException if {@code era} is not a {@code CopticEra}
 */
@Override
public CopticDate dateYearDay(Era era, int yearOfEra, int dayOfYear) {
    // Translate the era-relative year into a proleptic year, then delegate.
    int prolepticYear = prolepticYear(era, yearOfEra);
    return dateYearDay(prolepticYear, dayOfYear);
}
/**
 * Creates a Coptic date from a proleptic year and day-of-year.
 *
 * @param prolepticYear  the proleptic year
 * @param dayOfYear  the day-of-year
 * @return the matching Coptic date, not null
 * @throws DateTimeException if the date cannot be created
 */
@Override
public CopticDate dateYearDay(int prolepticYear, int dayOfYear) {
    // Range validation is handled entirely by CopticDate.
    return CopticDate.ofYearDay(prolepticYear, dayOfYear);
}
/**
 * Creates a Coptic date from an epoch-day count.
 *
 * @param epochDay  the epoch day to convert
 * @return the matching Coptic date, not null
 * @throws DateTimeException if the date cannot be created
 */
@Override // override with covariant return type
public CopticDate dateEpochDay(long epochDay) {
    // Conversion from the epoch-day line lives in CopticDate.
    return CopticDate.ofEpochDay(epochDay);
}
//-------------------------------------------------------------------------
/**
 * Obtains today's Coptic date using the system clock in the default time-zone.
 *
 * <p>Equivalent to querying {@link Clock#systemDefaultZone() the system clock}.
 * Because the clock is fixed here, an alternate clock cannot be substituted
 * for testing; prefer {@link #dateNow(Clock)} in testable code.
 *
 * @return today's Coptic date using the system clock and default time-zone, not null
 * @throws DateTimeException if the date cannot be created
 */
@Override // override with covariant return type
public CopticDate dateNow() {
    return CopticDate.now();
}
/**
 * Obtains today's Coptic date using the system clock in the given time-zone.
 *
 * <p>Equivalent to querying {@link Clock#system(ZoneId) the system clock};
 * passing the zone avoids any dependence on the JVM default time-zone. The
 * clock itself is fixed, so an alternate clock cannot be substituted for
 * testing.
 *
 * @param zone  the zone ID to use, not null
 * @return today's Coptic date using the system clock, not null
 * @throws DateTimeException if the date cannot be created
 */
@Override // override with covariant return type
public CopticDate dateNow(ZoneId zone) {
    return CopticDate.now(zone);
}
/**
 * Obtains today's Coptic date from the supplied clock.
 *
 * <p>Accepting an explicit clock allows an alternate implementation to be
 * injected for testing.
 *
 * @param clock  the clock to use, not null
 * @return today's Coptic date, not null
 * @throws DateTimeException if the date cannot be created
 */
@Override // override with covariant return type
public CopticDate dateNow(Clock clock) {
    return CopticDate.now(clock);
}
//-------------------------------------------------------------------------
/**
 * Converts another date-time object to a Coptic date.
 *
 * @param temporal  the object to convert, not null
 * @return the equivalent Coptic date, not null
 * @throws DateTimeException if the conversion is not possible
 */
@Override
public CopticDate date(TemporalAccessor temporal) {
    return CopticDate.from(temporal);
}
/**
 * Converts another date-time object to a Coptic local date-time.
 *
 * @param temporal  the object to convert, not null
 * @return the equivalent Coptic local date-time, not null
 * @throws DateTimeException if the conversion is not possible
 */
@Override
@SuppressWarnings("unchecked")
public ChronoLocalDateTime<CopticDate> localDateTime(TemporalAccessor temporal) {
    // The superclass builds the date part through this chronology, so the
    // unchecked cast to the CopticDate-parameterised type is safe.
    ChronoLocalDateTime<?> result = super.localDateTime(temporal);
    return (ChronoLocalDateTime<CopticDate>) result;
}
/**
 * Converts another date-time object to a Coptic zoned date-time.
 *
 * @param temporal  the object to convert, not null
 * @return the equivalent Coptic zoned date-time, not null
 * @throws DateTimeException if the conversion is not possible
 */
@Override
@SuppressWarnings("unchecked")
public ChronoZonedDateTime<CopticDate> zonedDateTime(TemporalAccessor temporal) {
    // The superclass builds the date part through this chronology, so the
    // unchecked cast to the CopticDate-parameterised type is safe.
    ChronoZonedDateTime<?> result = super.zonedDateTime(temporal);
    return (ChronoZonedDateTime<CopticDate>) result;
}
/**
 * Creates a Coptic zoned date-time for an instant in a given zone.
 *
 * @param instant  the instant to convert, not null
 * @param zone  the time-zone to apply, not null
 * @return the Coptic zoned date-time, not null
 * @throws DateTimeException if the result exceeds the supported range
 */
@Override
@SuppressWarnings("unchecked")
public ChronoZonedDateTime<CopticDate> zonedDateTime(Instant instant, ZoneId zone) {
    // The superclass produces a zoned date-time in this chronology, so the
    // unchecked cast to the CopticDate-parameterised type is safe.
    ChronoZonedDateTime<?> zoned = super.zonedDateTime(instant, zone);
    return (ChronoZonedDateTime<CopticDate>) zoned;
}
//-----------------------------------------------------------------------
/**
 * Converts an era and year-of-era to a proleptic year.
 *
 * @param era  the era to convert, must be a {@code CopticEra}, not null
 * @param yearOfEra  the year within the era
 * @return the proleptic year
 * @throws ClassCastException if {@code era} is not a {@code CopticEra}
 */
@Override
public int prolepticYear(Era era, int yearOfEra) {
    // Idiomatic negated instanceof instead of "instanceof ... == false".
    if (!(era instanceof CopticEra)) {
        throw new ClassCastException("Era must be CopticEra");
    }
    // AM years count forward from 1; years in the other era count backwards.
    return (era == CopticEra.AM ? yearOfEra : 1 - yearOfEra);
}
@Override
public CopticEra eraOf(int eraValue) {
    // Range checking of eraValue is delegated to CopticEra.of.
    return CopticEra.of(eraValue);
}
@Override
public List<Era> eras() {
    // Fixed-size list view over all CopticEra constants.
    return Arrays.<Era>asList(CopticEra.values());
}
//-----------------------------------------------------------------------
@Override // override for return type
public CopticDate resolveDate(Map<TemporalField, Long> fieldValues, ResolverStyle resolverStyle) {
    // Resolution logic lives in the superclass; only the return type is narrowed.
    return (CopticDate) super.resolveDate(fieldValues, resolverStyle);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.plan.volcano;
import org.apache.calcite.adapter.enumerable.EnumerableTableScan;
import org.apache.calcite.adapter.java.JavaTypeFactory;
import org.apache.calcite.jdbc.CalcitePrepare;
import org.apache.calcite.plan.Convention;
import org.apache.calcite.plan.ConventionTraitDef;
import org.apache.calcite.plan.RelOptAbstractTable;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptCost;
import org.apache.calcite.plan.RelOptPlanner;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.plan.RelOptRuleOperand;
import org.apache.calcite.plan.RelOptSchema;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.plan.RelTrait;
import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.plan.volcano.AbstractConverter.ExpandConversionRule;
import org.apache.calcite.prepare.CalciteCatalogReader;
import org.apache.calcite.prepare.CalcitePrepareImpl;
import org.apache.calcite.rel.AbstractRelNode;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelCollationTraitDef;
import org.apache.calcite.rel.RelCollations;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterRule;
import org.apache.calcite.rel.core.Aggregate;
import org.apache.calcite.rel.core.AggregateCall;
import org.apache.calcite.rel.core.Project;
import org.apache.calcite.rel.core.Sort;
import org.apache.calcite.rel.logical.LogicalAggregate;
import org.apache.calcite.rel.logical.LogicalProject;
import org.apache.calcite.rel.metadata.RelMdCollation;
import org.apache.calcite.rel.metadata.RelMetadataQuery;
import org.apache.calcite.rel.rules.SortRemoveRule;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.Statistic;
import org.apache.calcite.schema.Statistics;
import org.apache.calcite.schema.Table;
import org.apache.calcite.schema.impl.AbstractTable;
import org.apache.calcite.server.CalciteServerStatement;
import org.apache.calcite.sql.SqlExplainFormat;
import org.apache.calcite.sql.SqlExplainLevel;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.tools.FrameworkConfig;
import org.apache.calcite.tools.Frameworks;
import org.apache.calcite.tools.RuleSet;
import org.apache.calcite.tools.RuleSets;
import org.apache.calcite.util.ImmutableBitSet;
import com.google.common.collect.ImmutableList;
import org.junit.Test;
import java.sql.Connection;
import java.sql.DriverManager;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
/**
 * Tests that determine whether trait propagation works in the Volcano planner.
 *
 * <p>A logical scan-project-aggregate plan is optimized with a small set of
 * "physical" rules; the test passes when the collation supplied by the table
 * propagates far enough that no physical sort is required.
 */
public class TraitPropagationTest {

  /** Calling convention shared by all Phys relational expressions below. */
  static final Convention PHYSICAL =
      new Convention.Impl("PHYSICAL", Phys.class);

  /** Collation declared by the test table: field 0 ascending, nulls first. */
  static final RelCollation COLLATION =
      RelCollations.of(
          new RelFieldCollation(0,
              RelFieldCollation.Direction.ASCENDING,
              RelFieldCollation.NullDirection.FIRST));

  /** Rules under test plus converter-expansion and sort-removal support rules. */
  static final RuleSet RULES =
      RuleSets.ofList(PhysAggRule.INSTANCE,
          PhysProjRule.INSTANCE,
          PhysTableRule.INSTANCE,
          PhysSortRule.INSTANCE,
          SortRemoveRule.INSTANCE,
          ExpandConversionRule.INSTANCE);

  @Test public void testOne() throws Exception {
    RelNode planned = run(new PropAction(), RULES);
    if (CalcitePrepareImpl.DEBUG) {
      System.out.println(
          RelOptUtil.dumpPlan("LOGICAL PLAN", planned, SqlExplainFormat.TEXT,
              SqlExplainLevel.ALL_ATTRIBUTES));
    }
    final RelMetadataQuery mq = RelMetadataQuery.instance();
    // Every Phys node below reports a cost of 1 row. A cumulative cost of 3
    // rows therefore means three physical nodes, i.e. the explicit sort was
    // removed because the scan's sortedness propagated up the plan.
    assertEquals("Sortedness was not propagated", 3,
        mq.getCumulativeCost(planned).getRows(), 0);
  }

  /**
   * Materialized anonymous class for simplicity
   */
  private static class PropAction {
    // Builds the logical plan (scan t1 -> project s,i -> aggregate count(i)
    // group by s), requests the PHYSICAL convention, and runs the planner.
    public RelNode apply(RelOptCluster cluster, RelOptSchema relOptSchema,
        SchemaPlus rootSchema) {
      final RelDataTypeFactory typeFactory = cluster.getTypeFactory();
      final RexBuilder rexBuilder = cluster.getRexBuilder();
      final RelOptPlanner planner = cluster.getPlanner();

      final RelDataType stringType = typeFactory.createJavaType(String.class);
      final RelDataType integerType = typeFactory.createJavaType(Integer.class);
      final RelDataType sqlBigInt =
          typeFactory.createSqlType(SqlTypeName.BIGINT);

      // SELECT * from T;
      final Table table = new AbstractTable() {
        public RelDataType getRowType(RelDataTypeFactory typeFactory) {
          return typeFactory.builder()
              .add("s", stringType)
              .add("i", integerType).build();
        }

        @Override public Statistic getStatistic() {
          // The table advertises COLLATION; propagating it through the plan
          // is exactly what this test verifies.
          return Statistics.of(100d, ImmutableList.of(),
              ImmutableList.of(COLLATION));
        }
      };

      final RelOptAbstractTable t1 = new RelOptAbstractTable(relOptSchema,
          "t1", table.getRowType(typeFactory)) {
        @Override public <T> T unwrap(Class<T> clazz) {
          return clazz.isInstance(table)
              ? clazz.cast(table)
              : super.unwrap(clazz);
        }
      };

      final RelNode rt1 = EnumerableTableScan.create(cluster, t1);

      // project s column
      RelNode project = LogicalProject.create(rt1,
          ImmutableList.of(
              (RexNode) rexBuilder.makeInputRef(stringType, 0),
              rexBuilder.makeInputRef(integerType, 1)),
          typeFactory.builder().add("s", stringType).add("i", integerType)
              .build());

      // aggregate on s, count
      AggregateCall aggCall = AggregateCall.create(SqlStdOperatorTable.COUNT,
          false, false, Collections.singletonList(1), -1, sqlBigInt, "cnt");
      RelNode agg = new LogicalAggregate(cluster,
          cluster.traitSetOf(Convention.NONE), project, false,
          ImmutableBitSet.of(0), null, Collections.singletonList(aggCall));

      final RelNode rootRel = agg;

      // NOTE(review): the dumped plan string is discarded here — presumably a
      // leftover debugging aid; confirm before removing.
      RelOptUtil.dumpPlan("LOGICAL PLAN", rootRel, SqlExplainFormat.TEXT,
          SqlExplainLevel.DIGEST_ATTRIBUTES);

      RelTraitSet desiredTraits = rootRel.getTraitSet().replace(PHYSICAL);
      final RelNode rootRel2 = planner.changeTraits(rootRel, desiredTraits);
      planner.setRoot(rootRel2);
      return planner.findBestExp();
    }
  }

  // RULES

  /** Rule for PhysAgg */
  private static class PhysAggRule extends RelOptRule {
    static final PhysAggRule INSTANCE = new PhysAggRule();

    private PhysAggRule() {
      super(anyChild(LogicalAggregate.class), "PhysAgg");
    }

    public void onMatch(RelOptRuleCall call) {
      RelTraitSet empty = call.getPlanner().emptyTraitSet();
      LogicalAggregate rel = call.rel(0);
      assert rel.getGroupSet().cardinality() == 1;
      int aggIndex = rel.getGroupSet().iterator().next();
      // Ask for the input sorted on the single grouping key, so a matching
      // collation from below can satisfy the aggregate without a sort.
      RelTrait collation = RelCollations.of(
          new RelFieldCollation(aggIndex,
              RelFieldCollation.Direction.ASCENDING,
              RelFieldCollation.NullDirection.FIRST));
      RelTraitSet desiredTraits = empty.replace(PHYSICAL).replace(collation);
      RelNode convertedInput = convert(rel.getInput(), desiredTraits);
      call.transformTo(
          new PhysAgg(rel.getCluster(), empty.replace(PHYSICAL),
              convertedInput, rel.indicator, rel.getGroupSet(),
              rel.getGroupSets(), rel.getAggCallList()));
    }
  }

  /** Rule for PhysProj */
  private static class PhysProjRule extends RelOptRule {
    static final PhysProjRule INSTANCE = new PhysProjRule(false);
    // When true, expand each physical member of a matched RelSubset rather
    // than converting the subset as a whole.
    final boolean subsetHack;

    private PhysProjRule(boolean subsetHack) {
      super(
          RelOptRule.operand(LogicalProject.class,
              anyChild(RelNode.class)),
          "PhysProj");
      this.subsetHack = subsetHack;
    }

    public void onMatch(RelOptRuleCall call) {
      LogicalProject rel = call.rel(0);
      RelNode rawInput = call.rel(1);
      RelNode input = convert(rawInput, PHYSICAL);

      if (subsetHack && input instanceof RelSubset) {
        RelSubset subset = (RelSubset) input;
        for (RelNode child : subset.getRels()) {
          // skip logical nodes
          if (child.getTraitSet().getTrait(ConventionTraitDef.INSTANCE)
              == Convention.NONE) {
            continue;
          } else {
            RelTraitSet outcome = child.getTraitSet().replace(PHYSICAL);
            call.transformTo(
                new PhysProj(rel.getCluster(), outcome, convert(child, outcome),
                    rel.getChildExps(), rel.getRowType()));
          }
        }
      } else {
        call.transformTo(
            PhysProj.create(input, rel.getChildExps(), rel.getRowType()));
      }
    }
  }

  /** Rule for PhysSort */
  private static class PhysSortRule extends ConverterRule {
    static final PhysSortRule INSTANCE = new PhysSortRule();

    PhysSortRule() {
      super(Sort.class, Convention.NONE, PHYSICAL, "PhysSortRule");
    }

    public RelNode convert(RelNode rel) {
      final Sort sort = (Sort) rel;
      final RelNode input = convert(sort.getInput(),
          rel.getCluster().traitSetOf(PHYSICAL));
      return new PhysSort(
          rel.getCluster(),
          input.getTraitSet().plus(sort.getCollation()),
          convert(input, input.getTraitSet().replace(PHYSICAL)),
          sort.getCollation(),
          null,
          null);
    }
  }

  /** Rule for PhysTable */
  private static class PhysTableRule extends RelOptRule {
    static final PhysTableRule INSTANCE = new PhysTableRule();

    private PhysTableRule() {
      super(anyChild(EnumerableTableScan.class), "PhysScan");
    }

    public void onMatch(RelOptRuleCall call) {
      EnumerableTableScan rel = call.rel(0);
      call.transformTo(new PhysTable(rel.getCluster()));
    }
  }

  /* RELS */

  /** Marker interface for Phys nodes */
  private interface Phys extends RelNode { }

  /** Physical Aggregate RelNode */
  private static class PhysAgg extends Aggregate implements Phys {
    PhysAgg(RelOptCluster cluster, RelTraitSet traits, RelNode child,
        boolean indicator, ImmutableBitSet groupSet,
        List<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
      super(cluster, traits, child, indicator, groupSet, groupSets, aggCalls);
    }

    public Aggregate copy(RelTraitSet traitSet, RelNode input,
        boolean indicator, ImmutableBitSet groupSet,
        List<ImmutableBitSet> groupSets, List<AggregateCall> aggCalls) {
      return new PhysAgg(getCluster(), traitSet, input, indicator, groupSet,
          groupSets, aggCalls);
    }

    public RelOptCost computeSelfCost(RelOptPlanner planner,
        RelMetadataQuery mq) {
      // Flat unit cost: total plan cost becomes a simple physical-node count.
      return planner.getCostFactory().makeCost(1, 1, 1);
    }
  }

  /** Physical Project RelNode */
  private static class PhysProj extends Project implements Phys {
    PhysProj(RelOptCluster cluster, RelTraitSet traits, RelNode child,
        List<RexNode> exps, RelDataType rowType) {
      super(cluster, traits, child, exps, rowType);
    }

    public static PhysProj create(final RelNode input,
        final List<RexNode> projects, RelDataType rowType) {
      final RelOptCluster cluster = input.getCluster();
      final RelMetadataQuery mq = RelMetadataQuery.instance();
      // Derive the output collation from the input via metadata so that
      // sortedness survives the projection.
      final RelTraitSet traitSet =
          cluster.traitSet().replace(PHYSICAL)
              .replaceIfs(
                  RelCollationTraitDef.INSTANCE,
                  () -> RelMdCollation.project(mq, input, projects));
      return new PhysProj(cluster, traitSet, input, projects, rowType);
    }

    public PhysProj copy(RelTraitSet traitSet, RelNode input,
        List<RexNode> exps, RelDataType rowType) {
      return new PhysProj(getCluster(), traitSet, input, exps, rowType);
    }

    public RelOptCost computeSelfCost(RelOptPlanner planner,
        RelMetadataQuery mq) {
      return planner.getCostFactory().makeCost(1, 1, 1);
    }
  }

  /** Physical Sort RelNode */
  private static class PhysSort extends Sort implements Phys {
    PhysSort(RelOptCluster cluster, RelTraitSet traits, RelNode child,
        RelCollation collation, RexNode offset,
        RexNode fetch) {
      super(cluster, traits, child, collation, offset, fetch);
    }

    public PhysSort copy(RelTraitSet traitSet, RelNode newInput,
        RelCollation newCollation, RexNode offset,
        RexNode fetch) {
      return new PhysSort(getCluster(), traitSet, newInput, newCollation,
          offset, fetch);
    }

    public RelOptCost computeSelfCost(RelOptPlanner planner,
        RelMetadataQuery mq) {
      return planner.getCostFactory().makeCost(1, 1, 1);
    }
  }

  /** Physical Table RelNode */
  private static class PhysTable extends AbstractRelNode implements Phys {
    PhysTable(RelOptCluster cluster) {
      // Declares both the physical convention and the table's collation —
      // the starting point for trait propagation in this test.
      super(cluster, cluster.traitSet().replace(PHYSICAL).replace(COLLATION));
      RelDataTypeFactory typeFactory = cluster.getTypeFactory();
      final RelDataType stringType = typeFactory.createJavaType(String.class);
      final RelDataType integerType = typeFactory.createJavaType(Integer.class);
      this.rowType = typeFactory.builder().add("s", stringType)
          .add("i", integerType).build();
    }

    public RelOptCost computeSelfCost(RelOptPlanner planner,
        RelMetadataQuery mq) {
      return planner.getCostFactory().makeCost(1, 1, 1);
    }
  }

  /* UTILS */

  /** Creates an operand that matches {@code first} with any single child. */
  public static RelOptRuleOperand anyChild(Class<? extends RelNode> first) {
    return RelOptRule.operand(first, RelOptRule.any());
  }

  // Created so that we can control when the TraitDefs are defined (e.g.
  // before the cluster is created).
  private static RelNode run(PropAction action, RuleSet rules)
      throws Exception {
    FrameworkConfig config = Frameworks.newConfigBuilder()
        .ruleSets(rules).build();

    final Properties info = new Properties();
    // NOTE(review): the connection and statement are never closed; tolerable
    // in a short-lived test JVM, but worth confirming.
    final Connection connection = DriverManager
        .getConnection("jdbc:calcite:", info);
    final CalciteServerStatement statement = connection
        .createStatement().unwrap(CalciteServerStatement.class);
    final CalcitePrepare.Context prepareContext =
        statement.createPrepareContext();
    final JavaTypeFactory typeFactory = prepareContext.getTypeFactory();
    CalciteCatalogReader catalogReader =
        new CalciteCatalogReader(prepareContext.getRootSchema(),
            prepareContext.getDefaultSchemaPath(),
            typeFactory,
            prepareContext.config());
    final RexBuilder rexBuilder = new RexBuilder(typeFactory);
    final RelOptPlanner planner = new VolcanoPlanner(config.getCostFactory(),
        config.getContext());

    // set up rules before we generate cluster
    planner.clearRelTraitDefs();
    planner.addRelTraitDef(RelCollationTraitDef.INSTANCE);
    planner.addRelTraitDef(ConventionTraitDef.INSTANCE);

    planner.clear();
    for (RelOptRule r : rules) {
      planner.addRule(r);
    }

    final RelOptCluster cluster = RelOptCluster.create(planner, rexBuilder);
    return action.apply(cluster, catalogReader,
        prepareContext.getRootSchema().plus());
  }
}
// End TraitPropagationTest.java
| |
/**
* Copyright 2004-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.hr.time.overtime.daily;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.joda.time.DateTime;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import org.junit.Assert;
import org.junit.Test;
import org.kuali.hr.time.util.TimeDetailTestUtils;
import org.kuali.hr.time.workflow.TimesheetWebTestBase;
import org.kuali.hr.util.HtmlUnitUtil;
import org.kuali.kpme.core.FunctionalTest;
import org.kuali.kpme.core.api.assignment.Assignment;
import org.kuali.kpme.core.api.calendar.entry.CalendarEntry;
import org.kuali.kpme.core.api.earncode.EarnCode;
import org.kuali.kpme.core.service.HrServiceLocator;
import org.kuali.kpme.core.util.HrContext;
import org.kuali.kpme.core.util.HrTestConstants;
import org.kuali.kpme.core.util.TKUtils;
import org.kuali.kpme.tklm.time.detail.web.TimeDetailActionFormBase;
import org.kuali.kpme.tklm.time.rules.overtime.daily.DailyOvertimeRule;
import org.kuali.kpme.tklm.time.service.TkServiceLocator;
import org.kuali.kpme.tklm.time.timesheet.TimesheetDocument;
import org.kuali.kpme.tklm.utils.TkTestConstants;
import com.gargoylesoftware.htmlunit.html.HtmlForm;
import com.gargoylesoftware.htmlunit.html.HtmlPage;
/**
 * https://wiki.kuali.org/display/KPME/Test+Cases+-+Business+Logic+Daily+Overtime
 *
 * Implementations of tests from this page.
 */
@FunctionalTest
public class DOTIntegrationConfluenceTests extends TimesheetWebTestBase {

    /**
     * Daily OT - Shift hours met (in a single day) but Max Gap minutes exceeded (in a single gap)
     * Daily OT - Shift hours met (in a single day) but Max Gap matches exactly
     *
     * <p>Scenario 1: two 5-hour blocks separated by a 10-minute gap with
     * maxGap = 9 minutes — the gap breaks the shift, so all hours stay RGN.
     * Scenario 2: the same blocks with maxGap = 10 minutes — the gap matches
     * exactly, the shift totals 10 hours against a minimum of 8, so the second
     * block is expected to split into 3 RGN hours and 2 OVT hours.
     *
     * https://jira.kuali.org/browse/KPME-788
     * https://jira.kuali.org/browse/KPME-789
     */
    @Test
    public void testKPME788_789() throws Exception {
        Long workArea = 30L;
        Long task = 30L;

        // Scenario 1: maxGap 9 minutes, actual gap 10 minutes -> no overtime.
        createDailyOvertimeRule("IU-IN", "REG", "OVT",
                "BW", "TEST-DEPT", workArea, task, new BigDecimal(8), new BigDecimal("9.00"), null);
        String tdocId = KPME788_789(
                new ArrayList<Map<String, Object>>() {{
                    add(new HashMap<String, Object>() {{
                        put("earnCode", "RGN");
                        put("hours", "5.0");
                    }});
                }},
                new HashMap<String, Object>() {{
                    put("earnCode", "RGN");
                    put("startNoTz", "2011-03-02T08:00:00");
                    put("endNoTz", "2011-03-02T13:00:00");
                    put("title", "SDR1 Work Area");
                    put("assignment", "IU-BL_30_30_30");
                }},
                new ArrayList<Map<String, Object>>() {{
                    add(new HashMap<String, Object>() {{
                        put("earnCode", "RGN");
                        put("hours", "5.0");
                    }});
                }},
                new HashMap<String, Object>() {{
                    put("earnCode", "RGN");
                    put("startNoTz", "2011-03-02T13:10:00");
                    put("endNoTz", "2011-03-02T18:10:00");
                    put("title", "SDR1 Work Area");
                    put("assignment", "IU-BL_30_30_30");
                }}
        );
        deleteTimesheet(tdocId);

        // Scenario 2: maxGap 10 minutes matches the actual gap -> 2 OT hours.
        createDailyOvertimeRule("IN-IU", "REG", "OVT",
                "BW", "TEST-DEPT", workArea, task, new BigDecimal(8), new BigDecimal("10.00"), null);
        tdocId = KPME788_789(
                new ArrayList<Map<String, Object>>() {{
                    add(new HashMap<String, Object>() {{
                        put("earnCode", "RGN");
                        put("hours", "5.0");
                    }});
                }},
                new HashMap<String, Object>() {{
                    put("earnCode", "RGN");
                    put("startNoTz", "2011-03-02T08:00:00");
                    put("endNoTz", "2011-03-02T13:00:00");
                    put("title", "SDR1 Work Area");
                    put("assignment", "IU-BL_30_30_30");
                }},
                new ArrayList<Map<String, Object>>() {{
                    // BUG FIX: the original code added a single map that put the
                    // "earnCode"/"hours" keys twice, so RGN/3.0 was silently
                    // overwritten by OVT/2.0 and only one detail row was checked.
                    // The expectation is two detail rows: 3 regular + 2 overtime.
                    add(new HashMap<String, Object>() {{
                        put("earnCode", "RGN");
                        put("hours", "3.0");
                    }});
                    add(new HashMap<String, Object>() {{
                        put("earnCode", "OVT");
                        put("hours", "2.0");
                    }});
                }},
                new HashMap<String, Object>() {{
                    put("earnCode", "RGN");
                    put("startNoTz", "2011-03-02T13:10:00");
                    put("endNoTz", "2011-03-02T18:10:00");
                    put("title", "SDR1 Work Area");
                    put("assignment", "IU-BL_30_30_30");
                }}
        );
        deleteTimesheet(tdocId);
    }

    /**
     * Deletes the timesheet document with the given id via the web UI.
     *
     * @param tdocId the timesheet document id to delete
     */
    public void deleteTimesheet(String tdocId) throws Exception {
        HtmlPage page = HtmlUnitUtil.gotoPageAndLogin(getWebClient(), HrTestConstants.Urls.PORTAL_URL);
        Assert.assertNotNull(page);
        page = HtmlUnitUtil.gotoPageAndLogin(getWebClient(), TkTestConstants.Urls.DELETE_TIMESHEET_URL + "?deleteDocumentId=" + tdocId);
        HtmlUnitUtil.createTempFile(page, "Deleted");
    }

    /**
     * Creates a timesheet with two time blocks (08:00-13:00 and 13:10-18:10 on
     * 2011-03-02), submits them through the time-detail web form, and verifies
     * that the rendered time-block JSON contains the expected earn-code and
     * hour breakdowns.
     *
     * @param tb1ThdItems expected hour-detail rows for time block #1
     * @param tb1Items expected attributes of time block #1
     * @param tb2ThdItems expected hour-detail rows for time block #2
     * @param tb2Items expected attributes of time block #2
     * @return the created timesheet document id, so the caller can delete it
     */
    public String KPME788_789(ArrayList<Map<String, Object>> tb1ThdItems, HashMap<String, Object> tb1Items, ArrayList<Map<String, Object>> tb2ThdItems, HashMap<String, Object> tb2Items) throws Exception {
        DateTime asOfDate = new DateTime(2011, 3, 1, 12, 0, 0, 0, TKUtils.getSystemDateTimeZone());
        CalendarEntry pcd = HrServiceLocator.getCalendarEntryService().getCurrentCalendarDates(USER_PRINCIPAL_ID, asOfDate);
        Assert.assertNotNull("No PayCalendarDates", pcd);
        TimesheetDocument tdoc = TkServiceLocator.getTimesheetService().openTimesheetDocument(USER_PRINCIPAL_ID, pcd);
        String tdocId = tdoc.getDocumentId();
        HtmlPage page = loginAndGetTimeDetailsHtmlPage(getWebClient(), "admin", tdocId, true);
        Assert.assertNotNull(page);
        HtmlForm form = page.getFormByName("TimeDetailActionForm");
        Assert.assertNotNull(form);
        List<Assignment> assignments = HrServiceLocator.getAssignmentService().getAssignments(HrContext.getPrincipalId(), JAN_AS_OF_DATE.toLocalDate());
        Assignment assignment = assignments.get(0);
        List<EarnCode> earnCodes = TkServiceLocator.getTimesheetService().getEarnCodesForTime(assignment, JAN_AS_OF_DATE.toLocalDate());
        EarnCode earnCode = earnCodes.get(0);
        Assert.assertEquals("There should be no existing time blocks.", 0, tdoc.getTimeBlocks().size());

        // Time block #1: 3/02/2011 - 8:00a to 1:00p.
        DateTime start = new DateTime(2011, 3, 2, 8, 0, 0, 0, TKUtils.getSystemDateTimeZone());
        DateTime end = new DateTime(2011, 3, 2, 13, 0, 0, 0, TKUtils.getSystemDateTimeZone());
        TimeDetailActionFormBase tdaf = TimeDetailTestUtils.buildDetailActionForm(tdoc, assignment, earnCode, start, end, null, false, null, true, null, null, null, null, null, null);
        List<String> errors = TimeDetailTestUtils.setTimeBlockFormDetails(form, tdaf);
        Assert.assertEquals("There should be no errors in this time detail submission", 0, errors.size());
        page = TimeDetailTestUtils.submitTimeDetails(getWebClient(), getTimesheetDocumentUrl(tdocId), tdaf);
        Assert.assertNotNull(page);

        // Time block #2: 3/02/2011 - 1:10p to 6:10p (10-minute gap after #1).
        start = new DateTime(2011, 3, 2, 13, 10, 0, 0, TKUtils.getSystemDateTimeZone());
        end = new DateTime(2011, 3, 2, 18, 10, 0, 0, TKUtils.getSystemDateTimeZone());
        tdaf = TimeDetailTestUtils.buildDetailActionForm(tdoc, assignment, earnCode, start, end, null, false, null, true, null, null, null, null, null, null);
        errors = TimeDetailTestUtils.setTimeBlockFormDetails(form, tdaf);
        Assert.assertEquals("There should be no errors in this time detail submission", 0, errors.size());
        page = TimeDetailTestUtils.submitTimeDetails(getWebClient(), getTimesheetDocumentUrl(tdocId), tdaf);
        HtmlUnitUtil.createTempFile(page, "Hours");
        Assert.assertNotNull(page);

        // Grab the timeblock data from the rendered page's hidden text area so
        // the expected values can be checked in a fine-grained way.
        String dataText = page.getElementById("timeBlockString").getFirstChild().getNodeValue();
        JSONObject jsonData = (JSONObject) JSONValue.parse(dataText);
        Assert.assertTrue("TimeBlock #1 Data Missing.", checkJSONValues(jsonData, tb1ThdItems, tb1Items));
        Assert.assertTrue("TimeBlock #2 Data Missing.", checkJSONValues(jsonData, tb2ThdItems, tb2Items));
        return tdocId;
    }

    /**
     * Creates, saves and returns an active {@link DailyOvertimeRule} effective
     * as of JAN_AS_OF_DATE.
     *
     * <p>NOTE(review): the {@code groupKeyCode}, {@code task} and
     * {@code overtimePref} parameters are currently ignored; the group key is
     * hard-coded to "IU-IN". That behavior is preserved here to avoid changing
     * test results — confirm whether the caller's "IN-IU" value was intended.
     */
    private DailyOvertimeRule createDailyOvertimeRule(String groupKeyCode, String fromEarnGroup, String earnCode,
            String paytype, String dept, Long workArea, Long task, BigDecimal minHours, BigDecimal maxGap, String overtimePref) {
        DailyOvertimeRule rule = new DailyOvertimeRule();
        rule.setGroupKeyCode("IU-IN");
        rule.setEffectiveLocalDate(JAN_AS_OF_DATE.toLocalDate());
        rule.setFromEarnGroup(fromEarnGroup);
        rule.setEarnCode(earnCode);
        rule.setPaytype(paytype);
        rule.setDept(dept);
        rule.setWorkArea(workArea);
        rule.setMaxGap(maxGap);
        rule.setMinHours(minHours);
        rule.setActive(true);
        TkServiceLocator.getDailyOvertimeRuleService().saveOrUpdate(rule);
        return rule;
    }
}
| |
package gov.samhsa.consent2share.domain.clinicaldata;
import gov.samhsa.consent2share.domain.patient.PatientDataOnDemand;
import gov.samhsa.consent2share.domain.reference.BodySiteCodeDataOnDemand;
import gov.samhsa.consent2share.domain.reference.MedicationStatusCodeDataOnDemand;
import gov.samhsa.consent2share.domain.reference.ProductFormCodeDataOnDemand;
import gov.samhsa.consent2share.domain.reference.RouteCodeDataOnDemand;
import gov.samhsa.consent2share.domain.reference.UnitOfMeasureCode;
import gov.samhsa.consent2share.domain.valueobject.CodedConcept;
import gov.samhsa.consent2share.domain.valueobject.Quantity;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Configurable;
import org.springframework.data.domain.PageRequest;
import org.springframework.stereotype.Component;
@Component
@Configurable
// Generates on-demand Medication test data; collaborating *DataOnDemand beans
// and the repository are injected by Spring.
public class MedicationDataOnDemand {
// Randomness source for selection over generated data.
private Random rnd = new SecureRandom();
// Cache of generated Medication records.
private List<Medication> data;
@Autowired
BodySiteCodeDataOnDemand bodySiteCodeDataOnDemand;
@Autowired
MedicationStatusCodeDataOnDemand medicationStatusCodeDataOnDemand;
@Autowired
PatientDataOnDemand patientDataOnDemand;
@Autowired
ProductFormCodeDataOnDemand productFormCodeDataOnDemand;
@Autowired
RouteCodeDataOnDemand routeCodeDataOnDemand;
@Autowired
MedicationRepository medicationRepository;
/**
 * Builds a fully populated, not-yet-persisted Medication whose field values
 * are derived from the given index.
 *
 * @param index seed used to make generated values unique
 * @return a new transient Medication
 */
public Medication getNewTransientMedication(int index) {
    Medication medication = new Medication();
    setMedicationInformationCode(medication, index);
    setDoseQuantity(medication, index);
    setFreeTextSig(medication, index);
    setMedicationEndDate(medication, index);
    setMedicationStartDate(medication, index);
    return medication;
}
/** Populates the medication-information coded concept on the given Medication. */
public void setMedicationInformationCode(Medication obj, int index) {
    CodedConcept concept = new CodedConcept();
    setMedicationInformationCodeCode(concept, index);
    setMedicationInformationCodeCodeSystem(concept, index);
    setMedicationInformationCodeDisplayName(concept, index);
    setMedicationInformationCodeCodeSystemName(concept, index);
    setMedicationInformationCodeOriginalText(concept, index);
    obj.setMedicationInformationCode(concept);
}
/** Sets a unique code value, truncated to the 250-character column limit. */
public void setMedicationInformationCodeCode(CodedConcept obj, int index) {
    final String code = "code_" + index;
    obj.setCode(code.length() > 250 ? code.substring(0, 250) : code);
}
/** Sets a unique code-system value, truncated to the 250-character column limit. */
public void setMedicationInformationCodeCodeSystem(CodedConcept obj, int index) {
    final String codeSystem = "codeSystem_" + index;
    obj.setCodeSystem(codeSystem.length() > 250 ? codeSystem.substring(0, 250) : codeSystem);
}
/** Sets a unique display name, truncated to the 250-character column limit. */
public void setMedicationInformationCodeDisplayName(CodedConcept obj, int index) {
    final String displayName = "displayName_" + index;
    obj.setDisplayName(displayName.length() > 250 ? displayName.substring(0, 250) : displayName);
}
/** Sets a unique code-system name, truncated to the 250-character column limit. */
public void setMedicationInformationCodeCodeSystemName(CodedConcept obj, int index) {
    final String codeSystemName = "codeSystemName_" + index;
    obj.setCodeSystemName(codeSystemName.length() > 250 ? codeSystemName.substring(0, 250) : codeSystemName);
}
/** Sets deterministic sample original text, truncated to the 250-character column limit. */
public void setMedicationInformationCodeOriginalText(CodedConcept obj, int index) {
    String originalText = "originalText_" + index;
    obj.setOriginalText(originalText.substring(0, Math.min(originalText.length(), 250)));
}
/**
 * Populates the dose quantity of {@code obj} with a {@code Quantity}
 * whose measured value is derived from {@code index}.
 */
public void setDoseQuantity(Medication obj, int index) {
    Quantity quantity = new Quantity();
    setDoseQuantityMeasuredValue(quantity, index);
    setDoseQuantityUnitOfMeasureCode(quantity, index);
    obj.setDoseQuantity(quantity);
}
/**
 * Sets the measured value to {@code index} as a double.
 *
 * Uses a plain widening cast (with autoboxing) instead of the deprecated
 * {@code new Integer(index).doubleValue()} wrapper construction.
 */
public void setDoseQuantityMeasuredValue(Quantity obj, int index) {
    Double measuredValue = (double) index;
    obj.setMeasuredValue(measuredValue);
}
/** Leaves the unit-of-measure association unset; no fixture is generated for it. */
public void setDoseQuantityUnitOfMeasureCode(Quantity obj, int index) {
    obj.setUnitOfMeasureCode((UnitOfMeasureCode) null);
}
/** Sets a deterministic free-text sig, truncated to the 250-character column limit. */
public void setFreeTextSig(Medication obj, int index) {
    String freeTextSig = "freeTextSig_" + index;
    obj.setFreeTextSig(freeTextSig.substring(0, Math.min(freeTextSig.length(), 250)));
}
/**
 * Sets the end date to "now plus a random 0-999 second offset".
 *
 * Uses a single {@link Calendar} snapshot instead of the original's six
 * separate {@code Calendar.getInstance()} calls, which could read fields
 * from different instants when invoked across a field rollover (e.g. at a
 * minute or midnight boundary), and avoids the deprecated
 * {@code new Double(...)} boxing. Milliseconds are zeroed to match the
 * {@code GregorianCalendar(y, M, d, h, m, s)} constructor used before.
 */
public void setMedicationEndDate(Medication obj, int index) {
    Calendar cal = Calendar.getInstance();
    cal.set(Calendar.MILLISECOND, 0);
    cal.add(Calendar.SECOND, (int) (Math.random() * 1000));
    Date medicationEndDate = cal.getTime();
    obj.setMedicationEndDate(medicationEndDate);
}
/**
 * Sets the start date to "now plus a random 0-999 second offset".
 *
 * Uses a single {@link Calendar} snapshot instead of the original's six
 * separate {@code Calendar.getInstance()} calls, which could read fields
 * from different instants when invoked across a field rollover, and avoids
 * the deprecated {@code new Double(...)} boxing. Milliseconds are zeroed to
 * match the {@code GregorianCalendar(y, M, d, h, m, s)} constructor used
 * before.
 */
public void setMedicationStartDate(Medication obj, int index) {
    Calendar cal = Calendar.getInstance();
    cal.set(Calendar.MILLISECOND, 0);
    cal.add(Calendar.SECOND, (int) (Math.random() * 1000));
    Date medicationStartDate = cal.getTime();
    obj.setMedicationStartDate(medicationStartDate);
}
/**
 * Returns the persisted Medication at {@code index}, re-read through the
 * repository. Out-of-range indices are clamped into the available range
 * (init() guarantees at least one fixture exists).
 */
public Medication getSpecificMedication(int index) {
    init();
    int clamped = Math.max(0, Math.min(index, data.size() - 1));
    Long id = data.get(clamped).getId();
    return medicationRepository.findOne(id);
}
/**
 * Returns a uniformly random persisted Medication fixture, re-read through
 * the repository so the result is a managed entity.
 */
public Medication getRandomMedication() {
    init();
    Medication picked = data.get(rnd.nextInt(data.size()));
    return medicationRepository.findOne(picked.getId());
}
/**
 * No-op modification hook: always reports that {@code obj} was left
 * unmodified (presumably consumed by generated update tests — verify
 * against the callers).
 */
public boolean modifyMedication(Medication obj) {
    return false;
}
/**
 * Ensures at least one page of Medication fixtures exists. Reuses the first
 * page of persisted rows when present; otherwise seeds the table with ten
 * freshly generated instances.
 *
 * @throws IllegalStateException if the repository illegally returns null
 * @throws RuntimeException wrapping any bean-validation failure, with every
 *         constraint violation folded into the message
 */
public void init() {
    // Reuse existing rows if the first page already contains data.
    data = medicationRepository.findAll(new PageRequest(0, 10)).getContent();
    if (data == null) {
        throw new IllegalStateException("Find entries implementation for 'Medication' illegally returned null");
    }
    if (!data.isEmpty()) {
        return;
    }
    // Nothing persisted yet: generate, save, and flush ten fixtures.
    data = new ArrayList<Medication>();
    for (int i = 0; i < 10; i++) {
        Medication obj = getNewTransientMedication(i);
        try {
            medicationRepository.save(obj);
        } catch (ConstraintViolationException e) {
            // Collect every violation into a single diagnostic message.
            StringBuilder msg = new StringBuilder();
            for (ConstraintViolation<?> cv : e.getConstraintViolations()) {
                msg.append("[").append(cv.getConstraintDescriptor()).append(":").append(cv.getMessage()).append("=").append(cv.getInvalidValue()).append("]");
            }
            throw new RuntimeException(msg.toString(), e);
        }
        medicationRepository.flush();
        data.add(obj);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.common.utils.Triple;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.IVariableContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.metadata.IProjectionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DelegateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DistinctOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.EmptyTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ExchangeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ForwardOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.InnerJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.IntersectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LeftOuterUnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.LimitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.MaterializeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.OrderOperator.IOrder;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ReplicateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.RunningAggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ScriptOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SplitOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.TokenizeOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnionAllOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestMapOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.WindowOperator;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
import org.apache.hyracks.algebricks.core.algebra.typing.ITypingContext;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
import org.apache.hyracks.algebricks.core.algebra.visitors.IQueryOperatorVisitor;
/**
* This visitor deep-copies a query plan but uses a new set of variables. Method
* getInputToOutputVariableMapping() will return a map that maps input variables
* to their corresponding output variables.
*/
public class LogicalOperatorDeepCopyWithNewVariablesVisitor
implements IQueryOperatorVisitor<ILogicalOperator, ILogicalOperator> {
private final ITypingContext typeContext;
private final IVariableContext varContext;
private final LogicalExpressionDeepCopyWithNewVariablesVisitor exprDeepCopyVisitor;
// Key: Variable in the original plan. Value: New variable replacing the
// original one in the copied plan.
private final LinkedHashMap<LogicalVariable, LogicalVariable> inputVarToOutputVarMapping;
// Key: New variable in the new plan. Value: The old variable in the
// original plan.
private final LinkedHashMap<LogicalVariable, LogicalVariable> outputVarToInputVarMapping = new LinkedHashMap<>();
// Free variables: variables that shouldn't be deep copied, i.e., mapped.
private final Set<LogicalVariable> freeVars = new HashSet<>();
// Whether free variables in the given plan subtree should be reused.
private final boolean reuseFreeVars;
/**
* @param varContext
* , the variable context.
* @param typeContext
* the type context.
*/
public LogicalOperatorDeepCopyWithNewVariablesVisitor(IVariableContext varContext, ITypingContext typeContext) {
this(varContext, typeContext, new LinkedHashMap<>(), false);
}
/**
* @param varContext
* , the variable context.
* @param typeContext
* the type context.
* @param reuseFreeVars
* whether free variables in the given plan tree should be reused.
*/
public LogicalOperatorDeepCopyWithNewVariablesVisitor(IVariableContext varContext, ITypingContext typeContext,
boolean reuseFreeVars) {
this(varContext, typeContext, new LinkedHashMap<>(), reuseFreeVars);
}
/**
* @param varContext
* , the variable context.
* @param typeContext
* the type context.
* @param inVarMapping
* Variable mapping keyed by variables in the original plan.
* Those variables are replaced by their corresponding value in
* the map in the copied plan.
* @param reuseFreeVars
* whether free variables in the given plan tree should be reused.
*/
public LogicalOperatorDeepCopyWithNewVariablesVisitor(IVariableContext varContext, ITypingContext typeContext,
LinkedHashMap<LogicalVariable, LogicalVariable> inVarMapping, boolean reuseFreeVars) {
this.varContext = varContext;
this.typeContext = typeContext;
this.inputVarToOutputVarMapping = inVarMapping;
this.exprDeepCopyVisitor = new LogicalExpressionDeepCopyWithNewVariablesVisitor(varContext, inVarMapping,
inputVarToOutputVarMapping, freeVars);
this.reuseFreeVars = reuseFreeVars;
}
private void copyAnnotations(ILogicalOperator src, ILogicalOperator dest) {
dest.getAnnotations().putAll(src.getAnnotations());
}
private void copySourceLocation(ILogicalOperator src, AbstractLogicalOperator dest) {
dest.setSourceLocation(src.getSourceLocation());
}
public ILogicalOperator deepCopy(ILogicalOperator op) throws AlgebricksException {
if (reuseFreeVars) {
// If the reuseFreeVars flag is set, we collect all free variables in the
// given operator subtree and do not re-map them in the deep-copied plan.
OperatorPropertiesUtil.getFreeVariablesInSelfOrDesc((AbstractLogicalOperator) op, freeVars);
}
ILogicalOperator opCopy = deepCopyOperator(op, null);
if (typeContext != null) {
OperatorManipulationUtil.computeTypeEnvironmentBottomUp(opCopy, typeContext);
}
return opCopy;
}
private void deepCopyInputs(ILogicalOperator src, ILogicalOperator dest, ILogicalOperator arg)
throws AlgebricksException {
List<Mutable<ILogicalOperator>> inputs = src.getInputs();
List<Mutable<ILogicalOperator>> inputsCopy = dest.getInputs();
for (Mutable<ILogicalOperator> input : inputs) {
inputsCopy.add(deepCopyOperatorReference(input, arg));
}
}
private ILogicalOperator deepCopyOperator(ILogicalOperator op, ILogicalOperator arg) throws AlgebricksException {
return op != null ? op.accept(this, arg) : null;
}
private Mutable<ILogicalOperator> deepCopyOperatorReference(Mutable<ILogicalOperator> opRef, ILogicalOperator arg)
throws AlgebricksException {
return new MutableObject<>(deepCopyOperator(opRef.getValue(), arg));
}
private List<Mutable<ILogicalOperator>> deepCopyOperatorReferenceList(List<Mutable<ILogicalOperator>> list,
ILogicalOperator arg) throws AlgebricksException {
List<Mutable<ILogicalOperator>> listCopy = new ArrayList<Mutable<ILogicalOperator>>(list.size());
for (Mutable<ILogicalOperator> opRef : list) {
listCopy.add(deepCopyOperatorReference(opRef, arg));
}
return listCopy;
}
private IOrder deepCopyOrder(IOrder order) {
switch (order.getKind()) {
case ASC:
case DESC:
return order;
case FUNCTIONCALL:
default:
throw new UnsupportedOperationException();
}
}
private List<Pair<IOrder, Mutable<ILogicalExpression>>> deepCopyOrderExpressionReferencePairList(
List<Pair<IOrder, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>> listCopy =
new ArrayList<Pair<IOrder, Mutable<ILogicalExpression>>>(list.size());
for (Pair<IOrder, Mutable<ILogicalExpression>> pair : list) {
listCopy.add(new Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>(deepCopyOrder(pair.first),
exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
}
return listCopy;
}
private ILogicalPlan deepCopyPlan(ILogicalPlan plan, ILogicalOperator arg) throws AlgebricksException {
List<Mutable<ILogicalOperator>> rootsCopy = deepCopyOperatorReferenceList(plan.getRoots(), arg);
ILogicalPlan planCopy = new ALogicalPlanImpl(rootsCopy);
return planCopy;
}
private List<ILogicalPlan> deepCopyPlanList(List<ILogicalPlan> list, List<ILogicalPlan> listCopy,
ILogicalOperator arg) throws AlgebricksException {
for (ILogicalPlan plan : list) {
listCopy.add(deepCopyPlan(plan, arg));
}
return listCopy;
}
private LogicalVariable deepCopyVariable(LogicalVariable var) {
if (var == null) {
return null;
}
if (freeVars.contains(var)) {
return var;
}
LogicalVariable givenVarReplacement = outputVarToInputVarMapping.get(var);
if (givenVarReplacement != null) {
inputVarToOutputVarMapping.put(var, givenVarReplacement);
return givenVarReplacement;
}
LogicalVariable varCopy = inputVarToOutputVarMapping.get(var);
if (varCopy == null) {
varCopy = varContext.newVar();
inputVarToOutputVarMapping.put(var, varCopy);
}
return varCopy;
}
private List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> deepCopyVariableExpressionReferencePairList(
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> list) throws AlgebricksException {
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> listCopy =
new ArrayList<Pair<LogicalVariable, Mutable<ILogicalExpression>>>(list.size());
for (Pair<LogicalVariable, Mutable<ILogicalExpression>> pair : list) {
listCopy.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(deepCopyVariable(pair.first),
exprDeepCopyVisitor.deepCopyExpressionReference(pair.second)));
}
return listCopy;
}
private List<LogicalVariable> deepCopyVariableList(List<LogicalVariable> list) {
ArrayList<LogicalVariable> listCopy = new ArrayList<LogicalVariable>(list.size());
for (LogicalVariable var : list) {
listCopy.add(deepCopyVariable(var));
}
return listCopy;
}
private void deepCopyInputsAnnotationsAndExecutionMode(ILogicalOperator op, ILogicalOperator arg,
AbstractLogicalOperator opCopy) throws AlgebricksException {
deepCopyInputs(op, opCopy, arg);
copyAnnotations(op, opCopy);
copySourceLocation(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
}
public void reset() {
freeVars.clear();
inputVarToOutputVarMapping.clear();
outputVarToInputVarMapping.clear();
}
public void updatePrimaryKeys(IOptimizationContext context) {
inputVarToOutputVarMapping.forEach((key, value) -> {
List<LogicalVariable> primaryKey = context.findPrimaryKey(key);
if (primaryKey != null) {
List<LogicalVariable> head = new ArrayList<>();
for (LogicalVariable variable : primaryKey) {
head.add(inputVarToOutputVarMapping.get(variable));
}
List<LogicalVariable> tail = new ArrayList<>(1);
tail.add(value);
context.addPrimaryKey(new FunctionalDependency(head, tail));
}
});
}
public LogicalVariable varCopy(LogicalVariable var) throws AlgebricksException {
return inputVarToOutputVarMapping.get(var);
}
@Override
public ILogicalOperator visitAggregateOperator(AggregateOperator op, ILogicalOperator arg)
throws AlgebricksException {
AggregateOperator opCopy = new AggregateOperator(deepCopyVariableList(op.getVariables()),
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitAssignOperator(AssignOperator op, ILogicalOperator arg) throws AlgebricksException {
AssignOperator opCopy = new AssignOperator(deepCopyVariableList(op.getVariables()),
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitDataScanOperator(DataSourceScanOperator op, ILogicalOperator arg)
throws AlgebricksException {
Mutable<ILogicalExpression> newSelectCondition = op.getSelectCondition() != null
? exprDeepCopyVisitor.deepCopyExpressionReference(op.getSelectCondition()) : null;
IProjectionInfo<?> projectionInfo = op.getProjectionInfo() != null ? op.getProjectionInfo().createCopy() : null;
DataSourceScanOperator opCopy = new DataSourceScanOperator(deepCopyVariableList(op.getVariables()),
op.getDataSource(), newSelectCondition, op.getOutputLimit(), projectionInfo);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitDistinctOperator(DistinctOperator op, ILogicalOperator arg)
throws AlgebricksException {
DistinctOperator opCopy =
new DistinctOperator(exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitEmptyTupleSourceOperator(EmptyTupleSourceOperator op, ILogicalOperator arg) {
EmptyTupleSourceOperator opCopy = new EmptyTupleSourceOperator();
copySourceLocation(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
return opCopy;
}
@Override
public ILogicalOperator visitExchangeOperator(ExchangeOperator op, ILogicalOperator arg)
throws AlgebricksException {
ExchangeOperator opCopy = new ExchangeOperator();
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitGroupByOperator(GroupByOperator op, ILogicalOperator arg) throws AlgebricksException {
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> groupByListCopy =
deepCopyVariableExpressionReferencePairList(op.getGroupByList());
List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> decorListCopy =
deepCopyVariableExpressionReferencePairList(op.getDecorList());
List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
GroupByOperator opCopy = new GroupByOperator(groupByListCopy, decorListCopy, nestedPlansCopy, op.isGroupAll());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitInnerJoinOperator(InnerJoinOperator op, ILogicalOperator arg)
throws AlgebricksException {
InnerJoinOperator opCopy =
new InnerJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
deepCopyOperatorReference(op.getInputs().get(0), arg),
deepCopyOperatorReference(op.getInputs().get(1), arg));
copyAnnotations(op, opCopy);
copySourceLocation(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
return opCopy;
}
@Override
public ILogicalOperator visitLeftOuterJoinOperator(LeftOuterJoinOperator op, ILogicalOperator arg)
throws AlgebricksException {
LeftOuterJoinOperator opCopy =
new LeftOuterJoinOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
deepCopyOperatorReference(op.getInputs().get(0), arg),
deepCopyOperatorReference(op.getInputs().get(1), arg));
copyAnnotations(op, opCopy);
copySourceLocation(op, opCopy);
opCopy.setExecutionMode(op.getExecutionMode());
return opCopy;
}
@Override
public ILogicalOperator visitLimitOperator(LimitOperator op, ILogicalOperator arg) throws AlgebricksException {
LimitOperator opCopy = new LimitOperator(exprDeepCopyVisitor.deepCopy(op.getMaxObjects().getValue()),
exprDeepCopyVisitor.deepCopy(op.getOffset().getValue()), op.isTopmostLimitOp());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitNestedTupleSourceOperator(NestedTupleSourceOperator op, ILogicalOperator arg)
throws AlgebricksException {
Mutable<ILogicalOperator> dataSourceReference =
arg == null ? op.getDataSourceReference() : new MutableObject<>(arg);
NestedTupleSourceOperator opCopy = new NestedTupleSourceOperator(dataSourceReference);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitOrderOperator(OrderOperator op, ILogicalOperator arg) throws AlgebricksException {
OrderOperator opCopy = new OrderOperator(deepCopyOrderExpressionReferencePairList(op.getOrderExpressions()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitProjectOperator(ProjectOperator op, ILogicalOperator arg) throws AlgebricksException {
ProjectOperator opCopy = new ProjectOperator(deepCopyVariableList(op.getVariables()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitReplicateOperator(ReplicateOperator op, ILogicalOperator arg)
throws AlgebricksException {
boolean[] outputMatFlags = op.getOutputMaterializationFlags();
boolean[] copiedOutputMatFlags = new boolean[outputMatFlags.length];
System.arraycopy(outputMatFlags, 0, copiedOutputMatFlags, 0, outputMatFlags.length);
ReplicateOperator opCopy = new ReplicateOperator(op.getOutputArity(), copiedOutputMatFlags);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitSplitOperator(SplitOperator op, ILogicalOperator arg) throws AlgebricksException {
SplitOperator opCopy = new SplitOperator(op.getOutputArity(), op.getBranchingExpression());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitMaterializeOperator(MaterializeOperator op, ILogicalOperator arg)
throws AlgebricksException {
MaterializeOperator opCopy = new MaterializeOperator();
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitRunningAggregateOperator(RunningAggregateOperator op, ILogicalOperator arg)
throws AlgebricksException {
RunningAggregateOperator opCopy = new RunningAggregateOperator(deepCopyVariableList(op.getVariables()),
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitScriptOperator(ScriptOperator op, ILogicalOperator arg) throws AlgebricksException {
ScriptOperator opCopy = new ScriptOperator(op.getScriptDescription(),
deepCopyVariableList(op.getInputVariables()), deepCopyVariableList(op.getOutputVariables()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitSelectOperator(SelectOperator op, ILogicalOperator arg) throws AlgebricksException {
SelectOperator opCopy = new SelectOperator(exprDeepCopyVisitor.deepCopyExpressionReference(op.getCondition()),
op.getRetainMissing(), deepCopyVariable(op.getMissingPlaceholderVariable()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitSubplanOperator(SubplanOperator op, ILogicalOperator arg) throws AlgebricksException {
List<ILogicalPlan> nestedPlansCopy = new ArrayList<ILogicalPlan>();
SubplanOperator opCopy = new SubplanOperator(nestedPlansCopy);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitUnionOperator(UnionAllOperator op, ILogicalOperator arg) throws AlgebricksException {
List<Triple<LogicalVariable, LogicalVariable, LogicalVariable>> variableMappingsCopy =
new ArrayList<>(op.getVariableMappings().size());
UnionAllOperator opCopy = new UnionAllOperator(variableMappingsCopy);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
for (Triple<LogicalVariable, LogicalVariable, LogicalVariable> triple : op.getVariableMappings()) {
LogicalVariable producedVar = deepCopyVariable(triple.third);
LogicalVariable newLeftVar = Objects.requireNonNull(inputVarToOutputVarMapping.get(triple.first));
LogicalVariable newRightVar = Objects.requireNonNull(inputVarToOutputVarMapping.get(triple.second));
Triple<LogicalVariable, LogicalVariable, LogicalVariable> copiedTriple =
new Triple<>(newLeftVar, newRightVar, producedVar);
variableMappingsCopy.add(copiedTriple);
}
return opCopy;
}
@Override
public ILogicalOperator visitIntersectOperator(IntersectOperator op, ILogicalOperator arg)
throws AlgebricksException {
int nInput = op.getNumInput();
List<LogicalVariable> outputCompareVarsCopy = deepCopyVariableList(op.getOutputCompareVariables());
boolean hasExtraVars = op.hasExtraVariables();
List<LogicalVariable> outputExtraVarsCopy =
hasExtraVars ? deepCopyVariableList(op.getOutputExtraVariables()) : null;
List<List<LogicalVariable>> inputCompareVarsCopy = new ArrayList<>(nInput);
List<List<LogicalVariable>> inputExtraVarsCopy = hasExtraVars ? new ArrayList<>(nInput) : null;
for (int i = 0; i < nInput; i++) {
inputCompareVarsCopy.add(deepCopyVariableList(op.getInputCompareVariables(i)));
if (hasExtraVars) {
inputExtraVarsCopy.add(deepCopyVariableList(op.getInputExtraVariables(i)));
}
}
IntersectOperator opCopy = new IntersectOperator(outputCompareVarsCopy, outputExtraVarsCopy,
inputCompareVarsCopy, inputExtraVarsCopy);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitUnnestMapOperator(UnnestMapOperator op, ILogicalOperator arg)
throws AlgebricksException {
Mutable<ILogicalExpression> newSelectCondition = op.getSelectCondition() != null
? exprDeepCopyVisitor.deepCopyExpressionReference(op.getSelectCondition()) : null;
UnnestMapOperator opCopy = new UnnestMapOperator(deepCopyVariableList(op.getVariables()),
exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()), op.getVariableTypes(),
op.propagatesInput(), newSelectCondition, op.getOutputLimit());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitLeftOuterUnnestMapOperator(LeftOuterUnnestMapOperator op, ILogicalOperator arg)
throws AlgebricksException {
LeftOuterUnnestMapOperator opCopy = new LeftOuterUnnestMapOperator(deepCopyVariableList(op.getVariables()),
exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()), op.getVariableTypes(),
op.propagatesInput());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitUnnestOperator(UnnestOperator op, ILogicalOperator arg) throws AlgebricksException {
UnnestOperator opCopy = new UnnestOperator(deepCopyVariable(op.getVariable()),
exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()),
deepCopyVariable(op.getPositionalVariable()), op.getPositionalVariableType());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitTokenizeOperator(TokenizeOperator op, ILogicalOperator arg)
throws AlgebricksException {
TokenizeOperator opCopy = new TokenizeOperator(op.getDataSourceIndex(),
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getPrimaryKeyExpressions()),
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getSecondaryKeyExpressions()),
this.deepCopyVariableList(op.getTokenizeVars()),
exprDeepCopyVisitor.deepCopyExpressionReference(op.getFilterExpression()), op.getOperation(),
op.isBulkload(), op.isPartitioned(), op.getTokenizeVarTypes());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitForwardOperator(ForwardOperator op, ILogicalOperator arg) throws AlgebricksException {
ForwardOperator opCopy = new ForwardOperator(op.getSideDataKey(),
exprDeepCopyVisitor.deepCopyExpressionReference(op.getSideDataExpression()));
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitDelegateOperator(DelegateOperator op, ILogicalOperator arg)
throws AlgebricksException {
throw new UnsupportedOperationException();
}
@Override
public ILogicalOperator visitLeftOuterUnnestOperator(LeftOuterUnnestOperator op, ILogicalOperator arg)
throws AlgebricksException {
LeftOuterUnnestOperator opCopy = new LeftOuterUnnestOperator(deepCopyVariable(op.getVariable()),
exprDeepCopyVisitor.deepCopyExpressionReference(op.getExpressionRef()),
deepCopyVariable(op.getPositionalVariable()), op.getPositionalVariableType());
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
return opCopy;
}
@Override
public ILogicalOperator visitWindowOperator(WindowOperator op, ILogicalOperator arg) throws AlgebricksException {
List<Mutable<ILogicalExpression>> partitionExprCopy =
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getPartitionExpressions());
List<Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>> orderExprCopy =
deepCopyOrderExpressionReferencePairList(op.getOrderExpressions());
List<Pair<OrderOperator.IOrder, Mutable<ILogicalExpression>>> frameValueExprCopy =
deepCopyOrderExpressionReferencePairList(op.getFrameValueExpressions());
List<Mutable<ILogicalExpression>> frameStartExprCopy =
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getFrameStartExpressions());
List<Mutable<ILogicalExpression>> frameStartValidationExprCopy =
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getFrameStartValidationExpressions());
List<Mutable<ILogicalExpression>> frameEndExprCopy =
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getFrameEndExpressions());
List<Mutable<ILogicalExpression>> frameEndValidationExprCopy =
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getFrameEndValidationExpressions());
List<Mutable<ILogicalExpression>> frameExcludeExprCopy =
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getFrameExcludeExpressions());
ILogicalExpression frameExcludeUnaryExprCopy =
exprDeepCopyVisitor.deepCopy(op.getFrameExcludeUnaryExpression().getValue());
ILogicalExpression frameOffsetExprCopy = exprDeepCopyVisitor.deepCopy(op.getFrameOffsetExpression().getValue());
List<LogicalVariable> varCopy = deepCopyVariableList(op.getVariables());
List<Mutable<ILogicalExpression>> exprCopy =
exprDeepCopyVisitor.deepCopyExpressionReferenceList(op.getExpressions());
List<ILogicalPlan> nestedPlansCopy = new ArrayList<>();
WindowOperator opCopy = new WindowOperator(partitionExprCopy, orderExprCopy, frameValueExprCopy,
frameStartExprCopy, frameStartValidationExprCopy, frameEndExprCopy, frameEndValidationExprCopy,
frameExcludeExprCopy, op.getFrameExcludeNegationStartIdx(), frameExcludeUnaryExprCopy,
frameOffsetExprCopy, op.getFrameMaxObjects(), varCopy, exprCopy, nestedPlansCopy);
deepCopyInputsAnnotationsAndExecutionMode(op, arg, opCopy);
deepCopyPlanList(op.getNestedPlans(), nestedPlansCopy, opCopy);
return opCopy;
}
/**
 * Returns the accumulated mapping from original (input) logical variables to the
 * fresh variables created for them during the deep copy.
 * Note: exposes the internal map directly (no defensive copy).
 */
public LinkedHashMap<LogicalVariable, LogicalVariable> getInputToOutputVariableMapping() {
    return inputVarToOutputVarMapping;
}
}
| |
import java.io.IOException;
import java.net.*;
import java.util.*;
/**
 * Server for a ticket reservation system for a movie.
 * The system functions with TCP connections.
 * There can be multiple servers, and multiple clients may access each server concurrently.
 * A person can reserve only one seat at any given time.
 * Replica coordination is done through a Lamport mutual-exclusion protocol
 * ({@code LamportMutex}) and broadcast change messages ({@code MsgHandler}).
 */
public class Server {
    public int totalSeats = 5;
    public int numServers = 1;
    public int serverIndex;
    List<InetSocketAddress> serverList;
    InetSocketAddress server;
    TCPListenerThread tcpThread;
    MsgHandler msg;
    // Pending replicated updates, applied when this server enters the critical section.
    ArrayList<String> criticalSectionUpdates = new ArrayList<>();
    LamportMutex lmp;
    // seat number -> name of the person holding the reservation
    public HashMap<Integer,String> seatingChart;

    /**
     * Configures the server, synchronizes state from a peer, and starts the TCP listener.
     *
     * @param serverList list of all servers with hosts and ports
     * @param serverIndex Index in server list for current server
     * @param numServers total number of servers
     * @param totalSeats total number of seats in theater
     */
    public Server(List<InetSocketAddress> serverList, int serverIndex, int numServers, int totalSeats){
        this.serverList = serverList;
        this.totalSeats = totalSeats;
        this.numServers = numServers;
        this.serverIndex = serverIndex;
        this.seatingChart = new HashMap<>();
        this.msg = new MsgHandler(this, numServers, serverIndex, serverList);
        lmp = new LamportMutex(msg, this);
        this.server = msg.getServer(serverIndex);
        MsgHandler.debug("Server configured at " + server.toString());
        requestExternalSync();
        //Set up listener thread for TCP
        try {
            tcpThread = new TCPListenerThread(this, server.getPort());
        } catch (IOException e) {
            e.printStackTrace();
        }
        tcpThread.start();
    }

    /** Stops the TCP listener thread. */
    public void shutDown() throws IOException {
        tcpThread.interrupt();
    }

    /** Removes every reservation (used by tests/resets). */
    public void clearSeatingChart() {
        seatingChart.clear();
    }

    /**
     * Applies all queued replicated updates in FIFO order, then empties the queue.
     * Called when this server holds the critical section.
     */
    public void makeAllSynchedUpdates(){
        MsgHandler.debug("Making all synched updates in list size(" + Integer.toString(criticalSectionUpdates.size()) + ")");
        String request;
        synchronized(criticalSectionUpdates) {
            Iterator<String> it = criticalSectionUpdates.iterator();
            while (it.hasNext()) {
                request = it.next();
                MsgHandler.debug("Making external update: " + request);
                externalRequest(request);
            }
            criticalSectionUpdates.clear();
        }
    }

    /**
     * Dispatches a replicated request from a peer server.
     * The seat number is only parsed for the update messages that carry one;
     * Lamport protocol messages (request/release/ack) are forwarded verbatim,
     * so a non-numeric second element no longer causes a NumberFormatException.
     */
    private void externalRequest(String request){
        MsgHandler.debug("Processing request: " + request);
        ArrayList<String> requestList = Utils.interpretStringAsList(request);
        String method = requestList.get(0);
        switch(method) {
            case ("externalPut"): {
                int seat = Integer.parseInt(requestList.get(1));
                String name = requestList.get(2);
                externalReserve(name, seat);
                break;
            }
            case ("externalDelete"): {
                int seat = Integer.parseInt(requestList.get(1));
                externalDelete(seat);
                break;
            }
            case ("ack"):
                MsgHandler.debug("Gettin Acks.");
                // fall through: acks are interpreted exactly like request/release
            case ("request"):
            case ("release"):
                lmp.interpretMessage(request);
                break;
        }
    }

    /** Applies a replicated delete without re-broadcasting. */
    private void externalDelete(int seat){
        seatingChart.remove(seat);
    }

    /** Applies a replicated reservation without re-broadcasting. */
    private void externalReserve(String name, int seat){
        seatingChart.put(seat, name);
    }

    /**
     * Queues a replicated request for later application (inside the critical section).
     *
     * @return an empty response list (peers expect a reply, but there is no payload)
     */
    public ArrayList<String> queueExternalRequest(String request){
        ArrayList<String> empty = new ArrayList<>();
        criticalSectionUpdates.add(request);
        return empty;
    }

    /** Records a reservation locally and broadcasts it to all peers. */
    public void addReservation(int seat, String name){
        seatingChart.put(seat, name);
        String request = "externalPut," + seat + "," + name;
        msg.broadCastChange(request, lmp.v);
    }

    /** Removes a reservation locally and broadcasts the removal to all peers. */
    public void removeReservation(int seat){
        seatingChart.remove(seat);
        String request = "externalDelete," + seat;
        msg.broadCastChange(request, lmp.v);
    }

    /** Pulls the full seating chart from the closest peer at startup. */
    public void requestExternalSync() {
        String request = "externalSync";
        int syncServerIndex = getClosestServerIndex();
        try {
            applyExternalSync(msg.makeServerRequest(serverList.get(syncServerIndex), syncServerIndex, request, true));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Serializes the seating chart as "seat|name" entries for a syncing peer.
     */
    public ArrayList<String> supplyExternalSync() {
        ArrayList<String> response = new ArrayList<>();
        for (Integer seat : seatingChart.keySet()) {
            response.add(seat + "|" + seatingChart.get(seat));
        }
        return response;
    }

    /** Parses a peer's "seat|name" sync payload into the local seating chart. */
    private void applyExternalSync(String response) {
        if (response == null || response.equals("[]")) return;
        ArrayList<String> entries = Utils.interpretStringAsList(response);
        for (String entry : entries) {
            String[] pair = entry.split("\\|");
            seatingChart.put(Integer.valueOf(pair[0]), pair[1]);
        }
    }

    // NOTE(review): "closest" is currently just the next server in ring order.
    private int getClosestServerIndex() {
        return (serverIndex + 1) % serverList.size();
    }

    /**
     * Find and hold the next available seat in the seating chart.
     *
     * @return The next seat number (by index) available.
     *         Returns -1 if no seats available.
     */
    private int getNextSeat(String name){
        for (int seat = 1; seat < totalSeats + 1; seat++){
            if (! seatAlreadyReserved(seat)){
                addReservation(seat, name);
                return seat;
            }
        }
        return -1;
    }

    /**
     * Search for a seat reservation based on an input name
     *
     * @param name The string to search for in seating chart
     * @return The seat number (by index) for the input name.
     *         Returns -1 if not found.
     */
    private int search(String name){
        if (seatingChart.containsValue(name)){
            for (int i = 1; i < totalSeats + 1; i++){
                if (seatingChart.containsKey(i)){
                    if (seatingChart.get(i).equals(name)){
                        // NOTE(review): releases the critical section on a hit even though
                        // this method did not acquire it — preserved as-is because the
                        // LamportMutex release-counting semantics depend on it.
                        lmp.releaseCriticalSection();
                        return i;
                    }
                }
            }
        }
        return -1;
    }

    /** @return true when every seat in the theater is taken */
    private boolean soldOut(){
        return seatingChart.size() == totalSeats;
    }

    /** @return true when {@code name} already holds a reservation */
    private boolean existingReservation(String name){
        int seat = search(name);
        return seat != -1;
    }

    private boolean seatGreaterThanCapacity(int seat){
        return seat > totalSeats;
    }

    /** Checks seat occupancy inside its own critical-section round trip. */
    private boolean seatAlreadyReserved(int seat){
        lmp.requestCS();
        boolean seatReserved = seatingChart.containsKey(seat);
        lmp.releaseCriticalSection();
        return seatReserved;
    }

    /**
     * Builds the six-field wire response shared by every reply:
     * [soldOut, existingReservation, seatUnavailable, found/Delete, seat, name].
     */
    private ArrayList<String> formatResponse(String soldOut, String duplicate, String unavailable,
                                             String found, String seat, String name){
        ArrayList<String> response = new ArrayList<>();
        response.add(soldOut);
        response.add(duplicate);
        response.add(unavailable);
        response.add(found);
        response.add(seat);
        response.add(name);
        return response;
    }

    private ArrayList<String> soldOutResponse(String name){
        return formatResponse("True", "False", "False", "None", "-1", name);
    }

    private ArrayList<String> existingReservationResponse(String name){
        return formatResponse("False", "True", "False", "None", "-1", name);
    }

    private ArrayList<String> reserveNextSeatResponse(int seat, String name){
        return formatResponse("False", "False", "False", "None", Integer.toString(seat), name);
    }

    private ArrayList<String> seatUnavailableResponse(int seat, String name){
        return formatResponse("False", "False", "True", "None", Integer.toString(seat), name);
    }

    private ArrayList<String> bookSeatResponse(int seat, String name){
        return formatResponse("False", "False", "False", "None", Integer.toString(seat), name);
    }

    private ArrayList<String> searchResponse(boolean existingReservation, int seat, String name){
        return formatResponse("False", "False", "False",
                existingReservation ? "True" : "False", Integer.toString(seat), name);
    }

    private ArrayList<String> deleteResponse(boolean existingReservation, int seat, String name){
        return formatResponse("False", "False", "False",
                existingReservation ? "Delete" : "False", Integer.toString(seat), name);
    }

    /**
     * Procedural logic for reserving a seat and formulating the appropriate response.
     * 1. Check for sold out.
     * 2. Check for existing reservation
     * 3. Get the next available seat
     *
     * @param name The name for which to reserve a seat
     * @return The formatted response ArrayList
     */
    public ArrayList<String> reserve(String name){
        lmp.requestCS();
        if (soldOut()){
            lmp.releaseCriticalSection();
            return soldOutResponse(name);
        }
        else {
            if(existingReservation(name)){
                lmp.releaseCriticalSection();
                return existingReservationResponse(name);
            }
            else {
                int seat = getNextSeat(name);
                lmp.releaseCriticalSection();
                return reserveNextSeatResponse(seat, name);
            }
        }
    }

    /**
     * Procedural logic for booking a seat and formulating the appropriate response.
     * 1. Check for sold out.
     * 2. Check for existing reservation
     * 3. Check for seat number out of bounds (0 and negative integers removed at Client)
     * 4. Check is seat is available and if so, book it.
     *
     * @param name The name for which to reserve a seat
     * @return The formatted response ArrayList
     */
    public ArrayList<String> bookSeat(String name, int seat){
        lmp.requestCS();
        if (soldOut()){
            lmp.releaseCriticalSection();
            return soldOutResponse(name);
        }
        else {
            if(existingReservation(name)){
                lmp.releaseCriticalSection();
                return existingReservationResponse(name);
            }
            else if (seatGreaterThanCapacity(seat) || seatAlreadyReserved(seat)){
                lmp.releaseCriticalSection();
                return seatUnavailableResponse(seat, name);
            }
            else {
                addReservation(seat, name);
                lmp.releaseCriticalSection();
                return bookSeatResponse(seat, name);
            }
        }
    }

    /**
     * Procedural logic for searching for a reservation and formulating the appropriate response.
     * Was a reservation found? If so, return seat number
     *
     * @param name The name for which to search
     * @return The formatted response ArrayList
     */
    public ArrayList<String> searchReservations(String name){
        lmp.requestCS();
        ArrayList<String> response = searchResponse(existingReservation(name), search(name), name);
        lmp.releaseCriticalSection();
        return response;
    }

    /**
     * Procedural logic for deleting a reservation and formulating the appropriate response.
     * Was a reservation found? If so, delete reservation
     *
     * @param name The name for which to delete
     * @return The formatted response ArrayList
     */
    public ArrayList<String> delete(String name){
        lmp.requestCS();
        int seat = search(name);
        boolean existing = existingReservation(name);
        if (existing){
            removeReservation(seat);
        }
        lmp.releaseCriticalSection();
        return deleteResponse(existing, seat, name);
    }

    /**
     * Creates server responses for the Server class
     *
     * @param request server request to act upon.
     * @return Formatted ArrayList response to send back to client
     */
    public synchronized ArrayList<String> accessBackend(String request){
        ArrayList<String> requestList = Utils.interpretStringAsList(request);
        ArrayList<String> response = new ArrayList<>();
        String method = requestList.get(0);
        String name = requestList.get(1);
        String seat;
        MsgHandler.debug("Accessing server with request: " + request);
        switch(method){
            case("bookSeat"):
                seat = requestList.get(2);
                response = bookSeat(name, Integer.parseInt(seat));
                break;
            case("reserve"):
                response = reserve(name);
                break;
            case("search"):
                response = searchReservations(name);
                break;
            case("delete"):
                response = delete(name);
                break;
        }
        return response;
    }

    /** Reads server configuration from stdin and starts a server instance. */
    public static void main (String[] args) {
        InputReader reader = new InputReader();
        ArrayList<Integer> serverConfig = reader.inputServerConfig();
        int serverIndex = serverConfig.get(0);
        int numServers = serverConfig.get(1);
        int numSeats = serverConfig.get(2);
        List<InetSocketAddress> serverList = reader.gatherServerList(numServers);
        //Set up server backend to accept requests from worker threads
        new Server(serverList, serverIndex, numServers, numSeats);
    }
}
| |
/**
*
*/
package soottocfg.soot.transformers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import com.google.common.base.Verify;
import soot.ArrayType;
import soot.IntType;
import soot.Local;
import soot.Modifier;
import soot.RefType;
import soot.Scene;
import soot.SootClass;
import soot.SootField;
import soot.SootMethod;
import soot.Type;
import soot.Unit;
import soot.Value;
import soot.VoidType;
import soot.jimple.ArrayRef;
import soot.jimple.CastExpr;
import soot.jimple.ClassConstant;
import soot.jimple.DefinitionStmt;
import soot.jimple.FieldRef;
import soot.jimple.IdentityStmt;
import soot.jimple.InstanceOfExpr;
import soot.jimple.IntConstant;
import soot.jimple.InvokeExpr;
import soot.jimple.Jimple;
import soot.jimple.JimpleBody;
import soot.jimple.LengthExpr;
import soot.jimple.NewArrayExpr;
import soot.jimple.NewMultiArrayExpr;
import soot.jimple.ParameterRef;
import soot.jimple.Stmt;
import soottocfg.soot.util.SootTranslationHelpers;
/**
* @author schaef
* @author rodykers
*
* The ArrayTransformer iterates over all classes in the Scene
* and replaces Java arrays by generated JayArrays.
* For each array type A[] we generate a corresponding JayArray type:
* class JayArray1 {
* public final int size;
* public JayArray1(int size);
* public A get(int idx);
* public void set (int idx, A elem);
* }
* For multi arrays, the constructor takes one int per dimension.
*
* Then ArrayTransformer replaces all usages of arrays:
* Array reads x=a[i] become x = a.get(i).
* Array writes a[i]=x become a.set(i,x).
* Array length a.length becomes a.$length.
* New array a = new A[1] becomes
* a = new JayArrayA;
* specialinvoke a.<init>(1);
* New multi array a = new A[1][2] becomes
* a = new JayArrayA;
* specialinvoke a.<init>(1, 2);
*
* Currently, get and set are not implemented, so the program behavior
* is changed.
*
* Further, the ArrayTransformer changes the signature of main(String[]
* args), so
* the program cannot be run from main after this transformation.
*/
public class ArrayTransformer extends AbstractSceneTransformer {
// Method name used for array writes: a[i]=x becomes a.set(i, x).
public static final String arraySetName = "set";
// Method name used for array reads: x=a[i] becomes x = a.get(i).
public static final String arrayGetName = "get";
// Name prefix of every generated replacement class (e.g. JayArray_int).
public static final String arrayTypeName = "JayArray";
// Prefix of the fields that precisely model the first N array elements.
public static final String arrayElementPrefix = "atIndex";
// Field holding the single abstracted "any element" value (new array model).
public final static String AElem = "$arElem";
// Cache: element type -> generated JayArray replacement type.
private final Map<Type, RefType> arrayTypeMap = new HashMap<Type, RefType>();
// private static final int NumberOfModeledElements = 0;
/**
 * Checks whether the given class is one of the generated JayArray
 * replacement classes, identified by the generated name prefix.
 */
public static boolean isArrayClass(SootClass sc) {
    final String className = sc.getName();
    return className.indexOf(arrayTypeName) == 0;
}
/** Creates a transformer with empty caches; all work happens in {@link #applyTransformation()}. */
public ArrayTransformer() {
}
// old field signature -> retyped field; used to rebuild FieldRefs broken by the first pass.
private final Map<String, SootField> fieldSubstitutionMap = new HashMap<String, SootField>();
// old method signature -> re-signed method; used to rebuild MethodRefs broken by the first pass.
private final Map<String, SootMethod> methodSubstitutionMap = new HashMap<String, SootMethod>();
/**
 * Replaces all Java array usages in the Scene by generated JayArray classes.
 *
 * Two passes are required. The first pass retypes all array fields and method
 * signatures (recording old-signature -> new member in the substitution maps),
 * which breaks the FieldRefs/MethodRefs inside method bodies. The second pass
 * rewrites each body: it rebuilds broken refs from the substitution maps and
 * replaces arrayref, newarray, newmultiarray, lengthof, instanceof, and cast
 * expressions with their JayArray equivalents.
 *
 * Fixes over the previous version: the "remove leftover array-typed fields"
 * sweep over all Scene classes used to run once per method body (it is
 * idempotent, so it now runs exactly once after all bodies are processed),
 * and the pointless catch-and-rethrow around body.validate() was removed.
 */
public void applyTransformation() {
    // make sure we have the Object[] replacement for this dimension
    ArrayType objAr = ArrayType.v(Scene.v().getSootClass("java.lang.Object").getType(), 1);
    RefType objArRepl = getArrayReplacementType(objAr);
    List<SootClass> classes = new LinkedList<SootClass>(Scene.v().getClasses());
    List<JimpleBody> bodies = new LinkedList<JimpleBody>();
    List<SootMethod> entryPoints = new LinkedList<SootMethod>(Scene.v().getEntryPoints());
    // ---- Pass 1: retype fields and method signatures. ----
    for (SootClass sc : classes) {
        if (sc.resolvingLevel() >= SootClass.SIGNATURES) {
            // change the type of all array fields.
            for (SootField f : sc.getFields()) {
                if (f.getType() instanceof ArrayType) {
                    final String oldSignature = f.getSignature();
                    f.setType(arrayTypeToRefType(f.getType()));
                    fieldSubstitutionMap.put(oldSignature, f);
                }
            }
            for (SootMethod sm : sc.getMethods()) {
                final String oldSignature = sm.getSignature();
                // we also have to update the refs in the EntryPoint list.
                boolean wasMain = sm.isEntryMethod();
                if (wasMain) {
                    entryPoints.remove(sm);
                }
                if (sc.resolvingLevel() >= SootClass.BODIES && sm.isConcrete() && !sc.isLibraryClass()
                        && !sc.isJavaLibraryClass()) {
                    // record all methods for which we found a body.
                    bodies.add((JimpleBody) sm.retrieveActiveBody());
                }
                // update return type
                sm.setReturnType(arrayTypeToRefType(sm.getReturnType()));
                // update parameter types
                List<Type> newParamTypes = new LinkedList<Type>();
                for (Type t : sm.getParameterTypes()) {
                    newParamTypes.add(arrayTypeToRefType(t));
                }
                sm.setParameterTypes(newParamTypes);
                if (wasMain) {
                    entryPoints.add(sm);
                }
                methodSubstitutionMap.put(oldSignature, sm);
            }
        }
    }
    Scene.v().setEntryPoints(entryPoints);
    // ---- Pass 2: rewrite the recorded bodies. ----
    for (JimpleBody body : bodies) {
        for (Local local : body.getLocals()) {
            local.setType(arrayTypeToRefType(local.getType()));
        }
        // now replace ArrayRefs and NewArray, NewMulitArray statements.
        for (Unit u : new LinkedList<Unit>(body.getUnits())) {
            /*
             * Changing the types from ArrayType to RefType breaks the soot
             * 'references' (FieldRef, MethodRef, ParameterRef) since they do
             * not get updated automatically. Hence, we re-build these refs
             * by hand from the substitution maps.
             */
            if (((Stmt) u).containsFieldRef() && ((Stmt) u).getFieldRef().getType() instanceof ArrayType) {
                FieldRef fr = ((Stmt) u).getFieldRef();
                String sig = fr.getField().getSignature();
                Verify.verify(fieldSubstitutionMap.containsKey(sig), "No entry found for " + fr + " in stmt " + u);
                fr.setFieldRef(fieldSubstitutionMap.get(sig).makeRef());
            } else if (((Stmt) u).containsInvokeExpr()) {
                InvokeExpr ive = ((Stmt) u).getInvokeExpr();
                final String oldSignature = ive.getMethodRef().toString();
                if (methodSubstitutionMap.containsKey(oldSignature)) {
                    ive.setMethodRef(methodSubstitutionMap.get(oldSignature).makeRef());
                }
            } else if (((Stmt) u) instanceof IdentityStmt
                    && ((IdentityStmt) u).getRightOp() instanceof ParameterRef) {
                ParameterRef pr = (ParameterRef) ((IdentityStmt) u).getRightOp();
                ((IdentityStmt) u).getRightOpBox().setValue(Jimple.v()
                        .newParameterRef(body.getMethod().getParameterType(pr.getIndex()), pr.getIndex()));
            }
            if (((Stmt) u).containsArrayRef()) {
                ArrayRef aref = ((Stmt) u).getArrayRef();
                // Note that the baseType has already been replaced, so it is
                // a RefType not an ArrayType!
                RefType refType = (RefType) aref.getBase().getType();
                // check if its an array write or read.
                if (u instanceof DefinitionStmt && ((DefinitionStmt) u).getLeftOp() instanceof ArrayRef) {
                    // replace the a[i]=x by a.set(i,x);
                    // bit of a hack...
                    if (! refType.getSootClass().declaresMethodByName(arraySetName))
                        refType = objArRepl;
                    SootMethod am = refType.getSootClass().getMethodByName(arraySetName);
                    Stmt ivk = Jimple.v().newInvokeStmt(Jimple.v().newVirtualInvokeExpr((Local) aref.getBase(),
                            am.makeRef(),
                            Arrays.asList(new Value[] { aref.getIndex(), ((DefinitionStmt) u).getRightOp() })));
                    ivk.addAllTagsOf(u);
                    // replace u by ivk
                    body.getUnits().insertAfter(ivk, u);
                    body.getUnits().remove(u);
                } else {
                    // bit of a hack...
                    if (! refType.getSootClass().declaresMethodByName(arrayGetName))
                        refType = objArRepl;
                    SootMethod am = refType.getSootClass().getMethodByName(arrayGetName);
                    // replace the x = a[i] by x = a.get(i)
                    Value ivk = Jimple.v().newVirtualInvokeExpr((Local) aref.getBase(), am.makeRef(),
                            aref.getIndex());
                    ((Stmt) u).getArrayRefBox().setValue(ivk);
                }
            }
            if (u instanceof DefinitionStmt && ((DefinitionStmt) u).getRightOp() instanceof NewArrayExpr) {
                NewArrayExpr na = (NewArrayExpr) ((DefinitionStmt) u).getRightOp();
                // replace the NewArrayExpr by a NewExpr of appropriate type.
                RefType refType = getArrayReplacementType((ArrayType) na.getType());
                ((DefinitionStmt) u).getRightOpBox().setValue(Jimple.v().newNewExpr(refType));
                // now add a constructor call where we pass the size of the array.
                SootClass arrClass = refType.getSootClass();
                SootMethod constructor = arrClass.getMethod(SootMethod.constructorName,
                        Arrays.asList(new Type[] { IntType.v() }));
                Local lhs = (Local) ((DefinitionStmt) u).getLeftOp();
                Stmt ccall = Jimple.v()
                        .newInvokeStmt(Jimple.v().newSpecialInvokeExpr(lhs, constructor.makeRef(), na.getSize()));
                ccall.addAllTagsOf(u);
                body.getUnits().insertAfter(ccall, u);
            } else if (u instanceof DefinitionStmt
                    && ((DefinitionStmt) u).getRightOp() instanceof NewMultiArrayExpr) {
                NewMultiArrayExpr na = (NewMultiArrayExpr) ((DefinitionStmt) u).getRightOp();
                RefType refType = getArrayReplacementType((ArrayType) na.getType());
                ((DefinitionStmt) u).getRightOpBox().setValue(Jimple.v().newNewExpr(refType));
                SootClass arrClass = refType.getSootClass();
                List<Type> paramTypes = new ArrayList<Type>(
                        Collections.nCopies(((ArrayType) na.getType()).numDimensions, IntType.v()));
                SootMethod constructor = arrClass.getMethod(SootMethod.constructorName, paramTypes);
                // pad unspecified dimensions (e.g. new int[1][][]) with size 0.
                List<Value> args = new LinkedList<Value>(na.getSizes());
                while (args.size() < paramTypes.size()) {
                    args.add(IntConstant.v(0));
                }
                Local lhs = (Local) ((DefinitionStmt) u).getLeftOp();
                Stmt ccall = Jimple.v()
                        .newInvokeStmt(Jimple.v().newSpecialInvokeExpr(lhs, constructor.makeRef(), args));
                ccall.addAllTagsOf(u);
                body.getUnits().insertAfter(ccall, u);
            } else if (u instanceof DefinitionStmt && ((DefinitionStmt) u).getRightOp() instanceof LengthExpr) {
                LengthExpr le = (LengthExpr) ((DefinitionStmt) u).getRightOp();
                SootClass arrayClass = ((RefType) le.getOp().getType()).getSootClass();
                // bit of a hack...
                if (! arrayClass.declaresMethodByName(arraySetName))
                    arrayClass = objArRepl.getSootClass();
                SootField lenField = arrayClass.getFieldByName(SootTranslationHelpers.lengthFieldName);
                Value fieldRef = Jimple.v().newInstanceFieldRef(le.getOp(),
                        lenField.makeRef());
                ((DefinitionStmt) u).getRightOpBox().setValue(fieldRef);
            } else if (u instanceof DefinitionStmt && ((DefinitionStmt) u).getRightOp() instanceof InstanceOfExpr) {
                InstanceOfExpr ioe = (InstanceOfExpr) ((DefinitionStmt) u).getRightOp();
                if (ioe.getCheckType() instanceof ArrayType) {
                    ioe.setCheckType(getArrayReplacementType((ArrayType) ioe.getCheckType()));
                }
            } else if (u instanceof DefinitionStmt && ((DefinitionStmt) u).getRightOp() instanceof CastExpr) {
                CastExpr ce = (CastExpr) ((DefinitionStmt) u).getRightOp();
                if (ce.getCastType() instanceof ArrayType) {
                    ce.setCastType(getArrayReplacementType((ArrayType) ce.getCastType()));
                }
            } else if (u instanceof DefinitionStmt && ((DefinitionStmt) u).getRightOp() instanceof ClassConstant) {
                ClassConstant cc = (ClassConstant) ((DefinitionStmt) u).getRightOp();
                if (cc.getValue().contains("[")) {
                    // TODO, here we have to parse the cc name and
                    // find the corresponding array class.
                    throw new RuntimeException("Not implemented: " + cc);
                }
            }
        }
        body.validate();
    }
    /*
     * Drop any field whose type is still an ArrayType. This sweep is
     * idempotent, so (unlike before) it runs once instead of once per body.
     */
    for (SootClass sc : classes) {
        if (sc.resolvingLevel() < SootClass.SIGNATURES) {
            continue;
        }
        for (SootField f : new LinkedList<SootField>(sc.getFields())) {
            if (f.getType() instanceof ArrayType) {
                sc.removeField(f);
            }
        }
    }
}
/**
 * Maps an ArrayType to its generated JayArray replacement type;
 * any other type is returned unchanged.
 */
protected Type arrayTypeToRefType(Type t) {
    return (t instanceof ArrayType) ? getArrayReplacementType((ArrayType) t) : t;
}
/**
 * Returns (creating and caching on first use) the JayArray replacement type
 * for the given array type, keyed by its already-converted element type.
 */
protected RefType getArrayReplacementType(ArrayType t) {
    final Type base = arrayTypeToRefType(t.getElementType());
    RefType replacement = this.arrayTypeMap.get(base);
    if (replacement == null) {
        // Deliberately not Map.computeIfAbsent: createArrayClass can recurse
        // into this method and mutate arrayTypeMap while we are populating it.
        SootClass arrClass = createArrayClass(base, t.numDimensions);
        replacement = RefType.v(arrClass);
        this.arrayTypeMap.put(base, replacement);
    }
    return replacement;
}
/**
 * Generates the JayArray replacement class for arrays with the given element
 * type and dimension count, adds it to the Scene, and returns it.
 *
 * Two shapes are generated:
 * - For a non-Object reference element type, a thin subclass of the
 *   JayArray_java_lang_Object class whose constructor delegates to the
 *   super constructor and stores the element-type class constant.
 * - Otherwise, the "base" shape: length field, element-type field, one field
 *   per exactly-modeled element, the abstract $arElem field, get/set methods,
 *   and one constructor per dimension.
 *
 * NOTE(review): code is byte-identical to the original; the Jimple unit
 * ordering (identity stmts first, branch targets added before their ifs)
 * is load-bearing, so only comments were changed.
 */
protected SootClass createArrayClass(Type elementType, int numDimensions) {
    SootClass arrayClass = new SootClass(
            arrayTypeName + "_" + elementType.toString().replace(".", "_").replace("$", "D"),
            Modifier.PUBLIC | Modifier.FINAL);
    // set the superclass to object for now (there will be a pass to fix this later
    SootClass objCls = Scene.v().getSootClass("java.lang.Object");
    Type objType = objCls.getType();
    // presumably the "java_lang_Object" substring check guards against already-
    // mangled replacement-class names — TODO confirm
    if (elementType instanceof RefType
            && !elementType.equals(objType)
            && !elementType.toString().contains("java_lang_Object")
    ) {
        // super class is JayArray_java_lang_Object
        SootClass objReplCls = this.arrayTypeMap.get(objType).getSootClass();
        arrayClass.setSuperclass(objReplCls);
        // Build constructor
        List<Type> argTypes = new ArrayList<Type>(Collections.nCopies(numDimensions, IntType.v()));
        SootMethod constructor = new SootMethod(SootMethod.constructorName, argTypes, VoidType.v(),
                Modifier.PUBLIC);
        // add the constructor to the class.
        arrayClass.addMethod(constructor);
        JimpleBody body = Jimple.v().newBody(constructor);
        // add a local for the first param
        body.insertIdentityStmts();
        // Add call to superclass constructor
        ArrayType objAr = ArrayType.v(Scene.v().getSootClass("java.lang.Object").getType(), numDimensions);
        SootClass objArrayClass = getArrayReplacementType(objAr).getSootClass();
        SootMethod superConstructor = objArrayClass.getMethod(SootMethod.constructorName, argTypes);
        body.getUnits().add(Jimple.v().newInvokeStmt(
                Jimple.v().newSpecialInvokeExpr(body.getThisLocal(),
                        superConstructor.makeRef(),
                        body.getParameterLocals())));
        Stmt retStmt = Jimple.v().newReturnVoidStmt();
        // set the element type
        RefType objArReplacement = getArrayReplacementType(objAr);
        SootField elemTypeField = objArReplacement.getSootClass().getFieldByName(
                SootTranslationHelpers.arrayElementTypeFieldName);
        String elementTypeName = ((RefType) elementType).getSootClass().getJavaStyleName();
        elementTypeName = elementTypeName.replace('.', '/');
        body.getUnits().add(Jimple.v().newAssignStmt(
                Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemTypeField.makeRef()),
                ClassConstant.v(elementTypeName)));
        body.getUnits().add(retStmt);
        body.validate();
        constructor.setActiveBody(body);
    } else {
        // super class is java.lang.Object
        arrayClass.setSuperclass(objCls);
        // add a field for array.length
        SootField lengthField = new SootField(SootTranslationHelpers.lengthFieldName, IntType.v(),
                Modifier.PUBLIC | Modifier.FINAL);
        arrayClass.addField(lengthField);
        // type of the array elements (e.g., float for float[])
        SootField elemTypeField = new SootField(SootTranslationHelpers.arrayElementTypeFieldName,
                RefType.v(Scene.v().getSootClass("java.lang.Class")), Modifier.PUBLIC | Modifier.FINAL);
        arrayClass.addField(elemTypeField);
        // number of exactly modeled elements
        int num_exact = soottocfg.Options.v().exactArrayElements();
        if (num_exact < 0)
            num_exact = 0;
        /**
         * New array model stuff: reference elements of 1-dim arrays are
         * abstracted to java.lang.Object.
         */
        if (numDimensions <= 1 && elementType instanceof RefType)
            elementType = Scene.v().getSootClass("java.lang.Object").getType();
        SootField elemField = new SootField(AElem, elementType);
        if (soottocfg.Options.v().arrayInv()) {
            arrayClass.addField(elemField);
        }
        // create one field for the first N elements of the array which we
        // model precisely:
        SootField[] arrFields = new SootField[num_exact];
        for (int i = 0; i < num_exact; i++) {
            arrFields[i] = new SootField(ArrayTransformer.arrayElementPrefix + "_" + i, elementType, Modifier.PUBLIC);
            arrayClass.addField(arrFields[i]);
        }
        /**
         * GET METHOD: if (idx==i) return atIndex_i; ... else return $arElem.
         */
        SootMethod getElement = new SootMethod(arrayGetName, Arrays.asList(new Type[] { IntType.v() }), elementType,
                Modifier.PUBLIC);
        arrayClass.addMethod(getElement);
        JimpleBody body = Jimple.v().newBody(getElement);
        body.insertIdentityStmts();
        Local retLocal = Jimple.v().newLocal("retVal", elementType);
        body.getLocals().add(retLocal);
        List<Unit> retStmts = new LinkedList<Unit>();
        // branch targets are collected in retStmts and appended after the ifs.
        for (int i = 0; i < num_exact; i++) {
            Unit ret = Jimple.v().newAssignStmt(retLocal,
                    Jimple.v().newInstanceFieldRef(body.getThisLocal(), arrFields[i].makeRef()));
            retStmts.add(ret);
            retStmts.add(Jimple.v().newReturnStmt(retLocal));
            Value cond = Jimple.v().newEqExpr(body.getParameterLocal(0), IntConstant.v(i));
            body.getUnits().add(Jimple.v().newIfStmt(cond, ret));
        }
        /**
         * New array model stuff
         */
        if (soottocfg.Options.v().arrayInv()) {
            // ret = element; return ret;
            Unit ret = Jimple.v().newAssignStmt(retLocal,
                    Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemField.makeRef()));
            body.getUnits().add(ret);
            body.getUnits().add(Jimple.v().newReturnStmt(retLocal));
        } else {
            // if none of the modeled fields was requested, add return havoc as
            // fall
            // through case.
            // ret = havoc; return ret;
            // body.getUnits().add(Jimple.v().newAssignStmt(retLocal,
            // Jimple.v().newStaticInvokeExpr(SootTranslationHelpers.v().getHavocMethod(elementType).makeRef())));
            // body.getUnits().add(Jimple.v().newReturnStmt(retLocal));
            throw new RuntimeException("Let's stick to the new Array model");
        }
        // now add all the return statements
        body.getUnits().addAll(retStmts);
        body.validate();
        getElement.setActiveBody(body);
        /**
         * SET METHOD: if (idx==i) atIndex_i = v; ... and $arElem = v.
         */
        SootMethod setElement = new SootMethod(arraySetName, Arrays.asList(new Type[] { IntType.v(), elementType }),
                VoidType.v(), Modifier.PUBLIC);
        arrayClass.addMethod(setElement);
        body = Jimple.v().newBody(setElement);
        body.insertIdentityStmts();
        List<Unit> updates = new LinkedList<Unit>();
        for (int i = 0; i < num_exact; i++) {
            Unit asn = Jimple.v().newAssignStmt(
                    Jimple.v().newInstanceFieldRef(body.getThisLocal(), arrFields[i].makeRef()),
                    body.getParameterLocal(1));
            updates.add(asn);
            updates.add(Jimple.v().newReturnVoidStmt());
            Value cond = Jimple.v().newEqExpr(body.getParameterLocal(0), IntConstant.v(i));
            body.getUnits().add(Jimple.v().newIfStmt(cond, asn));
        }
        /**
         * New array model stuff
         */
        if (soottocfg.Options.v().arrayInv()) {
            Unit asn = Jimple.v().newAssignStmt(
                    Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemField.makeRef()),
                    body.getParameterLocal(1));
            body.getUnits().add(asn);
        }
        body.getUnits().add(Jimple.v().newReturnVoidStmt());
        body.getUnits().addAll(updates);
        body.validate();
        setElement.setActiveBody(body);
        /**
         * CONSTRUCTOR
         */
        /*
         * Assume you have an:
         * int[][][] arr;
         * you can initialize that with
         * arr = new int[1][][];
         * arr = new int[1][2][];
         * or
         * arr = new int[1][2][3];
         * so you need to create one constructor for each dimension ...
         * a bit annoying, but sound.
         *
         */
        if (numDimensions > 1 && !elementType.toString().contains("_java_"))
        {
            System.err.println("[WARNING] Multi-dimensional arrays not supported. Result will be unsound.");
        }
        // Now create constructors that takes the array size as input
        // For int[][][] we have to create 3 constructors since one
        // could create new int[1][][], new int[1][2][], or new int[1][2][3]
        // first, add the signatures for all constructors to the class.
        // this is necessary, because the constructors call each other.
        SootMethod[] constructors = new SootMethod[numDimensions];
        for (int i = 0; i < numDimensions; i++) {
            List<Type> argTypes = new ArrayList<Type>(Collections.nCopies(i + 1, IntType.v()));
            SootMethod constructor = new SootMethod(SootMethod.constructorName, argTypes, VoidType.v(),
                    Modifier.PUBLIC);
            // add the constructor to the class.
            arrayClass.addMethod(constructor);
            constructors[i] = constructor;
        }
        /*
         * Now create the bodies for all constructors. Note that this
         * loop starts from 1 not from 0.
         * For simple arrays, we initialize all elements to default values
         * (i.e., zero or null). For multi arrays, we may have to initialize
         * the elements to new arrays. E.g.,
         * for new int[1][2] we create a constructor call
         * <init>(1, 2) which contains one element of type int[] and
         * we have to initialize this to a new array int[2] instead of null.
         */
        for (int i = 1; i <= numDimensions; i++) {
            SootMethod constructor = constructors[i - 1];
            body = Jimple.v().newBody(constructor);
            // add a local for the first param
            body.insertIdentityStmts();
            Local newElement = Jimple.v().newLocal("elem", elementType);
            body.getLocals().add(newElement);
            Stmt retStmt = Jimple.v().newReturnVoidStmt();
            // create the assignment for the length field, so we can jump back to it
            Stmt lengthSet = Jimple.v().newAssignStmt(
                    Jimple.v().newInstanceFieldRef(body.getThisLocal(), lengthField.makeRef()),
                    body.getParameterLocal(0));
            // set the length field.
            body.getUnits().add(lengthSet);
            // set the element type
            String elementTypeName = elementType.toString();
            if (elementType instanceof RefType) {
                elementTypeName = ((RefType) elementType).getSootClass().getJavaStyleName();
            }
            elementTypeName = elementTypeName.replace('.', '/');
            body.getUnits().add(Jimple.v().newAssignStmt(
                    Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemTypeField.makeRef()),
                    ClassConstant.v(elementTypeName)));
            // set element to default value
            Unit def = Jimple.v().newAssignStmt(
                    Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemField.makeRef()),
                    SootTranslationHelpers.v().getDefaultValue(elemField.getType()));
            body.getUnits().add(def);
            // initialize inner arrays if multi-dimensional
            if (i > 1) {
                initializeMultiArrayVars(elementType, elemField, newElement,
                        body, setElement, constructor, retStmt);
            }
            // pull element if multi-dim or set to default value else
            if (i > 1) {
                // TODO exactly modeled fields
                // for new array model
                Unit asn1 = Jimple.v().newAssignStmt(
                        newElement,
                        Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemField.makeRef()));
                body.getUnits().add(asn1);
                // Unit asn = Jimple.v().newAssignStmt(
                // Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemField.makeRef()),
                // newElement);
                // body.getUnits().add(asn);
            } else {
                // for exactly modeled fields
                for (int j = 0; j < num_exact; j++) {
                    Unit asn = Jimple.v().newAssignStmt(
                            Jimple.v().newInstanceFieldRef(body.getThisLocal(), arrFields[j].makeRef()),
                            SootTranslationHelpers.v().getDefaultValue(arrFields[j].getType()));
                    body.getUnits().add(asn);
                }
                // for new array model
                if (soottocfg.Options.v().arrayInv()) {
                    Unit asn = Jimple.v().newAssignStmt(
                            Jimple.v().newInstanceFieldRef(body.getThisLocal(), elemField.makeRef()),
                            SootTranslationHelpers.v().getDefaultValue(elemField.getType()));
                    body.getUnits().add(asn);
                }
            }
            body.getUnits().add(retStmt);
            body.validate();
            constructor.setActiveBody(body);
        }
    }
    // add the new class to the scene
    Scene.v().addClass(arrayClass);
    arrayClass.setResolvingLevel(SootClass.BODIES);
    arrayClass.setApplicationClass();
    return arrayClass;
}
/**
 * Emits the element-initialization loop used by the constructor of a multi-dimensional
 * array model class. For each index from 0 up to the outermost length (the
 * constructor's first parameter), it allocates a sub-array of the next smaller
 * dimension, invokes that sub-array's constructor with the remaining size parameters,
 * and stores it through the array's setter method.
 *
 * Generated Jimple pseudo-code:
 * <pre>
 *   ctr = 0;
 *   loopHead: if (ctr &gt;= param0) goto returnStmt;
 *   elem = new ElementType;
 *   specialinvoke elem.&lt;init&gt;(param1, ..., paramN);
 *   virtualinvoke this.setElement(ctr, elem);
 *   ctr = ctr + 1;
 *   goto loopHead;
 * </pre>
 *
 * @param elementType the element type of this dimension; must be a {@code RefType}
 *                    (the cast below will fail otherwise)
 * @param elemField   NOTE(review): currently unused in this method — kept for
 *                    signature compatibility with the caller; confirm before removing
 * @param newElement  pre-declared local that receives each freshly allocated sub-array
 * @param body        the constructor body being built; statements are appended in order
 * @param setElement  the array model's setter method invoked as this.set(ctr, elem)
 * @param constructor the enclosing constructor; its parameters are the per-dimension sizes
 * @param returnStmt  the constructor's return statement, used as the loop exit target
 */
private void initializeMultiArrayVars(Type elementType, SootField elemField, Local newElement,
        JimpleBody body, SootMethod setElement,
        SootMethod constructor, Stmt returnStmt) {
    // Loop counter local: ctr = 0
    Local counter = Jimple.v().newLocal("ctr", IntType.v());
    body.getLocals().add(counter);
    body.getUnits().add(Jimple.v().newAssignStmt(counter, IntConstant.v(0)));
    // Loop head: exit to returnStmt once ctr >= outermost length (first parameter).
    Stmt loopHead = Jimple.v().newIfStmt(Jimple.v().newGeExpr(counter, body.getParameterLocal(0)),
            returnStmt);
    body.getUnits().add(loopHead);
    RefType elRefType = (RefType) elementType;
    // Allocate a new sub-array object of this dimension's element type.
    body.getUnits().add(Jimple.v().newAssignStmt(newElement, Jimple.v().newNewExpr(elRefType)));
    // The sub-array has one dimension less, so its constructor takes one int fewer.
    int elParamCount = constructor.getParameterCount()-1;
    // Resolve the sub-array constructor <init>(int, ..., int) with elParamCount ints.
    List<Type> parameterTypes = new ArrayList<Type>(Collections.nCopies(elParamCount, IntType.v()));
    SootMethod elConstructor = elRefType.getSootClass().getMethod(SootMethod.constructorName, parameterTypes, VoidType.v());
    // Forward all size parameters except the first (the current dimension's length).
    List<Value> elConstructorArgs = new LinkedList<Value>();
    for (int k=1; k<constructor.getParameterCount();k++) {
        elConstructorArgs.add(body.getParameterLocal(k));
    }
    // specialinvoke elem.<init>(param1, ..., paramN)
    body.getUnits().add(
            Jimple.v().newInvokeStmt(
                    Jimple.v().newSpecialInvokeExpr(newElement, elConstructor.makeRef(), elConstructorArgs)
                    ));
    // Store the freshly built sub-array at the current index: this.set(ctr, elem).
    List<Value> args = new LinkedList<Value>();
    args.add(counter);
    args.add(newElement);
    body.getUnits().add(Jimple.v().newInvokeStmt(Jimple.v().newVirtualInvokeExpr(body.getThisLocal(), setElement.makeRef(), args)));
    // ctr = ctr + 1; goto loopHead
    body.getUnits().add(Jimple.v().newAssignStmt(counter, Jimple.v().newAddExpr(counter, IntConstant.v(1))));
    body.getUnits().add(Jimple.v().newGotoStmt(loopHead));
}
// private void fixSubtypingRelation() {
// for (Map.Entry<Type, RefType> t : this.arrayTypeMap.entrySet()) {
// for (int numDimensions : this.arrayDimensionMap.get(t.getValue())) {
//
// Type elementType = t.getKey();
// SootClass arrayClass = t.getValue().getSootClass();
//
// // if elements are of ref type T extends S, set super class to S[]
// if (elementType instanceof RefType) {
// SootClass superCls = getRefArraySuperClass((RefType) elementType, numDimensions);
//// System.out.println("Superclass of " + arrayClass.getName() + " set to " + superCls.getName());
// arrayClass.setSuperclass(superCls);
// }
// }
// }
// }
//
// private SootClass getRefArraySuperClass(RefType elType, int numDimensions) {
// SootClass obj = Scene.v().getSootClass("java.lang.Object");
// RefType objType = obj.getType();
//
// // try to find array superclass
// // e.g. see if there is a jayhorn equivalent of Number[] for Integer[]
// SootClass cur = elType.getSootClass();
// while (!cur.equals(obj) && cur.resolvingLevel() >= SootClass.HIERARCHY) {
// ArrayType at = ArrayType.v(cur.getType(), numDimensions);
// if (this.arrayTypeMap.containsKey(at)) {
// return this.arrayTypeMap.get(at).getSootClass();
// }
// cur = cur.getSuperclass();
// }
//
// // else return Object[] jayarray if it exists
// if (!elType.equals(objType) && this.arrayTypeMap.containsKey(objType)) {
// return this.arrayTypeMap.get(objType).getSootClass();
// }
//
// // else superclass is java.lang.Object
// return obj;
// }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.update;
import org.apache.lucene.index.AtomicReader;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.DocsEnum;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.FilterAtomicReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.OpenBitSet;
import org.apache.solr.common.cloud.CompositeIdRouter;
import org.apache.solr.common.cloud.DocRouter;
import org.apache.solr.common.cloud.HashBasedRouter;
import org.apache.solr.core.SolrCore;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.util.RefCounted;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * Splits the documents of a {@link SolrIndexSearcher}'s index into multiple partitions.
 * Documents are assigned to partitions either by hash ranges (when {@code ranges} are
 * supplied and the router is hash based), optionally restricted to a single route key
 * ({@code splitKey}), or round-robin across partitions when no ranges are given.
 * Each partition is then written out via {@code IndexWriter.addIndexes(IndexReader...)}
 * using per-segment readers whose live docs are masked down to that partition's docs.
 */
public class SolrIndexSplitter {
  public static Logger log = LoggerFactory.getLogger(SolrIndexSplitter.class);

  // Source index being split.
  SolrIndexSearcher searcher;
  // Field whose terms are used for routing: the unique key field, or routeFieldName if set.
  SchemaField field;
  List<DocRouter.Range> ranges;
  DocRouter.Range[] rangesArr; // same as ranges list, but an array for extra speed in inner loops
  // Target index directories (used when cores == null).
  List<String> paths;
  // Target cores; when non-null their IndexWriters are used directly.
  List<SolrCore> cores;
  DocRouter router;
  // Non-null only when router is hash based; used to compute slice hashes.
  HashBasedRouter hashRouter;
  // Number of target partitions.
  int numPieces;
  // Round-robin cursor, only used when ranges == null.
  int currPartition = 0;
  String routeFieldName;
  // Optional route key: when set, only docs whose id carries this route key are split.
  String splitKey;

  /**
   * Initializes the splitter from the split command: picks the routing field
   * (unique key unless {@code routeFieldName} is given), determines the number of
   * partitions from ranges/paths/cores, and extracts the route key from
   * {@code cmd.splitKey} if present.
   */
  public SolrIndexSplitter(SplitIndexCommand cmd) {
    searcher = cmd.getReq().getSearcher();
    ranges = cmd.ranges;
    paths = cmd.paths;
    cores = cmd.cores;
    router = cmd.router;
    hashRouter = router instanceof HashBasedRouter ? (HashBasedRouter)router : null;

    if (ranges == null) {
      // No explicit hash ranges: one partition per target path or core.
      numPieces = paths != null ? paths.size() : cores.size();
    } else {
      numPieces = ranges.size();
      rangesArr = ranges.toArray(new DocRouter.Range[ranges.size()]);
    }
    routeFieldName = cmd.routeFieldName;
    if (routeFieldName == null) {
      field = searcher.getSchema().getUniqueKeyField();
    } else {
      field = searcher.getSchema().getField(routeFieldName);
    }

    if (cmd.splitKey != null) {
      splitKey = getRouteKey(cmd.splitKey);
    }
  }

  /**
   * Performs the split: computes one doc-id bitset per (segment, partition) pair,
   * then for each partition wraps every segment in a {@link LiveDocsReader} restricted
   * to that partition's docs and merges them into the target index via
   * {@code IndexWriter.addIndexes}, which drops the masked-out (deleted) docs.
   *
   * @throws IOException on index read/write failure
   */
  public void split() throws IOException {

    List<AtomicReaderContext> leaves = searcher.getTopReaderContext().leaves();
    List<OpenBitSet[]> segmentDocSets = new ArrayList<OpenBitSet[]>(leaves.size());

    log.info("SolrIndexSplitter: partitions=" + numPieces + " segments="+leaves.size());

    for (AtomicReaderContext readerContext : leaves) {
      assert readerContext.ordInParent == segmentDocSets.size();  // make sure we're going in order
      OpenBitSet[] docSets = split(readerContext);
      segmentDocSets.add( docSets );
    }


    // would it be more efficient to write segment-at-a-time to each new index?
    // - need to worry about number of open descriptors
    // - need to worry about if IW.addIndexes does a sync or not...
    // - would be more efficient on the read side, but prob less efficient merging

    IndexReader[] subReaders = new IndexReader[leaves.size()];
    for (int partitionNumber=0; partitionNumber<numPieces; partitionNumber++) {
      log.info("SolrIndexSplitter: partition #" + partitionNumber + (ranges != null ? " range=" + ranges.get(partitionNumber) : ""));

      for (int segmentNumber = 0; segmentNumber<subReaders.length; segmentNumber++) {
        subReaders[segmentNumber] = new LiveDocsReader( leaves.get(segmentNumber), segmentDocSets.get(segmentNumber)[partitionNumber] );
      }

      boolean success = false;

      RefCounted<IndexWriter> iwRef = null;
      IndexWriter iw = null;
      if (cores != null) {
        // Reuse the target core's shared writer; released via decref() below.
        SolrCore subCore = cores.get(partitionNumber);
        iwRef = subCore.getUpdateHandler().getSolrCoreState().getIndexWriter(subCore);
        iw = iwRef.get();
      } else {
        // Writing to a plain directory: create (and later close) our own writer.
        SolrCore core = searcher.getCore();
        String path = paths.get(partitionNumber);
        iw = SolrIndexWriter.create("SplittingIndexWriter"+partitionNumber + (ranges != null ? " " + ranges.get(partitionNumber) : ""), path,
                                    core.getDirectoryFactory(), true, core.getLatestSchema(),
                                    core.getSolrConfig().indexConfig, core.getDeletionPolicy(), core.getCodec());
      }

      try {
        // This merges the subreaders and will thus remove deletions (i.e. no optimize needed)
        iw.addIndexes(subReaders);
        success = true;
      } finally {
        // Borrowed writers are decref'ed; owned writers are closed (quietly on failure).
        if (iwRef != null) {
          iwRef.decref();
        } else {
          if (success) {
            IOUtils.close(iw);
          } else {
            IOUtils.closeWhileHandlingException(iw);
          }
        }
      }
    }

  }

  /**
   * Computes, for one segment, a bitset per partition marking which live docs belong
   * to it. Iterates all terms of the routing field; for each term, optionally filters
   * by {@code splitKey}, hashes the id (when a hash router is present), and sets the
   * doc in every partition whose range includes the hash — or round-robins docs when
   * no ranges were given.
   *
   * @throws IOException on index read failure
   */
  OpenBitSet[] split(AtomicReaderContext readerContext) throws IOException {
    AtomicReader reader = readerContext.reader();
    OpenBitSet[] docSets = new OpenBitSet[numPieces];
    for (int i=0; i<docSets.length; i++) {
      docSets[i] = new OpenBitSet(reader.maxDoc());
    }
    Bits liveDocs = reader.getLiveDocs();

    Fields fields = reader.fields();
    Terms terms = fields==null ? null : fields.terms(field.getName());
    TermsEnum termsEnum = terms==null ? null : terms.iterator(null);
    if (termsEnum == null) return docSets;  // no terms in this field => all bitsets stay empty

    BytesRef term = null;
    DocsEnum docsEnum = null;

    CharsRef idRef = new CharsRef(100);
    for (;;) {
      term = termsEnum.next();
      if (term == null) break;

      // figure out the hash for the term

      // FUTURE: if conversion to strings costs too much, we could
      // specialize and use the hash function that can work over bytes.
      idRef = field.getType().indexedToReadable(term, idRef);
      String idString = idRef.toString();

      if (splitKey != null) {
        // todo have composite routers support these kind of things instead
        String part1 = getRouteKey(idString);
        if (part1 == null)
          continue;

        if (!splitKey.equals(part1)) {
          continue;
        }
      }

      int hash = 0;
      if (hashRouter != null) {
        hash = hashRouter.sliceHash(idString, null, null, null);
      }

      // Walk the (live) postings for this term and assign each doc to partition(s).
      docsEnum = termsEnum.docs(liveDocs, docsEnum, DocsEnum.FLAG_NONE);
      for (;;) {
        int doc = docsEnum.nextDoc();
        if (doc == DocsEnum.NO_MORE_DOCS) break;
        if (ranges == null) {
          docSets[currPartition].fastSet(doc);
          currPartition = (currPartition + 1) % numPieces;
        } else  {
          for (int i=0; i<rangesArr.length; i++) {      // inner-loop: use array here for extra speed.
            if (rangesArr[i].includes(hash)) {
              docSets[i].fastSet(doc);
            }
          }
        }
      }
    }

    return docSets;
  }

  /**
   * Extracts the route key prefix from a composite id of the form
   * {@code routeKey!rest}. Returns {@code null} when no separator is present (or it is
   * the first char). A trailing {@code /bits} suffix on the route key is stripped, but
   * only when the char after the bits separator is a digit.
   */
  private String getRouteKey(String idString) {
    int idx = idString.indexOf(CompositeIdRouter.separator);
    if (idx <= 0) return null;
    String part1 = idString.substring(0, idx);
    int commaIdx = part1.indexOf(CompositeIdRouter.bitsSeparator);
    if (commaIdx > 0) {
      if (commaIdx + 1 < part1.length())  {
        char ch = part1.charAt(commaIdx + 1);
        if (ch >= '0' && ch <= '9') {
          part1 = part1.substring(0, commaIdx);
        }
      }
    }
    return part1;
  }


  // change livedocs on the reader to delete those docs we don't want
  static class LiveDocsReader extends FilterAtomicReader {
    final OpenBitSet liveDocs;
    final int numDocs;

    public LiveDocsReader(AtomicReaderContext context, OpenBitSet liveDocs) throws IOException {
      super(context.reader());
      this.liveDocs = liveDocs;
      // numDocs is derived from the override bitset, not the wrapped reader.
      this.numDocs = (int)liveDocs.cardinality();
    }

    @Override
    public int numDocs() {
      return numDocs;
    }

    @Override
    public Bits getLiveDocs() {
      return liveDocs;
    }
  }

}
| |
package org.newdawn.swingle.slicktest;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JDesktopPane;
import javax.swing.JInternalFrame;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTabbedPane;
import javax.swing.JTextArea;
import org.lwjgl.opengl.GL11;
import org.newdawn.slick.AppGameContainer;
import org.newdawn.slick.BasicGame;
import org.newdawn.slick.Color;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.Image;
import org.newdawn.slick.Input;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.opengl.Texture;
import org.newdawn.slick.util.Log;
import org.newdawn.swingle.HeadlessFrame;
/**
* A slick based test for the Swing rendering to OpenGL stuff
*
* @author bgjlib
* @author kevin
*/
public class TestFrame extends BasicGame {
    /** The frame used to render swing */
    private HeadlessFrame frame;
    /** The Slick image rendered into - used to get a texture */
    private Image image;
    /** The number of open frames */
    private int openFrameCount = 0;
    /** The xoffset of the next frame to be displayed */
    private int xOffset = 30;
    /** The yoffset of the next frame to be displayed */
    private int yOffset = 30;
    /** The angle of the background rotation */
    private float ang;
    /** The dir of fade (1 = fading in, -1 = fading out) */
    private float dir = 1;
    /** The current fade in and out (clamped to [0,1] in update()) */
    private float alpha = 0;

    /**
     * Create a new test frame
     */
    public TestFrame() {
        super("Swingle Test Frame");
    }

    /**
     * @see org.newdawn.slick.BasicGame#init(org.newdawn.slick.GameContainer)
     */
    public void init(GameContainer container) throws SlickException {
        container.setShowFPS(false);
        container.getGraphics().setBackground(new Color(0.5f,0.7f,0.9f,1f));
        // Headless Swing frame and the Slick image its texture is copied into;
        // both are sized to match the 800x600 container.
        frame = new HeadlessFrame(800,600);
        image = new Image(800,600);

        // Build a simple menu bar with a working Quit item.
        JMenuBar bar = new JMenuBar();
        JMenu file = new JMenu("File");
        file.add(new JMenuItem("Open"));
        file.add(new JMenuItem("Close"));
        JMenuItem quit = new JMenuItem("Quit");
        quit.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                System.exit(0);
            }
        });
        file.add(quit);
        bar.add(file);
        frame.setJMenuBar(bar);

        // Transparent desktop so the OpenGL scene shows through behind the internal frames.
        JDesktopPane desktop = new JDesktopPane();
        desktop.setOpaque(false);
        desktop.setBackground(new java.awt.Color(0,0,0,0f));
        desktop.setPreferredSize(new Dimension(800, 600));

        createFrame(desktop);
        createFrame(desktop);
        createFrame(desktop);

        frame.setContentPane(desktop);
        frame.pack();
        frame.setSize(800, 600);
    }

    /**
     * Create an internal frame
     *
     * @param desktop The desktop the frame will belong to
     */
    private void createFrame(JDesktopPane desktop)
    {
        ExampleFrame iframe = new ExampleFrame();
        iframe.setVisible(true);

        desktop.add(iframe);
    }

    /**
     * @see org.newdawn.slick.BasicGame#update(org.newdawn.slick.GameContainer, int)
     */
    public void update(GameContainer container, int delta) throws SlickException {
        ang += delta * 0.1f;
        // Fade the GUI in/out over time, clamped to [0,1]; F1 reverses the direction.
        alpha += delta * (dir * 0.002f);
        alpha = Math.min(alpha,1);
        alpha = Math.max(alpha,0);

        if (container.getInput().isKeyPressed(Input.KEY_F1)) {
            dir = -dir;
        }

        // Only route input to the Swing frame when it is fully visible.
        frame.setInputEnabled(alpha == 1);
    }

    /**
     * @see org.newdawn.slick.Game#render(org.newdawn.slick.GameContainer, org.newdawn.slick.Graphics)
     */
    public void render(GameContainer container, Graphics g) throws SlickException {
        // Rotating background: nine red spokes around the screen center.
        g.translate(400, 300);
        g.rotate(0,0,ang);
        for (int a=0;a<360;a+=40) {
            g.rotate(0,0,40);
            g.setColor(Color.red);
            g.fillRect(-40,-250,80,20);
        }
        g.resetTransform();

        // Copy the Swing frame's rendering into our image's texture.
        frame.updateTexture(image.getTexture().getTextureID());

        if (alpha != 0) {
            // alpha test makes things slightly faster in this case
            GL11.glAlphaFunc(GL11.GL_NOTEQUAL, 0);
            GL11.glEnable(GL11.GL_ALPHA_TEST);
            image.draw(0,0,new Color(1,1,1f,alpha));
            GL11.glDisable(GL11.GL_ALPHA_TEST);
        }
        Texture.bindNone();

        g.setColor(Color.black);
        g.drawString("FPS: "+container.getFPS(), 10, 200);
        g.drawString("F1 - Toggle GUI", 10, 220);
    }

    public static void main(String[] argv) {
        try {
            AppGameContainer container = new AppGameContainer(new TestFrame(), 800, 600, false);
            container.start();
        } catch (Exception e) {
            Log.error(e);
        }
    }

    /**
     * A test frame to be displayed within the desktop
     *
     * @author bjglib
     */
    private class ExampleFrame extends JInternalFrame {
        /**
         * Create a new test frame. Each frame gets a numbered title and is
         * cascaded by (xOffset, yOffset) per open frame.
         */
        public ExampleFrame() {
            super("Document #" + (++openFrameCount), true, true, true, true);
            setSize(300,300);
            setLocation(xOffset*openFrameCount, yOffset*openFrameCount);

            JPanel p = new JPanel( new BorderLayout() );
            p.setBackground(java.awt.Color.red);
            JPanel tmp = new JPanel();
            JButton b = new JButton("Start Game");
            // Non-focusable so clicking it doesn't steal keyboard focus from the game.
            b.setFocusable(false);
            b.setFont(b.getFont().deriveFont(32.0f));
            b.addActionListener( new ActionListener()
            {
                public void actionPerformed(ActionEvent e)
                {
                    System.out.println( "test button on internal frame " + getTitle() );
                }
            });
            tmp.add( b );
            p.add( tmp, BorderLayout.SOUTH );

            tmp = new JPanel();
            JComboBox cb = new JComboBox( new Object[]{ "item 1", "item 2", "item 3" } );
            tmp.add( cb );
            p.add( tmp, BorderLayout.NORTH );

            JTabbedPane tabs = new JTabbedPane();
            tabs.addTab( "Test", p );

            JTextArea textArea = new JTextArea();
            textArea.setEditable(true);
            textArea.setFocusable(true);
            textArea.append("Some sample text");
            tabs.addTab( "Test 2", new JScrollPane(textArea) );

            this.add( tabs );
        }
    }

    /**
     * @see org.newdawn.slick.BasicGame#mouseMoved(int, int, int, int)
     */
    public void mouseMoved(int oldx, int oldy, int newx, int newy) {
        super.mouseMoved(oldx, oldy, newx, newy);

        // Forward input events to the headless Swing frame.
        frame.mouseMoved(newx, newy);
    }

    /**
     * @see org.newdawn.slick.BasicGame#mousePressed(int, int, int)
     */
    public void mousePressed(int button, int x, int y) {
        super.mousePressed(button, x, y);

        frame.mousePressed(x, y, button);
    }

    /**
     * @see org.newdawn.slick.BasicGame#mouseReleased(int, int, int)
     */
    public void mouseReleased(int button, int x, int y) {
        super.mouseReleased(button, x, y);

        frame.mouseReleased(x, y, button);
    }

    /**
     * @see org.newdawn.slick.BasicGame#keyPressed(int, char)
     */
    public void keyPressed(int key, char c) {
        super.keyPressed(key, c);

        frame.keyPressed(key, c);
    }

    /**
     * @see org.newdawn.slick.BasicGame#keyReleased(int, char)
     */
    public void keyReleased(int key, char c) {
        super.keyReleased(key, c);

        frame.keyReleased(key, c);
    }
}
| |
/*
* IzPack - Copyright 2001-2012 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.panels.userinput;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.swing.BorderFactory;
import javax.swing.JComponent;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.border.Border;
import com.izforge.izpack.api.adaptator.IXMLElement;
import com.izforge.izpack.api.data.Panel;
import com.izforge.izpack.api.exception.IzPackException;
import com.izforge.izpack.api.factory.ObjectFactory;
import com.izforge.izpack.api.handler.Prompt;
import com.izforge.izpack.api.resource.Resources;
import com.izforge.izpack.api.rules.Condition;
import com.izforge.izpack.api.rules.RulesEngine;
import com.izforge.izpack.gui.ButtonFactory;
import com.izforge.izpack.gui.TwoColumnLayout;
import com.izforge.izpack.installer.data.GUIInstallData;
import com.izforge.izpack.installer.gui.InstallerFrame;
import com.izforge.izpack.installer.gui.IzPanel;
import com.izforge.izpack.panels.userinput.field.ElementReader;
import com.izforge.izpack.panels.userinput.field.Field;
import com.izforge.izpack.panels.userinput.field.FieldHelper;
import com.izforge.izpack.panels.userinput.field.UserInputPanelSpec;
import com.izforge.izpack.panels.userinput.gui.Component;
import com.izforge.izpack.panels.userinput.gui.GUIField;
import com.izforge.izpack.panels.userinput.gui.GUIFieldFactory;
import com.izforge.izpack.panels.userinput.gui.UpdateListener;
import com.izforge.izpack.panels.userinput.gui.custom.GUICustomField;
import com.izforge.izpack.util.PlatformModelMatcher;
/**
* User input panel.
*
* @author Anthonin Bonnefoy
*/
public class UserInputPanel extends IzPanel
{
// XML attribute names read from the panel specification.
private static final String SUMMARY_KEY = "summaryKey";
private static final String TOPBUFFER = "topBuffer";
private static final String RIGID = "rigid";
private static final String DISPLAY_HIDDEN = "displayHidden";
private static final String DISPLAY_HIDDEN_CONDITION = "displayHiddenCondition";
private static final String READONLY = "readonly";
private static final String READONLY_CONDITION = "readonlyCondition";

/**
 * The parsed result from reading the XML specification from the file
 */
private IXMLElement spec;

// Guards against re-entrant dialog updates while fields are being (re)read.
private boolean eventsActivated = false;

// All field views created from the spec, in declaration order.
private List<GUIField> views = new ArrayList<GUIField>();

// Inner panel holding the field components; wrapped in a scroll pane.
private JPanel panel;

// First focusable component of the first displayed field, focused on activation.
private JComponent firstFocusedComponent;

// Rules engine used to evaluate field/panel conditions.
private RulesEngine rules;

/**
 * The factory for creating validators.
 */
private final ObjectFactory factory;

/**
 * The platform-model matcher.
 */
private final PlatformModelMatcher matcher;

/**
 * The prompt.
 */
private final Prompt prompt;

/**
 * The delegating prompt. This is used to switch between the above prompt and a no-op prompt when performing
 * updates.
 */
private final DelegatingPrompt delegatingPrompt;

// Model built from the panel spec; set as a side effect of readSpec().
private UserInputPanelSpec userInputModel;
/*--------------------------------------------------------------------------*/
// This method can be used to search for layout problems. If this class is
// compiled with this method uncommented, the layout guides will be shown
// on the panel, making it possible to see if all components are placed
// correctly.
/*--------------------------------------------------------------------------*/
// public void paint (Graphics graphics)
// {
// super.paint (graphics);
// layout.showRules ((Graphics2D)graphics, Color.red);
// }
/*--------------------------------------------------------------------------*/
/**
* Constructs an {@code UserInputPanel}.
*
* @param panel the panel meta-data
* @param parent the parent IzPack installer frame
* @param installData the installation data
* @param resources the resources
* @param rules the rules engine
* @param factory factory
* @param matcher the platform-model matcher
* @param prompt the prompt
*/
public UserInputPanel(Panel panel, InstallerFrame parent, GUIInstallData installData, Resources resources,
                      RulesEngine rules, ObjectFactory factory, final PlatformModelMatcher matcher, Prompt prompt)
{
    super(panel, parent, installData, resources);

    this.rules = rules;
    this.factory = factory;
    this.matcher = matcher;
    this.prompt = prompt;
    this.delegatingPrompt = new DelegatingPrompt(prompt);
    // Also initializes userInputModel as a side effect (see readSpec()).
    this.spec = readSpec();

    // Read the displayHidden flag. The catch also covers spec being null
    // (NPE on getAttribute) — panelActivate() handles the null-spec case later.
    boolean isDisplayingHidden = false;
    try
    {
        isDisplayingHidden = Boolean.parseBoolean(spec.getAttribute(DISPLAY_HIDDEN));
    }
    catch (Exception ignore)
    {
        isDisplayingHidden = false;
    }
    panel.setDisplayHidden(isDisplayingHidden);

    String condition = spec.getAttribute(DISPLAY_HIDDEN_CONDITION);
    if (condition != null && !condition.isEmpty())
    {
        panel.setDisplayHiddenCondition(condition);
    }

    // Prevent activating on certain global conditions
    ElementReader reader = new ElementReader(userInputModel.getConfig());
    Condition globalConstraint = reader.getComplexPanelCondition(spec, matcher, installData, rules);
    if (globalConstraint != null)
    {
        rules.addPanelCondition(panel, globalConstraint);
    }

    // Read the readonly flag; same defensive pattern as displayHidden above.
    boolean readonly = false;
    try
    {
        readonly = Boolean.parseBoolean(spec.getAttribute(READONLY));
    }
    catch (Exception ignore)
    {
        readonly = false;
    }
    panel.setReadonly(readonly);

    condition = spec.getAttribute(READONLY_CONDITION);
    if (condition != null && !condition.isEmpty())
    {
        panel.setReadonlyCondition(condition);
    }

    // Build the field views, wrap them in a scroll pane, and render once.
    init();
    addScrollPane();
    Dimension size = getMaximumSize();
    setSize(size.width, size.height);
    buildUI();
    updateUIElements();
    validate();
}
/**
* Indicates whether the panel has been validated or not. The installer won't let the user go
* further through the installation process until the panel is validated.
*
* @return a boolean stating whether the panel has been validated or not.
*/
@Override
public boolean isValidated()
{
    // Reads all field input with validation; any validation failure blocks navigation.
    return readInput(prompt);
}
/**
* Save visible contents of the this panel into install data.
*/
@Override
public void saveData() { readInput(prompt, true); /* skipValidation=true: persist field values without validating */ }
/**
* This method is called when the panel becomes active.
*/
@Override
public void panelActivate()
{
    if (spec == null)
    {
        // readSpec() failed during construction; report and skip this panel.
        // TODO: translate
        emitError("User input specification could not be found.",
                  "The specification for the user input panel could not be found. Please contact the packager.");
        parent.skipPanel();
    }
    else
    {
        // Rebuild the UI so condition-dependent fields reflect the current state.
        buildUI();
        updateUIElements();
    }
    // Give initial keyboard focus to the first focusable displayed field, if any.
    if (firstFocusedComponent != null)
    {
        setInitialFocus(firstFocusedComponent);
        firstFocusedComponent.requestFocusInWindow();
    }
}
/**
* Creates an installation record for unattended installations on {@link UserInputPanel},
* created during GUI installations.
*/
@Override
public void createInstallationRecord(IXMLElement rootElement)
{
    // Delegates to the automation helper so unattended installs replay the same input.
    new UserInputPanelAutomationHelper(views).createInstallationRecord(installData, rootElement);
}
/**
* Initialize the panel.
*/
private void init()
{
    // Suppress dialog updates while (re)building the views.
    eventsActivated = false;
    super.removeAll();
    views.clear();
    setLayout(new BorderLayout());
    panel = new JPanel();

    if (spec == null)
    {
        // return if we could not read the spec. further
        // processing will only lead to problems. In this
        // case we must skip the panel when it gets activated.
        return;
    }

    // clear button mnemonics map
    ButtonFactory.clearPanelButtonMnemonics();

    // ----------------------------------------------------
    // process all field nodes. Each field node is analyzed
    // for its type, then an appropriate member function
    // is called that will create the correct UI elements.
    // ----------------------------------------------------
    GUIFieldFactory viewFactory = new GUIFieldFactory(installData, this, parent, delegatingPrompt);
    UpdateListener listener = new UpdateListener()
    {
        @Override
        public void updated()
        {
            updateDialog();
        }
    };

    // Create one view per spec field; each view re-triggers updateDialog() on change.
    List<Field> fields = userInputModel.createFields(spec);
    for (Field field : fields)
    {
        GUIField view = viewFactory.create(field, userInputModel, spec);
        view.setUpdateListener(listener);
        views.add(view);
    }
    eventsActivated = true;
}
/**
* Set elements to be visible or not depending on field conditions - dynamic update of field visibility.
*/
/**
 * Refreshes field visibility from each field's condition and tracks the first
 * focusable component among the displayed fields. If any view reports a change
 * (translated text or updated value), the panel is invalidated for re-layout.
 */
protected void updateUIElements()
{
    boolean anyChanged = false;
    firstFocusedComponent = null;
    for (GUIField view : views)
    {
        boolean visible = view.getField().isConditionTrue();
        view.setDisplayed(visible);
        if (visible && firstFocusedComponent == null)
        {
            // Remember the first focusable component of the first visible field.
            firstFocusedComponent = view.getFirstFocusableComponent();
        }
        anyChanged |= view.translateStaticText();
        anyChanged |= view.updateView();
    }
    if (anyChanged)
    {
        super.invalidate();
    }
}
/**
* Builds the UI and makes it ready for display.
*/
private void buildUI()
{
    // Variables written by the fields placed on this panel; reported to the metadata.
    Set<String> affectedVariables = new HashSet<String>();
    // need to recreate the panel as TwoColumnLayout doesn't correctly support component removal
    panel.removeAll();
    panel.setLayout(createPanelLayout());

    for (GUIField view : views)
    {
        boolean enabled = false;
        boolean addToPanel = false;
        Field fieldDefinition = view.getField();
        Panel metadata = getMetadata();
        boolean required = FieldHelper.isRequired(fieldDefinition, installData, matcher);

        // Case 1: field applies to this platform and its condition holds —
        // show it, enabled unless the field or panel is effectively read-only.
        if (required && fieldDefinition.isConditionTrue())
        {
            enabled = !(fieldDefinition.isEffectiveReadonly(
                    metadata.isReadonly()
                            || (metadata.getReadonlyCondition() != null && rules.isConditionTrue(metadata.getReadonlyCondition())),
                    rules));
            addToPanel = true;
            view.setDisplayed(true);
        }
        // Case 2: condition is false but the field/panel opts into displaying
        // hidden fields — show it disabled.
        else if (required
                && (
                fieldDefinition.isEffectiveDisplayHidden(
                        metadata.isDisplayHidden()
                                || (metadata.getDisplayHiddenCondition() != null && rules.isConditionTrue(metadata.getDisplayHiddenCondition())),
                        rules)
        )
                )
        {
            enabled = false;
            addToPanel = true;
            view.setDisplayed(true);
        }
        // Case 3: not applicable — omit from the panel entirely.
        else
        {
            enabled = false;
            addToPanel = false;
            view.setDisplayed(false);
        }

        if (addToPanel)
        {
            for (Component component : view.getComponents())
            {
                component.setEnabled(enabled);
                panel.add(component.getComponent(), component.getConstraints());

                String var = view.getVariable();
                if (var != null)
                {
                    affectedVariables.add(var);
                }
            }
        }
    }
    getMetadata().setAffectedVariableNames(affectedVariables);
}
/**
* Reads the input installDataGUI from all UI elements and sets the associated variables.
*
* @param prompt the prompt to display messages
* @param skipValidation set to true when wanting to save field data without validating
* @return {@code true} if the operation is successful, otherwise {@code false}.
*/
/**
 * Reads the input from every displayed field whose condition holds and writes it to
 * the associated variables.
 *
 * @param prompt         the prompt used to display validation messages
 * @param skipValidation when {@code true}, field data is saved without validating
 * @return {@code true} on success; {@code false} as soon as one field fails validation
 */
private boolean readInput(Prompt prompt, boolean skipValidation)
{
    delegatingPrompt.setPrompt(prompt);
    for (GUIField view : views)
    {
        // Hidden fields and fields with a false condition are not read at all.
        if (!view.isDisplayed() || !view.getField().isConditionTrue())
        {
            continue;
        }
        if (skipValidation)
        {
            view.updateField(prompt, skipValidation);
        }
        else if (!view.updateField(prompt))
        {
            // Stop at the first invalid field.
            return false;
        }
    }
    return true;
}
/**
* Reads the input installDataGUI from all UI elements and sets the associated variables.
*
* @param prompt the prompt to display messages
* @return {@code true} if the operation is successful, otherwise {@code false}.
*/
private boolean readInput(Prompt prompt)
{
    // Convenience overload: always validate.
    return readInput(prompt, false);
}
/**
* Reads the XML specification for the panel layout.
*
* @return the panel specification
* @throws IzPackException for any problems in reading the specification
*/
private IXMLElement readSpec()
{
    // Side effect: initializes userInputModel, which init() and buildUI() rely on.
    userInputModel = new UserInputPanelSpec(getResources(), installData, factory, /*rules,*/ matcher);
    return userInputModel.getPanelSpec(getMetadata());
}
/**
* Called by fields that allow revalidation.
* No validation is required since we do not progress through the installer.
*/
/**
 * Called by fields that allow revalidation. Re-reads all field input without
 * validating (the user is not progressing through the installer), refreshes
 * visibility, and rebuilds the panel. Guarded by {@code eventsActivated} to
 * prevent re-entrant updates triggered by the rebuild itself.
 */
private void updateDialog()
{
    if (!this.eventsActivated)
    {
        return;
    }
    this.eventsActivated = false;
    // Read from the input fields without validation, logging any errors
    // instead of prompting the user.
    readInput(LoggingPrompt.INSTANCE, true);
    updateUIElements();
    buildUI();
    revalidate();
    repaint();
    this.eventsActivated = true;
}
/**
* Creates the panel layout.
*
* @return a new layout
*/
/**
 * Creates the two-column layout for the field panel.
 * <p>
 * The top buffer is taken from the {@code topBuffer} attribute (default 25);
 * {@code topBuffer=0} is useful if the panel should not move up and down during
 * dynamic validation (showing and hiding components within the same panel).
 * When the {@code rigid} attribute is true, the top buffer is treated as pixel
 * space rather than a percentage of the screen.
 *
 * @return a new layout
 */
private TwoColumnLayout createPanelLayout()
{
    int topBuffer = 25;
    boolean rigid = false;
    try
    {
        topBuffer = Integer.parseInt(spec.getAttribute(TOPBUFFER));
    }
    catch (Exception ignore)
    {
        // missing/invalid attribute (or null spec): keep the default
    }
    try
    {
        rigid = Boolean.parseBoolean(spec.getAttribute(RIGID));
    }
    catch (Exception ignore)
    {
        // null spec: keep the default
    }
    return new TwoColumnLayout(10, 5, 30, topBuffer, rigid, TwoColumnLayout.LEFT);
}
/**
 * Wraps the field panel in a borderless scroll pane and places it in the
 * center of this panel.
 */
private void addScrollPane()
{
    final Border emptyBorder = BorderFactory.createEmptyBorder();
    final JScrollPane scroller = new JScrollPane(panel);

    // Strip every decoration so the scroll pane blends into the panel.
    scroller.setBorder(emptyBorder);
    scroller.setViewportBorder(emptyBorder);
    scroller.getVerticalScrollBar().setBorder(emptyBorder);
    scroller.getHorizontalScrollBar().setBorder(emptyBorder);

    add(scroller, BorderLayout.CENTER);
}
/**
 * @return Caption for the summary panel. Returns null if summaryKey is not specified.
 */
@Override
public String getSummaryCaption()
{
    String captionKey = null;
    try
    {
        captionKey = spec.getAttribute(SUMMARY_KEY);
    }
    catch (Exception ignored)
    {
        // no summaryKey attribute - fall through with a null key
    }
    // Messages.get(null) yields no caption, matching the documented contract.
    return installData.getMessages().get(captionKey);
}
/**
 * Summarize all the visible views in the panel.
 *
 * @return the concatenated field summaries, or {@code null} when the panel's
 *         condition evaluates to false
 */
@Override
public String getSummaryBody()
{
    // Skip the summary entirely when the panel is conditionally hidden.
    if (getMetadata().hasCondition() && !rules.isConditionTrue(getMetadata().getCondition()))
    {
        return null;
    }

    StringBuilder summary = new StringBuilder();
    for (GUIField field : views)
    {
        if (!field.isDisplayed() || field.getVariable() == null)
        {
            continue;
        }
        summary.append(field instanceof GUICustomField
                ? getCustomSummary((GUICustomField) field)
                : getViewSummary(field));
    }
    return summary.toString();
}
/**
 * Extract summary information from regular fields.
 *
 * @param view the field to summarize
 * @return summary information for a field, or the empty string when the field
 *         declares no summary key
 */
private String getViewSummary(GUIField view)
{
    String summaryKey = view.getSummaryKey();
    if (summaryKey == null)
    {
        return "";
    }
    String label = installData.getMessages().get(summaryKey);
    String value = installData.getVariable(view.getVariable());
    return label + " " + value + "<br>";
}
/**
 * Extract summary information from custom fields.
 * <p>
 * Rows are numbered and laid out column by column; the first column of each
 * row carries the row number, subsequent columns are indented.
 *
 * @param customField the custom field to summarize
 * @return summary information for a custom field
 */
private String getCustomSummary(GUICustomField customField)
{
    List<String> labels = customField.getLabels();
    List<String> variables = customField.getVariables();
    int numberOfColumns = labels.size();
    int column = 0;
    int row = 0;
    StringBuilder entry = new StringBuilder();

    for (String variable : variables)
    {
        boolean firstColumn = (column % numberOfColumns == 0);
        String tab;
        if (firstColumn)
        {
            tab = "";
            column = 1; // Reset to first column
        }
        else
        {
            tab = " ";
            column++;
        }

        // NOTE(review): the label is resolved through the message catalog twice,
        // whereas getViewSummary() performs a single lookup - confirm intended.
        String key = installData.getMessages().get(installData.getMessages().get(labels.get(column - 1)));
        String value = installData.getVariable(variable);

        if (key != null)
        {
            if (firstColumn)
            {
                row++;
                // Left-justify the row number in a 3-character column.
                entry.append(String.format("%1$-3s", row + ". "));
            }
            // Append the data directly: the previous String.format(tab + key)
            // treated user data as a format string and threw on '%' characters.
            entry.append(tab).append(key);
            entry.append(' ').append(value);
            entry.append("<br>");
        }
    }
    return entry.toString();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.jdbc.thin;
import java.sql.BatchUpdateException;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.concurrent.Callable;
import org.apache.ignite.testframework.GridTestUtils;
/**
 * Batch statement self-test for the thin JDBC driver. Covers plain and prepared
 * statement batches, batch clearing, error reporting via
 * {@link BatchUpdateException} and batch operations on closed statements.
 */
public class JdbcThinBatchSelfTest extends JdbcThinAbstractDmlStatementSelfTest {
    /** SQL query. */
    private static final String SQL_PREPARED = "insert into Person(_key, id, firstName, lastName, age) values " +
        "(?, ?, ?, ?, ?)";

    /** Statement. */
    private Statement stmt;

    /** Prepared statement. */
    private PreparedStatement pstmt;

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        super.beforeTest();

        stmt = conn.createStatement();
        pstmt = conn.prepareStatement(SQL_PREPARED);

        assertNotNull(stmt);
        assertFalse(stmt.isClosed());

        assertNotNull(pstmt);
        assertFalse(pstmt.isClosed());
    }

    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        if (stmt != null && !stmt.isClosed())
            stmt.close();

        if (pstmt != null && !pstmt.isClosed())
            pstmt.close();

        // Null guards prevent a NullPointerException from masking the original
        // failure when beforeTest() died before the statements were created.
        if (pstmt != null)
            assertTrue(pstmt.isClosed());

        if (stmt != null)
            assertTrue(stmt.isClosed());

        super.afterTest();
    }

    /**
     * Executes a batch of growing multi-row inserts and checks the per-entry
     * update counts.
     *
     * @throws SQLException If failed.
     */
    public void testBatch() throws SQLException {
        final int BATCH_SIZE = 10;

        // Entry i inserts i+1 rows starting at idx, so rows never overlap.
        for (int idx = 0, i = 0; i < BATCH_SIZE; ++i, idx += i) {
            stmt.addBatch("insert into Person (_key, id, firstName, lastName, age) values "
                + generateValues(idx, i + 1));
        }

        int[] updCnts = stmt.executeBatch();

        assertEquals("Invalid update counts size", BATCH_SIZE, updCnts.length);

        for (int i = 0; i < BATCH_SIZE; ++i)
            assertEquals("Invalid update count", i + 1, updCnts[i]);
    }

    /**
     * Every batch operation on a closed statement must fail with
     * "Statement is closed."
     *
     * @throws SQLException If failed.
     */
    public void testBatchOnClosedStatement() throws SQLException {
        final Statement stmt2 = conn.createStatement();
        final PreparedStatement pstmt2 = conn.prepareStatement("");

        stmt2.close();
        pstmt2.close();

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                stmt2.addBatch("");

                return null;
            }
        }, SQLException.class, "Statement is closed.");

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                stmt2.clearBatch();

                return null;
            }
        }, SQLException.class, "Statement is closed.");

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                stmt2.executeBatch();

                return null;
            }
        }, SQLException.class, "Statement is closed.");

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                pstmt2.addBatch();

                return null;
            }
        }, SQLException.class, "Statement is closed.");

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                pstmt2.clearBatch();

                return null;
            }
        }, SQLException.class, "Statement is closed.");

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                pstmt2.executeBatch();

                return null;
            }
        }, SQLException.class, "Statement is closed.");
    }

    /**
     * A SELECT inside a batch must abort execution with
     * {@link BatchUpdateException} carrying the counts of the statements that
     * succeeded before it.
     *
     * @throws SQLException If failed.
     */
    public void testBatchException() throws SQLException {
        final int BATCH_SIZE = 7;

        for (int idx = 0, i = 0; i < BATCH_SIZE; ++i, idx += i) {
            stmt.addBatch("insert into Person (_key, id, firstName, lastName, age) values "
                + generateValues(idx, i + 1));
        }

        // A query producing a result set is invalid inside a batch.
        stmt.addBatch("select * from Person");

        stmt.addBatch("insert into Person (_key, id, firstName, lastName, age) values "
            + generateValues(100, 1));

        try {
            stmt.executeBatch();

            fail("BatchUpdateException must be thrown");
        } catch(BatchUpdateException e) {
            int[] updCnts = e.getUpdateCounts();

            assertEquals("Invalid update counts size", BATCH_SIZE, updCnts.length);

            for (int i = 0; i < BATCH_SIZE; ++i)
                assertEquals("Invalid update count", i + 1, updCnts[i]);

            if (!e.getMessage().contains("Query produced result set [qry=select * from Person, args=[]]")) {
                log.error("Invalid exception: ", e);

                fail();
            }
        }
    }

    /**
     * Executing a cleared batch must fail with "Batch is empty."
     *
     * @throws SQLException If failed.
     */
    public void testBatchClear() throws SQLException {
        final int BATCH_SIZE = 7;

        for (int idx = 0, i = 0; i < BATCH_SIZE; ++i, idx += i) {
            stmt.addBatch("insert into Person (_key, id, firstName, lastName, age) values "
                + generateValues(idx, i + 1));
        }

        stmt.clearBatch();

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                stmt.executeBatch();

                return null;
            }
        }, SQLException.class, "Batch is empty.");
    }

    /**
     * Checks update counts for a prepared-statement batch of single-row inserts.
     *
     * @throws SQLException If failed.
     */
    public void testBatchPrepared() throws SQLException {
        final int BATCH_SIZE = 10;

        for (int i = 0; i < BATCH_SIZE; ++i) {
            int paramCnt = 1;

            pstmt.setString(paramCnt++, "p" + i);
            pstmt.setInt(paramCnt++, i);
            pstmt.setString(paramCnt++, "Name" + i);
            pstmt.setString(paramCnt++, "Lastname" + i);
            pstmt.setInt(paramCnt++, 20 + i);

            pstmt.addBatch();
        }

        int[] updCnts = pstmt.executeBatch();

        assertEquals("Invalid update counts size", BATCH_SIZE, updCnts.length);

        for (int i = 0; i < BATCH_SIZE; ++i)
            assertEquals("Invalid update count", 1, updCnts[i]);
    }

    /**
     * A type mismatch in the last batch entry must abort execution with
     * {@link BatchUpdateException} carrying the counts of the preceding entries.
     *
     * @throws SQLException If failed.
     */
    public void testBatchExceptionPrepared() throws SQLException {
        final int BATCH_SIZE = 7;

        for (int i = 0; i < BATCH_SIZE; ++i) {
            int paramCnt = 1;

            pstmt.setString(paramCnt++, "p" + i);
            pstmt.setInt(paramCnt++, i);
            pstmt.setString(paramCnt++, "Name" + i);
            pstmt.setString(paramCnt++, "Lastname" + i);
            pstmt.setInt(paramCnt++, 20 + i);

            pstmt.addBatch();
        }

        int paramCnt = 1;

        pstmt.setString(paramCnt++, "p" + 100);
        // Deliberate type mismatch: the 'id' column expects an int.
        pstmt.setString(paramCnt++, "x");
        pstmt.setString(paramCnt++, "Name" + 100);
        pstmt.setString(paramCnt++, "Lastname" + 100);
        pstmt.setInt(paramCnt++, 20 + 100);

        pstmt.addBatch();

        try {
            pstmt.executeBatch();

            fail("BatchUpdateException must be thrown");
        } catch(BatchUpdateException e) {
            int[] updCnts = e.getUpdateCounts();

            assertEquals("Invalid update counts size", BATCH_SIZE, updCnts.length);

            for (int i = 0; i < BATCH_SIZE; ++i)
                assertEquals("Invalid update count", 1, updCnts[i]);

            if (!e.getMessage().contains("Failed to execute SQL query.")) {
                log.error("Invalid exception: ", e);

                fail();
            }
        }
    }

    /**
     * Executing a cleared prepared-statement batch must fail with
     * "Batch is empty."
     *
     * @throws SQLException If failed.
     */
    public void testBatchClearPrepared() throws SQLException {
        final int BATCH_SIZE = 10;

        for (int i = 0; i < BATCH_SIZE; ++i) {
            int paramCnt = 1;

            pstmt.setString(paramCnt++, "p" + i);
            pstmt.setInt(paramCnt++, i);
            pstmt.setString(paramCnt++, "Name" + i);
            pstmt.setString(paramCnt++, "Lastname" + i);
            pstmt.setInt(paramCnt++, 20 + i);

            pstmt.addBatch();
        }

        pstmt.clearBatch();

        GridTestUtils.assertThrows(log, new Callable<Object>() {
            @Override public Object call() throws Exception {
                pstmt.executeBatch();

                return null;
            }
        }, SQLException.class, "Batch is empty.");
    }

    /**
     * @param beginIndex Begin row index.
     * @param cnt Count of rows.
     * @return String contains values for 'cnt' rows.
     */
    private String generateValues(int beginIndex, int cnt) {
        StringBuilder sb = new StringBuilder();

        int lastIdx = beginIndex + cnt - 1;

        for (int i = beginIndex; i < lastIdx; ++i)
            sb.append(valuesRow(i)).append(',');

        sb.append(valuesRow(lastIdx));

        return sb.toString();
    }

    /**
     * @param idx Index of the row.
     * @return String with row values.
     */
    private String valuesRow(int idx) {
        return String.format("('p%d', %d, 'Name%d', 'Lastname%d', %d)", idx, idx, idx, idx, 20 + idx);
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.identitymanagement.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for listing the managed policies attached to an IAM group.
 * <p>
 * NOTE: this class is produced by the AWS SDK code generator (see the
 * {@code @Generated} annotation); edits here will be lost on regeneration.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/iam-2010-05-08/ListAttachedGroupPolicies" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListAttachedGroupPoliciesRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The name (friendly name, not ARN) of the group to list attached policies for.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also include any of
     * the following characters: _+=,.@-
     * </p>
     */
    private String groupName;
    /**
     * <p>
     * The path prefix for filtering the results. This parameter is optional. If it is not included, it defaults to a
     * slash (/), listing all policies.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of either a forward slash (/) by itself or a string that must begin and end with forward
     * slashes. In addition, it can contain any ASCII character from the ! (\u0021) through the DEL character (\u007F),
     * including most punctuation characters, digits, and upper and lowercased letters.
     * </p>
     */
    private String pathPrefix;
    /**
     * <p>
     * Use this parameter only when paginating results and only after you receive a response indicating that the results
     * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to
     * indicate where the next call should start.
     * </p>
     */
    private String marker;
    /**
     * <p>
     * Use this only when paginating results to indicate the maximum number of items you want in the response. If
     * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is
     * <code>true</code>.
     * </p>
     * <p>
     * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return fewer
     * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element
     * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells
     * the service where to continue from.
     * </p>
     */
    private Integer maxItems;

    /**
     * <p>
     * The name (friendly name, not ARN) of the group to list attached policies for.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also include any of
     * the following characters: _+=,.@-
     * </p>
     *
     * @param groupName
     *        The name (friendly name, not ARN) of the group to list attached policies for.</p>
     *        <p>
     *        This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string
     *        of characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also
     *        include any of the following characters: _+=,.@-
     */
    public void setGroupName(String groupName) {
        this.groupName = groupName;
    }

    /**
     * <p>
     * The name (friendly name, not ARN) of the group to list attached policies for.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also include any of
     * the following characters: _+=,.@-
     * </p>
     *
     * @return The name (friendly name, not ARN) of the group to list attached policies for.</p>
     *         <p>
     *         This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string
     *         of characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also
     *         include any of the following characters: _+=,.@-
     */
    public String getGroupName() {
        return this.groupName;
    }

    /**
     * <p>
     * The name (friendly name, not ARN) of the group to list attached policies for.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also include any of
     * the following characters: _+=,.@-
     * </p>
     *
     * @param groupName
     *        The name (friendly name, not ARN) of the group to list attached policies for.</p>
     *        <p>
     *        This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string
     *        of characters consisting of upper and lowercase alphanumeric characters with no spaces. You can also
     *        include any of the following characters: _+=,.@-
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAttachedGroupPoliciesRequest withGroupName(String groupName) {
        setGroupName(groupName);
        return this;
    }

    /**
     * <p>
     * The path prefix for filtering the results. This parameter is optional. If it is not included, it defaults to a
     * slash (/), listing all policies.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of either a forward slash (/) by itself or a string that must begin and end with forward
     * slashes. In addition, it can contain any ASCII character from the ! (\u0021) through the DEL character (\u007F),
     * including most punctuation characters, digits, and upper and lowercased letters.
     * </p>
     *
     * @param pathPrefix
     *        The path prefix for filtering the results. This parameter is optional. If it is not included, it defaults
     *        to a slash (/), listing all policies.</p>
     *        <p>
     *        This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string
     *        of characters consisting of either a forward slash (/) by itself or a string that must begin and end with
     *        forward slashes. In addition, it can contain any ASCII character from the ! (\u0021) through the DEL
     *        character (\u007F), including most punctuation characters, digits, and upper and lowercased letters.
     */
    public void setPathPrefix(String pathPrefix) {
        this.pathPrefix = pathPrefix;
    }

    /**
     * <p>
     * The path prefix for filtering the results. This parameter is optional. If it is not included, it defaults to a
     * slash (/), listing all policies.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of either a forward slash (/) by itself or a string that must begin and end with forward
     * slashes. In addition, it can contain any ASCII character from the ! (\u0021) through the DEL character (\u007F),
     * including most punctuation characters, digits, and upper and lowercased letters.
     * </p>
     *
     * @return The path prefix for filtering the results. This parameter is optional. If it is not included, it defaults
     *         to a slash (/), listing all policies.</p>
     *         <p>
     *         This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string
     *         of characters consisting of either a forward slash (/) by itself or a string that must begin and end with
     *         forward slashes. In addition, it can contain any ASCII character from the ! (\u0021) through the DEL
     *         character (\u007F), including most punctuation characters, digits, and upper and lowercased letters.
     */
    public String getPathPrefix() {
        return this.pathPrefix;
    }

    /**
     * <p>
     * The path prefix for filtering the results. This parameter is optional. If it is not included, it defaults to a
     * slash (/), listing all policies.
     * </p>
     * <p>
     * This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string of
     * characters consisting of either a forward slash (/) by itself or a string that must begin and end with forward
     * slashes. In addition, it can contain any ASCII character from the ! (\u0021) through the DEL character (\u007F),
     * including most punctuation characters, digits, and upper and lowercased letters.
     * </p>
     *
     * @param pathPrefix
     *        The path prefix for filtering the results. This parameter is optional. If it is not included, it defaults
     *        to a slash (/), listing all policies.</p>
     *        <p>
     *        This parameter allows (through its <a href="http://wikipedia.org/wiki/regex">regex pattern</a>) a string
     *        of characters consisting of either a forward slash (/) by itself or a string that must begin and end with
     *        forward slashes. In addition, it can contain any ASCII character from the ! (\u0021) through the DEL
     *        character (\u007F), including most punctuation characters, digits, and upper and lowercased letters.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAttachedGroupPoliciesRequest withPathPrefix(String pathPrefix) {
        setPathPrefix(pathPrefix);
        return this;
    }

    /**
     * <p>
     * Use this parameter only when paginating results and only after you receive a response indicating that the results
     * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to
     * indicate where the next call should start.
     * </p>
     *
     * @param marker
     *        Use this parameter only when paginating results and only after you receive a response indicating that the
     *        results are truncated. Set it to the value of the <code>Marker</code> element in the response that you
     *        received to indicate where the next call should start.
     */
    public void setMarker(String marker) {
        this.marker = marker;
    }

    /**
     * <p>
     * Use this parameter only when paginating results and only after you receive a response indicating that the results
     * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to
     * indicate where the next call should start.
     * </p>
     *
     * @return Use this parameter only when paginating results and only after you receive a response indicating that the
     *         results are truncated. Set it to the value of the <code>Marker</code> element in the response that you
     *         received to indicate where the next call should start.
     */
    public String getMarker() {
        return this.marker;
    }

    /**
     * <p>
     * Use this parameter only when paginating results and only after you receive a response indicating that the results
     * are truncated. Set it to the value of the <code>Marker</code> element in the response that you received to
     * indicate where the next call should start.
     * </p>
     *
     * @param marker
     *        Use this parameter only when paginating results and only after you receive a response indicating that the
     *        results are truncated. Set it to the value of the <code>Marker</code> element in the response that you
     *        received to indicate where the next call should start.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAttachedGroupPoliciesRequest withMarker(String marker) {
        setMarker(marker);
        return this;
    }

    /**
     * <p>
     * Use this only when paginating results to indicate the maximum number of items you want in the response. If
     * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is
     * <code>true</code>.
     * </p>
     * <p>
     * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return fewer
     * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element
     * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells
     * the service where to continue from.
     * </p>
     *
     * @param maxItems
     *        Use this only when paginating results to indicate the maximum number of items you want in the response. If
     *        additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is
     *        <code>true</code>.</p>
     *        <p>
     *        If you do not include this parameter, the number of items defaults to 100. Note that IAM might return
     *        fewer results, even when there are more results available. In that case, the <code>IsTruncated</code>
     *        response element returns <code>true</code>, and <code>Marker</code> contains a value to include in the
     *        subsequent call that tells the service where to continue from.
     */
    public void setMaxItems(Integer maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * <p>
     * Use this only when paginating results to indicate the maximum number of items you want in the response. If
     * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is
     * <code>true</code>.
     * </p>
     * <p>
     * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return fewer
     * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element
     * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells
     * the service where to continue from.
     * </p>
     *
     * @return Use this only when paginating results to indicate the maximum number of items you want in the response.
     *         If additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element
     *         is <code>true</code>.</p>
     *         <p>
     *         If you do not include this parameter, the number of items defaults to 100. Note that IAM might return
     *         fewer results, even when there are more results available. In that case, the <code>IsTruncated</code>
     *         response element returns <code>true</code>, and <code>Marker</code> contains a value to include in the
     *         subsequent call that tells the service where to continue from.
     */
    public Integer getMaxItems() {
        return this.maxItems;
    }

    /**
     * <p>
     * Use this only when paginating results to indicate the maximum number of items you want in the response. If
     * additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is
     * <code>true</code>.
     * </p>
     * <p>
     * If you do not include this parameter, the number of items defaults to 100. Note that IAM might return fewer
     * results, even when there are more results available. In that case, the <code>IsTruncated</code> response element
     * returns <code>true</code>, and <code>Marker</code> contains a value to include in the subsequent call that tells
     * the service where to continue from.
     * </p>
     *
     * @param maxItems
     *        Use this only when paginating results to indicate the maximum number of items you want in the response. If
     *        additional items exist beyond the maximum you specify, the <code>IsTruncated</code> response element is
     *        <code>true</code>.</p>
     *        <p>
     *        If you do not include this parameter, the number of items defaults to 100. Note that IAM might return
     *        fewer results, even when there are more results available. In that case, the <code>IsTruncated</code>
     *        response element returns <code>true</code>, and <code>Marker</code> contains a value to include in the
     *        subsequent call that tells the service where to continue from.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListAttachedGroupPoliciesRequest withMaxItems(Integer maxItems) {
        setMaxItems(maxItems);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getGroupName() != null)
            sb.append("GroupName: ").append(getGroupName()).append(",");
        if (getPathPrefix() != null)
            sb.append("PathPrefix: ").append(getPathPrefix()).append(",");
        if (getMarker() != null)
            sb.append("Marker: ").append(getMarker()).append(",");
        if (getMaxItems() != null)
            sb.append("MaxItems: ").append(getMaxItems());
        sb.append("}");
        return sb.toString();
    }

    /**
     * Field-by-field equality. The generator's {@code a == null ^ b == null}
     * idiom is true exactly when one side is null and the other is not.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ListAttachedGroupPoliciesRequest == false)
            return false;
        ListAttachedGroupPoliciesRequest other = (ListAttachedGroupPoliciesRequest) obj;
        if (other.getGroupName() == null ^ this.getGroupName() == null)
            return false;
        if (other.getGroupName() != null && other.getGroupName().equals(this.getGroupName()) == false)
            return false;
        if (other.getPathPrefix() == null ^ this.getPathPrefix() == null)
            return false;
        if (other.getPathPrefix() != null && other.getPathPrefix().equals(this.getPathPrefix()) == false)
            return false;
        if (other.getMarker() == null ^ this.getMarker() == null)
            return false;
        if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false)
            return false;
        if (other.getMaxItems() == null ^ this.getMaxItems() == null)
            return false;
        if (other.getMaxItems() != null && other.getMaxItems().equals(this.getMaxItems()) == false)
            return false;
        return true;
    }

    /**
     * Hash code consistent with {@link #equals(Object)}: every field compared
     * there is folded in here, with null contributing 0.
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getGroupName() == null) ? 0 : getGroupName().hashCode());
        hashCode = prime * hashCode + ((getPathPrefix() == null) ? 0 : getPathPrefix().hashCode());
        hashCode = prime * hashCode + ((getMarker() == null) ? 0 : getMarker().hashCode());
        hashCode = prime * hashCode + ((getMaxItems() == null) ? 0 : getMaxItems().hashCode());
        return hashCode;
    }

    /** Shallow copy via Object.clone(); all fields are immutable, so this is safe. */
    @Override
    public ListAttachedGroupPoliciesRequest clone() {
        return (ListAttachedGroupPoliciesRequest) super.clone();
    }

}
| |
///////////////////////////////////////////////////////////////////////////////
// Copyright (c) 2002, Eric D. Friedman All Rights Reserved.
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
///////////////////////////////////////////////////////////////////////////////
/*
* Contains changes for GemFireXD distributed data platform.
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*/
package com.gemstone.gnu.trove.decorator;
import com.gemstone.gnu.trove.TFloatLongHashMap;
import com.gemstone.gnu.trove.TFloatLongIterator;
import java.util.AbstractMap;
import java.util.AbstractSet;
import java.util.Collection;
import java.util.Iterator;
//import java.util.Map.Entry;
import java.util.Map;
import java.util.Set;
/**
* Wrapper class to make a TFloatLongHashMap conform to the <tt>java.util.Map</tt> API.
* This class simply decorates an underlying TFloatLongHashMap and translates the Object-based
* APIs into their Trove primitive analogs.
*
* <p>
* Note that wrapping and unwrapping primitive values is extremely inefficient. If
* possible, users of this class should override the appropriate methods in this class
* and use a table of canonical values.
* </p>
*
* Created: Mon Sep 23 22:07:40 PDT 2002
*
* @author Eric D. Friedman
* @version $Id: TFloatLongHashMapDecorator.java,v 1.4 2004/03/18 15:30:34 ericdf Exp $
* @since trove 0.1.8
*/
public class TFloatLongHashMapDecorator extends AbstractMap implements Map, Cloneable {
/** the wrapped primitive map */
protected TFloatLongHashMap _map;
/**
 * Creates a wrapper that decorates the specified primitive map.
 * Note: the map is held by reference, not copied; mutations through this
 * decorator are visible to the wrapped map and vice versa.
 *
 * @param map the primitive TFloatLongHashMap to expose via the java.util.Map API
 */
public TFloatLongHashMapDecorator(TFloatLongHashMap map) {
    super();
    this._map = map;
}
/**
* Clones the underlying trove collection and returns the clone wrapped in a new
* decorator instance. This is a shallow clone except where primitives are
* concerned.
*
* @return a copy of the receiver
*/
@Override // GemStoneAddition
public Object clone() {
try {
TFloatLongHashMapDecorator copy = (TFloatLongHashMapDecorator) super.clone();
copy._map = (TFloatLongHashMap)_map.clone();
return copy;
} catch (CloneNotSupportedException e) {
// assert(false);
throw new InternalError(); // we are cloneable, so this does not happen
}
}
/**
* Inserts a key/value pair into the map.
*
* @param key an <code>Object</code> value
* @param value an <code>Object</code> value
* @return the previous value associated with <tt>key</tt>,
* or Integer(0) if none was found.
*/
@Override // GemStoneAddition
public Object put(Object key, Object value) {
return wrapValue(_map.put(unwrapKey(key), unwrapValue(value)));
}
/**
* Compares this map with another map for equality of their stored
* entries.
*
* @param other an <code>Object</code> value
* @return true if the maps are identical
*/
@Override // GemStoneAddition
public boolean equals(Object other) {
if (_map.equals(other)) {
return true; // comparing two trove maps
} else if (other instanceof Map) {
Map that = (Map)other;
if (that.size() != _map.size()) {
return false; // different sizes, no need to compare
} else { // now we have to do it the hard way
Iterator it = that.entrySet().iterator();
for (int i = that.size(); i-- > 0;) {
Map.Entry e = (Map.Entry)it.next();
Object key = e.getKey();
Object val = e.getValue();
if (key instanceof Float && val instanceof Long) {
float k = unwrapKey(key);
long v = unwrapValue(val);
if (_map.containsKey(k) && v == _map.get(k)) {
// match, ok to continue
} else {
return false; // no match: we're done
}
} else {
return false; // different type in other map
}
}
return true; // all entries match
}
} else {
return false;
}
}
/**
* Retrieves the value for <tt>key</tt>
*
* @param key an <code>Object</code> value
* @return the value of <tt>key</tt> or null if no such mapping exists.
*/
@Override // GemStoneAddition
public Object get(Object key) {
float k = unwrapKey(key);
long v = _map.get(k);
// 0 may be a false positive since primitive maps
// cannot return null, so we have to do an extra
// check here.
if (v == 0) {
return _map.containsKey(k) ? wrapValue(v) : null;
} else {
return wrapValue(v);
}
}
/**
* Empties the map.
*/
@Override // GemStoneAddition
public void clear() {
this._map.clear();
}
/**
* Deletes a key/value pair from the map.
*
* @param key an <code>Object</code> value
* @return the removed value, or Integer(0) if it was not found in the map
*/
@Override // GemStoneAddition
public Object remove(Object key) {
return wrapValue(_map.remove(unwrapKey(key)));
}
/**
* Returns a Set view on the entries of the map.
*
* @return a <code>Set</code> value
*/
@Override // GemStoneAddition
public Set entrySet() {
return new AbstractSet() {
@Override // GemStoneAddition
public int size() {
return _map.size();
}
@Override // GemStoneAddition
public boolean isEmpty() {
return TFloatLongHashMapDecorator.this.isEmpty();
}
@Override // GemStoneAddition
public boolean contains(Object o) {
if (o instanceof Map.Entry) {
Object k = ((Map.Entry)o).getKey();
Object v = ((Map.Entry)o).getValue();
return (TFloatLongHashMapDecorator.this.containsKey(k) &&
TFloatLongHashMapDecorator.this.get(k).equals(v));
} else {
return false;
}
}
@Override // GemStoneAddition
public Iterator iterator() {
return new Iterator() {
private final TFloatLongIterator it = _map.iterator();
public Object next() {
it.advance();
final Object key = wrapKey(it.key());
final Object v = wrapValue(it.value());
return new Map.Entry() {
private Object val = v;
@Override // GemStoneAddition
public boolean equals(Object o) {
return ((o instanceof Map.Entry) &&
((Map.Entry)o).getKey().equals(key) &&
((Map.Entry)o).getValue().equals(val));
}
public Object getKey() {
return key;
}
public Object getValue() {
return val;
}
@Override // GemStoneAddition
public int hashCode() {
return key.hashCode() + val.hashCode();
}
public Object setValue(Object value) {
val = value;
return put(key, value);
}
};
}
public boolean hasNext() {
return it.hasNext();
}
public void remove() {
it.remove();
}
};
}
@Override // GemStoneAddition
public boolean add(Object o) {
throw new UnsupportedOperationException();
}
@Override // GemStoneAddition
public boolean remove(Object o) {
throw new UnsupportedOperationException();
}
@Override // GemStoneAddition
public boolean addAll(Collection c) {
throw new UnsupportedOperationException();
}
@Override // GemStoneAddition
public boolean retainAll(Collection c) {
throw new UnsupportedOperationException();
}
@Override // GemStoneAddition
public boolean removeAll(Collection c) {
throw new UnsupportedOperationException();
}
@Override // GemStoneAddition
public void clear() {
TFloatLongHashMapDecorator.this.clear();
}
};
}
/**
* Checks for the presence of <tt>val</tt> in the values of the map.
*
* @param val an <code>Object</code> value
* @return a <code>boolean</code> value
*/
@Override // GemStoneAddition
public boolean containsValue(Object val) {
return _map.containsValue(unwrapValue(val));
}
/**
* Checks for the present of <tt>key</tt> in the keys of the map.
*
* @param key an <code>Object</code> value
* @return a <code>boolean</code> value
*/
@Override // GemStoneAddition
public boolean containsKey(Object key) {
return _map.containsKey(unwrapKey(key));
}
/**
* Returns the number of entries in the map.
* @return the map's size.
*/
@Override // GemStoneAddition
public int size() {
return this._map.size();
}
/**
* Indicates whether map has any entries.
* @return true if the map is empty
*/
@Override // GemStoneAddition
public boolean isEmpty() {
return (size() == 0);
}
/**
* Copies the key/value mappings in <tt>map</tt> into this map.
* Note that this will be a <b>deep</b> copy, as storage is by
* primitive value.
*
* @param map a <code>Map</code> value
*/
@Override // GemStoneAddition
public void putAll(Map map) {
Iterator it = map.entrySet().iterator();
for (int i = map.size(); i-- > 0;) {
Map.Entry e = (Map.Entry)it.next();
this.put(e.getKey(), e.getValue());
}
}
/**
* Wraps a key
*
* @param k a key in the underlying map
* @return an Object representation of the key
*/
protected Float wrapKey(float k) {
return new Float(k);
}
/**
* Unwraps a key
*
* @param key a wrapped key
* @return an unwrapped representation of the key
*/
protected float unwrapKey(Object key) {
return ((Float)key).floatValue();
}
/**
* Wraps a value
*
* @param k a value in the underlying map
* @return an Object representation of the value
*/
protected Long wrapValue(long k) {
return Long.valueOf(k);
}
/**
* Unwraps a value
*
* @param value a wrapped value
* @return an unwrapped representation of the value
*/
protected long unwrapValue(Object value) {
return ((Long)value).longValue();
}
} // TFloatLongHashMapDecorator
| |
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.omnibox.geo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.net.wifi.ScanResult;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.telephony.CellIdentityCdma;
import android.telephony.CellIdentityGsm;
import android.telephony.CellIdentityLte;
import android.telephony.CellIdentityWcdma;
import android.telephony.CellInfoCdma;
import android.telephony.CellInfoGsm;
import android.telephony.CellInfoLte;
import android.telephony.CellInfoWcdma;
import android.telephony.TelephonyManager;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatcher;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
import org.robolectric.util.ReflectionHelpers;
import org.chromium.base.Callback;
import org.chromium.base.test.BaseRobolectricTestRunner;
import org.chromium.chrome.browser.omnibox.geo.VisibleNetworks.VisibleCell;
import org.chromium.chrome.browser.omnibox.geo.VisibleNetworks.VisibleCell.RadioType;
import org.chromium.chrome.browser.omnibox.geo.VisibleNetworks.VisibleWifi;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
/**
* Robolectric tests for {@link PlatformNetworksManager}.
*/
@RunWith(BaseRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class PlatformNetworksManagerTest {
    // Fixture wifis: one matching the "connected" stubs below, one scan-only.
    private static final VisibleWifi CONNECTED_WIFI =
            VisibleWifi.create("ssid1", "11:11:11:11:11:11", -1, 10L);
    private static final VisibleWifi NOT_CONNECTED_WIFI =
            VisibleWifi.create("ssid2", "11:11:11:11:11:12", -2, 20L);
    private static final long CONNECTED_WIFI_AGE = 1000L;
    private static final long NOT_CONNECTED_WIFI_AGE = 2000L;
    // Fixture cells, one per radio type handled by PlatformNetworksManager.
    private static final VisibleCell CDMA_CELL = VisibleCell.builder(RadioType.CDMA)
                                                         .setCellId(40)
                                                         .setLocationAreaCode(41)
                                                         .setMobileNetworkCode(43)
                                                         .setTimestamp(47L)
                                                         .build();
    private static final VisibleCell LTE_CELL = VisibleCell.builder(RadioType.LTE)
                                                        .setCellId(50)
                                                        .setMobileCountryCode(52)
                                                        .setMobileNetworkCode(53)
                                                        .setPhysicalCellId(55)
                                                        .setTrackingAreaCode(56)
                                                        .setTimestamp(57L)
                                                        .build();
    private static final VisibleCell WCDMA_CELL = VisibleCell.builder(RadioType.WCDMA)
                                                          .setCellId(60)
                                                          .setLocationAreaCode(61)
                                                          .setMobileCountryCode(62)
                                                          .setMobileNetworkCode(63)
                                                          .setPrimaryScramblingCode(64)
                                                          .setTimestamp(67L)
                                                          .build();
    private static final VisibleCell GSM_CELL = VisibleCell.builder(RadioType.GSM)
                                                        .setCellId(70)
                                                        .setLocationAreaCode(71)
                                                        .setMobileCountryCode(72)
                                                        .setMobileNetworkCode(73)
                                                        .setTimestamp(77L)
                                                        .build();
    private static final VisibleCell UNKNOWN_VISIBLE_CELL =
            VisibleCell.builder(RadioType.UNKNOWN).build();
    private static final VisibleCell UNKNOWN_MISSING_LOCATION_PERMISSION_VISIBLE_CELL =
            VisibleCell.builder(RadioType.UNKNOWN_MISSING_LOCATION_PERMISSION).build();
    private static final VisibleWifi UNKNOWN_VISIBLE_WIFI =
            VisibleWifi.create(null, null, null, null);
    private static final long LTE_CELL_AGE = 3000L;
    private static final long WCDMA_CELL_AGE = 4000L;
    private static final long GSM_CELL_AGE = 5000L;
    private static final long CDMA_CELL_AGE = 6000L;
    private static final long CURRENT_TIME_MS = 90000000L;
    private static final long CURRENT_ELAPSED_TIME_MS = 7000L;

    @Mock
    private Context mContext;
    @Mock
    private TelephonyManager mTelephonyManager;
    @Mock
    private WifiManager mWifiManager;
    @Mock
    private NetworkInfo mCellNetworkInfo;
    @Mock
    private NetworkInfo mWifiNetworkInfo;
    @Mock
    private NetworkInfo mEthernetNetworkInfo;
    @Mock
    private WifiInfo mWifiInfo;
    @Mock
    private ScanResult mWifiScanResult;
    @Mock
    private ScanResult mWifiScanResultNotConnected;
    @Mock
    private ScanResult mWifiScanResultUnknown;
    @Mock
    private CellInfoLte mCellInfoLte;
    @Mock
    private CellInfoWcdma mCellInfoWcdma;
    @Mock
    private CellInfoGsm mCellInfoGsm;
    @Mock
    private CellInfoCdma mCellInfoCdma;
    @Mock
    private CellIdentityLte mCellIdentityLte;
    @Mock
    private CellIdentityWcdma mCellIdentityWcdma;
    @Mock
    private CellIdentityGsm mCellIdentityGsm;
    @Mock
    private CellIdentityCdma mCellIdentityCdma;
    @Mock
    private Intent mNetworkStateChangedIntent;
    @Mock
    private Callback<Set<VisibleCell>> mVisibleCellCallback;
    @Captor
    ArgumentCaptor<Set<VisibleCell>> mVisibleCellsArgument;
    @Mock
    private Callback<VisibleNetworks> mVisibleNetworksCallback;
    @Captor
    ArgumentCaptor<VisibleNetworks> mVisibleNetworksArgument;

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        // Fixed clock so timestamp assertions are deterministic.
        PlatformNetworksManager.sTimeProvider = new PlatformNetworksManager.TimeProvider() {
            @Override
            public long getCurrentTime() {
                return CURRENT_TIME_MS;
            }

            @Override
            public long getElapsedRealtime() {
                return CURRENT_ELAPSED_TIME_MS;
            }
        };
        when(mContext.getSystemService(Context.TELEPHONY_SERVICE)).thenReturn(mTelephonyManager);
        when(mContext.getSystemService(Context.WIFI_SERVICE)).thenReturn(mWifiManager);
        when(mContext.getApplicationContext()).thenReturn(mContext);
        when(mCellNetworkInfo.getType()).thenReturn(ConnectivityManager.TYPE_MOBILE);
        when(mWifiNetworkInfo.getType()).thenReturn(ConnectivityManager.TYPE_WIFI);
        when(mEthernetNetworkInfo.getType()).thenReturn(ConnectivityManager.TYPE_ETHERNET);
        // Add double quotation, since getSSID would add it.
        when(mWifiInfo.getSSID()).thenReturn("\"" + CONNECTED_WIFI.ssid() + "\"");
        when(mWifiInfo.getBSSID()).thenReturn(CONNECTED_WIFI.bssid());
        when(mWifiManager.getConnectionInfo()).thenReturn(mWifiInfo);
        mWifiScanResult.SSID = CONNECTED_WIFI.ssid();
        mWifiScanResult.BSSID = CONNECTED_WIFI.bssid();
        mWifiScanResult.level = CONNECTED_WIFI.level();
        mWifiScanResult.timestamp =
                TimeUnit.MILLISECONDS.toMicros(CURRENT_ELAPSED_TIME_MS - CONNECTED_WIFI_AGE);
        mWifiScanResultNotConnected.SSID = NOT_CONNECTED_WIFI.ssid();
        mWifiScanResultNotConnected.BSSID = NOT_CONNECTED_WIFI.bssid();
        mWifiScanResultNotConnected.level = NOT_CONNECTED_WIFI.level();
        mWifiScanResultNotConnected.timestamp =
                TimeUnit.MILLISECONDS.toMicros(CURRENT_ELAPSED_TIME_MS - NOT_CONNECTED_WIFI_AGE);
        mWifiScanResultUnknown.SSID = "any_value";
        mWifiScanResultUnknown.BSSID = null; // This is what makes it unknown.
        mWifiScanResultUnknown.level = 0;
        when(mWifiManager.getScanResults())
                .thenReturn(Arrays.asList(
                        mWifiScanResult, mWifiScanResultNotConnected, mWifiScanResultUnknown));
        when(mCellIdentityLte.getCi()).thenReturn(LTE_CELL.cellId().intValue());
        when(mCellIdentityLte.getPci()).thenReturn(LTE_CELL.physicalCellId().intValue());
        when(mCellIdentityLte.getTac()).thenReturn(LTE_CELL.trackingAreaCode().intValue());
        when(mCellIdentityLte.getMcc()).thenReturn(LTE_CELL.mobileCountryCode().intValue());
        when(mCellIdentityLte.getMnc()).thenReturn(LTE_CELL.mobileNetworkCode().intValue());
        when(mCellInfoLte.getCellIdentity()).thenReturn(mCellIdentityLte);
        when(mCellInfoLte.isRegistered()).thenReturn(true);
        when(mCellInfoLte.getTimeStamp())
                .thenReturn(TimeUnit.MILLISECONDS.toNanos(CURRENT_ELAPSED_TIME_MS - LTE_CELL_AGE));
        when(mCellIdentityWcdma.getCid()).thenReturn(WCDMA_CELL.cellId().intValue());
        when(mCellIdentityWcdma.getLac()).thenReturn(WCDMA_CELL.locationAreaCode().intValue());
        when(mCellIdentityWcdma.getMcc()).thenReturn(WCDMA_CELL.mobileCountryCode().intValue());
        when(mCellIdentityWcdma.getMnc()).thenReturn(WCDMA_CELL.mobileNetworkCode().intValue());
        when(mCellIdentityWcdma.getPsc()).thenReturn(WCDMA_CELL.primaryScramblingCode().intValue());
        when(mCellInfoWcdma.getCellIdentity()).thenReturn(mCellIdentityWcdma);
        when(mCellInfoWcdma.isRegistered()).thenReturn(false);
        when(mCellInfoWcdma.getTimeStamp())
                .thenReturn(
                        TimeUnit.MILLISECONDS.toNanos(CURRENT_ELAPSED_TIME_MS - WCDMA_CELL_AGE));
        when(mCellIdentityGsm.getCid()).thenReturn(GSM_CELL.cellId().intValue());
        when(mCellIdentityGsm.getLac()).thenReturn(GSM_CELL.locationAreaCode().intValue());
        when(mCellIdentityGsm.getMcc()).thenReturn(GSM_CELL.mobileCountryCode().intValue());
        when(mCellIdentityGsm.getMnc()).thenReturn(GSM_CELL.mobileNetworkCode().intValue());
        when(mCellInfoGsm.getCellIdentity()).thenReturn(mCellIdentityGsm);
        when(mCellInfoGsm.isRegistered()).thenReturn(false);
        when(mCellInfoGsm.getTimeStamp())
                .thenReturn(TimeUnit.MILLISECONDS.toNanos(CURRENT_ELAPSED_TIME_MS - GSM_CELL_AGE));
        when(mCellIdentityCdma.getBasestationId()).thenReturn(CDMA_CELL.cellId().intValue());
        when(mCellIdentityCdma.getNetworkId()).thenReturn(CDMA_CELL.locationAreaCode().intValue());
        when(mCellIdentityCdma.getSystemId()).thenReturn(CDMA_CELL.mobileNetworkCode().intValue());
        when(mCellInfoCdma.getCellIdentity()).thenReturn(mCellIdentityCdma);
        when(mCellInfoCdma.isRegistered()).thenReturn(false);
        when(mCellInfoCdma.getTimeStamp())
                .thenReturn(TimeUnit.MILLISECONDS.toNanos(CURRENT_ELAPSED_TIME_MS - CDMA_CELL_AGE));
        when(mTelephonyManager.getAllCellInfo())
                .thenReturn(
                        Arrays.asList(mCellInfoLte, mCellInfoWcdma, mCellInfoGsm, mCellInfoCdma));
        allPermissionsGranted();
        when(mContext.registerReceiver(eq(null), any(IntentFilter.class)))
                .thenReturn(mNetworkStateChangedIntent);
        when(mNetworkStateChangedIntent.getParcelableExtra(eq(WifiManager.EXTRA_WIFI_INFO)))
                .thenReturn(mWifiInfo);
    }

    @Test
    public void testGetConnectedCell_allPermissionsDenied() {
        ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.M);
        allPermissionsDenied();
        VisibleCell visibleCell =
                PlatformNetworksManager.getConnectedCell(mContext, mTelephonyManager);
        assertEquals(UNKNOWN_MISSING_LOCATION_PERMISSION_VISIBLE_CELL, visibleCell);
        assertNull(visibleCell.timestampMs());
    }

    @Test
    public void testGetAllVisibleCells_Q() {
        // TODO(chouinard): Add this test once Q is supported by Robolectric (crbug.com/965286).
    }

    @Test
    public void testGetAllVisibleCells_JBMR2() {
        ReflectionHelpers.setStaticField(
                Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.JELLY_BEAN_MR2);
        PlatformNetworksManager.getAllVisibleCells(
                mContext, mTelephonyManager, mVisibleCellCallback);
        verify(mVisibleCellCallback).onResult(mVisibleCellsArgument.capture());
        assertEquals(4, mVisibleCellsArgument.getValue().size());
        for (VisibleCell visibleCell : mVisibleCellsArgument.getValue()) {
            // Expected value goes first in assertEquals so failure messages read correctly.
            switch (visibleCell.radioType()) {
                case RadioType.LTE:
                    assertEquals(LTE_CELL, visibleCell);
                    assertEquals(Long.valueOf(CURRENT_TIME_MS - LTE_CELL_AGE),
                            visibleCell.timestampMs());
                    break;
                case RadioType.WCDMA:
                    assertEquals(WCDMA_CELL, visibleCell);
                    assertEquals(Long.valueOf(CURRENT_TIME_MS - WCDMA_CELL_AGE),
                            visibleCell.timestampMs());
                    break;
                case RadioType.GSM:
                    assertEquals(GSM_CELL, visibleCell);
                    assertEquals(Long.valueOf(CURRENT_TIME_MS - GSM_CELL_AGE),
                            visibleCell.timestampMs());
                    break;
                case RadioType.CDMA:
                    assertEquals(CDMA_CELL, visibleCell);
                    assertEquals(Long.valueOf(CURRENT_TIME_MS - CDMA_CELL_AGE),
                            visibleCell.timestampMs());
                    break;
                default:
                    break;
            }
        }
    }

    @Test
    public void testGetAllVisibleCells_allPermissionsDenied() {
        ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.M);
        allPermissionsDenied();
        PlatformNetworksManager.getAllVisibleCells(
                mContext, mTelephonyManager, mVisibleCellCallback);
        verify(mVisibleCellCallback).onResult(mVisibleCellsArgument.capture());
        // Empty set expected
        assertEquals(0, mVisibleCellsArgument.getValue().size());
    }

    @Test
    public void testGetConnectedWifi() {
        VisibleWifi visibleWifi = PlatformNetworksManager.getConnectedWifi(mContext, mWifiManager);
        assertEquals(CONNECTED_WIFI, visibleWifi);
        // When we get it through get connected wifi, we should see the current time.
        assertEquals(Long.valueOf(CURRENT_TIME_MS), visibleWifi.timestampMs());
    }

    @Test
    public void testGetConnectedWifi_allPermissionsDenied() {
        ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.M);
        allPermissionsDenied();
        VisibleWifi visibleWifi = PlatformNetworksManager.getConnectedWifi(mContext, mWifiManager);
        assertEquals(UNKNOWN_VISIBLE_WIFI, visibleWifi);
        assertNull(visibleWifi.timestampMs());
    }

    @Test
    public void testGetConnectedWifi_locationGrantedWifiDenied() {
        ReflectionHelpers.setStaticField(
                Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.LOLLIPOP);
        locationGrantedWifiDenied();
        VisibleWifi visibleWifi = PlatformNetworksManager.getConnectedWifi(mContext, mWifiManager);
        assertEquals(CONNECTED_WIFI, visibleWifi);
        assertEquals(Long.valueOf(CURRENT_TIME_MS), visibleWifi.timestampMs());
        verifyNetworkStateAction();
    }

    @Test
    public void testGetConnectedWifi_locationGrantedWifiDenied_noWifiInfo() {
        ReflectionHelpers.setStaticField(
                Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.LOLLIPOP);
        locationGrantedWifiDenied();
        when(mNetworkStateChangedIntent.getParcelableExtra(eq(WifiManager.EXTRA_WIFI_INFO)))
                .thenReturn(null);
        VisibleWifi visibleWifi = PlatformNetworksManager.getConnectedWifi(mContext, mWifiManager);
        assertEquals(UNKNOWN_VISIBLE_WIFI, visibleWifi);
        assertNull(visibleWifi.timestampMs());
        verifyNetworkStateAction();
    }

    @Test
    public void testGetConnectedWifi_locationDeniedWifiGranted() {
        ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.M);
        locationDeniedWifiGranted();
        VisibleWifi visibleWifi = PlatformNetworksManager.getConnectedWifi(mContext, mWifiManager);
        assertEquals(UNKNOWN_VISIBLE_WIFI, visibleWifi);
        assertNull(visibleWifi.timestampMs());
    }

    @Test
    public void testGetAllVisibleWifis() {
        Set<VisibleWifi> visibleWifis =
                PlatformNetworksManager.getAllVisibleWifis(mContext, mWifiManager);
        assertEquals(2, visibleWifis.size());
        // When we get all wifis, we should get the scan time.
        for (VisibleWifi visibleWifi : visibleWifis) {
            if (visibleWifi.bssid().equals(CONNECTED_WIFI.bssid())) {
                assertEquals(CONNECTED_WIFI, visibleWifi);
                assertEquals(Long.valueOf(CURRENT_TIME_MS - CONNECTED_WIFI_AGE),
                        visibleWifi.timestampMs());
            } else {
                assertEquals(NOT_CONNECTED_WIFI, visibleWifi);
                assertEquals(Long.valueOf(CURRENT_TIME_MS - NOT_CONNECTED_WIFI_AGE),
                        visibleWifi.timestampMs());
            }
        }
    }

    @Test
    public void testGetAllVisibleWifis_allPermissionsDenied() {
        ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.M);
        allPermissionsDenied();
        Set<VisibleWifi> visibleWifis =
                PlatformNetworksManager.getAllVisibleWifis(mContext, mWifiManager);
        // Empty set expected
        assertTrue(visibleWifis.isEmpty());
    }

    @Test
    public void testGetAllVisibleWifis_locationGrantedWifiDenied() {
        ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.M);
        locationGrantedWifiDenied();
        Set<VisibleWifi> visibleWifis =
                PlatformNetworksManager.getAllVisibleWifis(mContext, mWifiManager);
        // Empty set expected
        assertTrue(visibleWifis.isEmpty());
    }

    @Test
    public void testGetAllVisibleWifis_locationDeniedWifiGranted() {
        ReflectionHelpers.setStaticField(Build.VERSION.class, "SDK_INT", Build.VERSION_CODES.M);
        locationDeniedWifiGranted();
        Set<VisibleWifi> visibleWifis =
                PlatformNetworksManager.getAllVisibleWifis(mContext, mWifiManager);
        // Empty set expected
        assertTrue(visibleWifis.isEmpty());
    }

    @Test
    public void testComputeVisibleNetworks_withoutNonConnectedNetworks() {
        VisibleNetworks expectedVisibleNetworks =
                VisibleNetworks.create(CONNECTED_WIFI, LTE_CELL, null, null);
        VisibleNetworks visibleNetworks =
                PlatformNetworksManager.computeConnectedNetworks(mContext);
        assertEquals(expectedVisibleNetworks, visibleNetworks);
    }

    @Test
    public void testComputeVisibleNetworks_withNonConnectedNetworks() {
        Set<VisibleCell> expectedVisibleCells =
                new HashSet<VisibleCell>(Arrays.asList(LTE_CELL, WCDMA_CELL, GSM_CELL, CDMA_CELL));
        Set<VisibleWifi> expectedVisibleWifis =
                new HashSet<VisibleWifi>(Arrays.asList(CONNECTED_WIFI, NOT_CONNECTED_WIFI));
        VisibleNetworks expectedVisibleNetworks = VisibleNetworks.create(
                CONNECTED_WIFI, LTE_CELL, expectedVisibleWifis, expectedVisibleCells);
        PlatformNetworksManager.computeVisibleNetworks(mContext, mVisibleNetworksCallback);
        verify(mVisibleNetworksCallback).onResult(mVisibleNetworksArgument.capture());
        assertEquals(expectedVisibleNetworks, mVisibleNetworksArgument.getValue());
    }

    @Test
    public void testComputeVisibleNetworks_allPermissionsDenied() {
        allPermissionsDenied();
        PlatformNetworksManager.computeVisibleNetworks(mContext, mVisibleNetworksCallback);
        verify(mVisibleNetworksCallback).onResult(mVisibleNetworksArgument.capture());
        assertTrue(mVisibleNetworksArgument.getValue().isEmpty());
    }

    @Test
    public void testComputeVisibleNetworks_locationGrantedWifiDenied() {
        Set<VisibleCell> expectedVisibleCells =
                new HashSet<VisibleCell>(Arrays.asList(LTE_CELL, WCDMA_CELL, GSM_CELL, CDMA_CELL));
        Set<VisibleWifi> expectedVisibleWifis = Collections.emptySet();
        VisibleNetworks expectedVisibleNetworks = VisibleNetworks.create(
                CONNECTED_WIFI, LTE_CELL, expectedVisibleWifis, expectedVisibleCells);
        locationGrantedWifiDenied();
        PlatformNetworksManager.computeVisibleNetworks(mContext, mVisibleNetworksCallback);
        verify(mVisibleNetworksCallback).onResult(mVisibleNetworksArgument.capture());
        assertEquals(expectedVisibleNetworks, mVisibleNetworksArgument.getValue());
        verifyNetworkStateAction();
    }

    @Test
    public void testComputeVisibleNetworks_locationDeniedWifiGranted() {
        Set<VisibleCell> expectedVisibleCells = Collections.emptySet();
        Set<VisibleWifi> expectedVisibleWifis = Collections.emptySet();
        locationDeniedWifiGranted();
        VisibleNetworks expectedVisibleNetworks =
                VisibleNetworks.create(null, null, expectedVisibleWifis, expectedVisibleCells);
        PlatformNetworksManager.computeVisibleNetworks(mContext, mVisibleNetworksCallback);
        verify(mVisibleNetworksCallback).onResult(mVisibleNetworksArgument.capture());
        assertEquals(expectedVisibleNetworks, mVisibleNetworksArgument.getValue());
    }

    // Permission helpers: (coarse location, fine location, wifi state).
    private void allPermissionsGranted() {
        setPermissions(true, true, true);
    }

    private void allPermissionsDenied() {
        setPermissions(false, false, false);
    }

    private void locationGrantedWifiDenied() {
        setPermissions(true, true, false);
    }

    private void locationDeniedWifiGranted() {
        setPermissions(false, false, true);
    }

    private void setPermissions(
            boolean coarseLocationGranted, boolean fineLocationGranted, boolean wifiStateGranted) {
        int coarseLocationPermission = (coarseLocationGranted) ? PackageManager.PERMISSION_GRANTED
                                                               : PackageManager.PERMISSION_DENIED;
        int fineLocationPermission = (fineLocationGranted) ? PackageManager.PERMISSION_GRANTED
                                                           : PackageManager.PERMISSION_DENIED;
        int wifiStatePermission = (wifiStateGranted) ? PackageManager.PERMISSION_GRANTED
                                                     : PackageManager.PERMISSION_DENIED;
        when(mContext.checkPermission(eq(Manifest.permission.ACCESS_COARSE_LOCATION),
                     any(Integer.class), any(Integer.class)))
                .thenReturn(coarseLocationPermission);
        when(mContext.checkPermission(eq(Manifest.permission.ACCESS_FINE_LOCATION),
                     any(Integer.class), any(Integer.class)))
                .thenReturn(fineLocationPermission);
        when(mContext.checkPermission(eq(Manifest.permission.ACCESS_WIFI_STATE), any(Integer.class),
                     any(Integer.class)))
                .thenReturn(wifiStatePermission);
    }

    // Verifies that the production code registered a receiver for wifi network-state changes.
    private void verifyNetworkStateAction() {
        verify(mContext).registerReceiver(eq(null), argThat(new ArgumentMatcher<IntentFilter>() {
            @Override
            public boolean matches(IntentFilter intentFilter) {
                return intentFilter.hasAction(WifiManager.NETWORK_STATE_CHANGED_ACTION);
            }
        }));
    }
}
| |
/*
* $Id: ELSubmitTagBeanInfo.java 479635 2006-11-27 14:27:18Z pbenedict $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.strutsel.taglib.html;
import java.beans.IntrospectionException;
import java.beans.PropertyDescriptor;
import java.beans.SimpleBeanInfo;
import java.util.ArrayList;
/**
* This is the <code>BeanInfo</code> descriptor for the
* <code>org.apache.strutsel.taglib.html.ELSubmitTag</code> class. It is
* needed to override the default mapping of custom tag attribute names to
* class attribute names. <p> This is because the value of the unevaluated EL
* expression has to be kept separately from the evaluated value, which is
* stored in the base class. This is related to the fact that the JSP compiler
* can choose to reuse different tag instances if they received the same
* original attribute values, and the JSP compiler can choose to not re-call
* the setter methods, because it can assume the same values are already set.
*/
public class ELSubmitTagBeanInfo extends SimpleBeanInfo {
public PropertyDescriptor[] getPropertyDescriptors() {
ArrayList proplist = new ArrayList();
try {
proplist.add(new PropertyDescriptor("accesskey", ELSubmitTag.class,
null, "setAccesskeyExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("alt", ELSubmitTag.class, null,
"setAltExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("altKey", ELSubmitTag.class,
null, "setAltKeyExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("bundle", ELSubmitTag.class,
null, "setBundleExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("dir", ELSubmitTag.class,
null, "setDirExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("disabled", ELSubmitTag.class,
null, "setDisabledExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("indexed", ELSubmitTag.class,
null, "setIndexedExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("lang", ELSubmitTag.class,
null, "setLangExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onblur", ELSubmitTag.class,
null, "setOnblurExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onchange", ELSubmitTag.class,
null, "setOnchangeExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onclick", ELSubmitTag.class,
null, "setOnclickExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("ondblclick",
ELSubmitTag.class, null, "setOndblclickExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onfocus", ELSubmitTag.class,
null, "setOnfocusExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onkeydown", ELSubmitTag.class,
null, "setOnkeydownExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onkeypress",
ELSubmitTag.class, null, "setOnkeypressExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onkeyup", ELSubmitTag.class,
null, "setOnkeyupExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onmousedown",
ELSubmitTag.class, null, "setOnmousedownExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onmousemove",
ELSubmitTag.class, null, "setOnmousemoveExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onmouseout",
ELSubmitTag.class, null, "setOnmouseoutExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onmouseover",
ELSubmitTag.class, null, "setOnmouseoverExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("onmouseup", ELSubmitTag.class,
null, "setOnmouseupExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("property", ELSubmitTag.class,
null, "setPropertyExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("style", ELSubmitTag.class,
null, "setStyleExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("styleClass",
ELSubmitTag.class, null, "setStyleClassExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("styleId", ELSubmitTag.class,
null, "setStyleIdExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("tabindex", ELSubmitTag.class,
null, "setTabindexExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("title", ELSubmitTag.class,
null, "setTitleExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("titleKey", ELSubmitTag.class,
null, "setTitleKeyExpr"));
} catch (IntrospectionException ex) {
}
try {
proplist.add(new PropertyDescriptor("value", ELSubmitTag.class,
null, "setValueExpr"));
} catch (IntrospectionException ex) {
}
PropertyDescriptor[] result = new PropertyDescriptor[proplist.size()];
return ((PropertyDescriptor[]) proplist.toArray(result));
}
}
| |
package com.github.chrisblutz.trinity.interpreter;
import com.github.chrisblutz.trinity.interpreter.actions.ArgumentProcedureAction;
import com.github.chrisblutz.trinity.interpreter.actions.ExpressionProcedureAction;
import com.github.chrisblutz.trinity.interpreter.helpers.KeywordExpressionHelper;
import com.github.chrisblutz.trinity.interpreter.instructions.*;
import com.github.chrisblutz.trinity.lang.errors.Errors;
import com.github.chrisblutz.trinity.lang.procedures.ProcedureAction;
import com.github.chrisblutz.trinity.lang.procedures.TYProcedure;
import com.github.chrisblutz.trinity.parser.blocks.Block;
import com.github.chrisblutz.trinity.parser.blocks.BlockLine;
import com.github.chrisblutz.trinity.parser.lines.Line;
import com.github.chrisblutz.trinity.parser.tokens.Token;
import com.github.chrisblutz.trinity.parser.tokens.TokenInfo;
import com.github.chrisblutz.trinity.runner.Runner;
import java.util.*;
/**
* @author Christopher Lutz
*/
public class ExpressionInterpreter {

    // Environment for the block currently being interpreted.  Reassigned at
    // the top of every interpret() call, so recursive calls reuse whatever
    // the outermost caller supplied.
    // NOTE(review): a mutable static makes concurrent interpretation unsafe —
    // confirm interpretation is single-threaded.
    private static InterpretEnvironment environment;

    /**
     * Interprets each line of the given parsed block into an
     * {@code InstructionSet} and bundles the results into a single
     * {@code ExpressionProcedureAction}.
     * <p>
     * A {@code Block} element that immediately follows a line is treated as
     * that line's attached sub-block (loop/method/conditional body) and is
     * consumed together with the line rather than interpreted on its own.
     *
     * @param block             the parsed block to interpret
     * @param environment       environment the interpretation runs in
     * @param errorClass        class name used when reporting errors
     * @param method            method name used when reporting errors
     * @param includeStackTrace whether the produced action records stack-trace info
     * @return a procedure action that executes the block's instruction sets in order
     */
    public static ProcedureAction interpret(Block block, InterpretEnvironment environment, String errorClass, String method, boolean includeStackTrace) {

        ExpressionInterpreter.environment = environment;

        List<InstructionSet> sets = new ArrayList<>();

        for (int i = 0; i < block.size(); i++) {

            Line line = ((BlockLine) block.get(i)).getLine();
            String fileName = block.getFileName();
            int lineNumber = line.getLineNumber();
            // Keep the runner's current-location bookkeeping in sync with the
            // line being interpreted, so errors report the right position.
            Runner.updateLocation(fileName, lineNumber);

            // If the next element is a nested block, attach it to this line
            // and advance the loop index past it.
            Block nextBlock = null;
            if (i + 1 < block.size() && block.get(i + 1) instanceof Block) {

                nextBlock = (Block) block.get(++i);
            }

            if (line.size() > 0) {

                InstructionSet set = interpretExpression(block, line.toArray(), new Location(fileName, block.getFullFile(), lineNumber), errorClass, method, nextBlock);
                // interpretExpression() returns null for keyword statements
                // whose result is attached to a previous statement (see
                // KeywordExpressions.getKeywordAddDirect) instead of being
                // added to this block directly.
                if (set != null) {

                    sets.add(set);
                }
            }
        }

        return new ExpressionProcedureAction(errorClass, method, includeStackTrace, sets.toArray(new InstructionSet[sets.size()]));
    }

    /**
     * Interprets one logical line (plus its optional trailing block).
     * <p>
     * Lines starting with a keyword token are validated against the keyword's
     * constraints and delegated to its {@code KeywordExpressionHelper}.  All
     * other lines are interpreted as compound expressions; a trailing block,
     * optionally with {@code |params|} before the line end, becomes a
     * {@code TYProcedure} passed along as the expression's block argument.
     *
     * @return the instruction set for the line, or {@code null} when the
     *         keyword result is attached elsewhere or a syntax error was reported
     */
    public static InstructionSet interpretExpression(Block block, TokenInfo[] tokens, Location location, String errorClass, String method, Block nextBlock) {

        Token first = tokens[0].getToken();
        if (KeywordExpressions.isKeyword(first)) {

            // Keyword statement (e.g. if/while).  Interpret the attached
            // block first so it can be handed to the keyword helper.
            ProcedureAction next = null;
            if (nextBlock != null) {

                next = interpret(nextBlock, environment, errorClass, method, false);
            }

            if (KeywordExpressions.checkConstraints(block, first)) {

                // Drop the keyword token itself.
                TokenInfo[] strippedTokens = new TokenInfo[tokens.length - 1];
                System.arraycopy(tokens, 1, strippedTokens, 0, strippedTokens.length);

                // Unwrap one pair of parentheses that encloses the entire
                // remainder, e.g. 'if (x)' -> 'if x'.
                if (strippedTokens.length > 0 && strippedTokens[0].getToken() == Token.LEFT_PARENTHESIS && strippedTokens[strippedTokens.length - 1].getToken() == Token.RIGHT_PARENTHESIS && checkWrappingOnFirst(strippedTokens)) {

                    TokenInfo[] temp = new TokenInfo[strippedTokens.length - 2];
                    System.arraycopy(strippedTokens, 1, temp, 0, temp.length);
                    strippedTokens = temp;
                }

                // Collect the expression components the keyword expects,
                // split on the keyword's delimiter when it declares one.
                int comp = KeywordExpressions.getKeywordComponents(first);
                List<InstructionSet> components = new ArrayList<>();
                if (comp > 0) {

                    if (strippedTokens.length > 0) {

                        Token delimiter = KeywordExpressions.getKeywordDelimiter(first);
                        if (delimiter == null) {

                            components.add(interpretCompoundExpression(strippedTokens, location, errorClass, method, null));

                        } else {

                            InstructionSet[] sets = splitExpressions(strippedTokens, delimiter, location, errorClass, method, null);
                            components.addAll(Arrays.asList(sets));
                        }

                        // Too many components is always an error; too few is
                        // only an error for "rigid" keywords.
                        if (components.size() > comp || (components.size() < comp && KeywordExpressions.isKeywordRigid(first))) {

                            Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, "'" + first.getLiteral() + "' statements require " + comp + " expressions, found " + components.size() + ".", location.getFileName(), location.getLineNumber());
                        }

                    } else if (KeywordExpressions.isKeywordRigid(first)) {

                        Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, "'" + first.getLiteral() + "' statements require " + comp + " expressions, found " + components.size() + ".", location.getFileName(), location.getLineNumber());
                    }

                } else if (strippedTokens.length > 0) {

                    // Keyword takes no components but tokens were supplied.
                    Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, "'" + first.getLiteral() + "' statements require " + comp + " expressions, found " + components.size() + ".", location.getFileName(), location.getLineNumber());
                }

                InstructionSet[] sets = components.toArray(new InstructionSet[components.size()]);
                KeywordExpressionHelper helper = KeywordExpressions.getKeywordHelper(first);
                InstructionSet thisSet = helper.interpret(sets, next, location);

                if (KeywordExpressions.runConstraintHelper(block, first, thisSet)) {

                    KeywordExpressions.updatePrevious(block, first, thisSet);

                    // Some keywords (e.g. an 'else' clause, presumably) attach
                    // themselves to a previous statement instead of being
                    // added directly — those return null here.
                    if (KeywordExpressions.getKeywordAddDirect(first)) {

                        return thisSet;

                    } else {

                        return null;
                    }

                } else {

                    Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, KeywordExpressions.getConstraintMessage(first), location.getFileName(), location.getLineNumber());
                }

            } else {

                Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, KeywordExpressions.getConstraintMessage(first), location.getFileName(), location.getLineNumber());
            }

        } else {

            // Non-keyword line: a compound expression, optionally followed by
            // a block procedure with |param| declarations at the line's end.
            TokenInfo[] expression = tokens;
            TYProcedure next = null;
            if (nextBlock != null) {

                ProcedureAction action = interpret(nextBlock, environment, errorClass, method, true);

                if (tokens[tokens.length - 1].getToken() == Token.VERTICAL_BAR) {

                    // Walk backwards from the closing '|' collecting the
                    // block's parameter tokens until the opening '|'.
                    List<TokenInfo> params = new ArrayList<>();
                    int i = tokens.length - 2;
                    for (; i >= 0; i--) {

                        TokenInfo token = tokens[i];
                        if (token.getToken() == Token.VERTICAL_BAR) {

                            break;

                        } else {

                            params.add(0, token);
                        }
                    }

                    Parameters parameters = interpretParameters(params.toArray(new TokenInfo[params.size()]), location, errorClass, method);

                    // Everything before the opening '|' is the expression.
                    expression = new TokenInfo[i];
                    System.arraycopy(tokens, 0, expression, 0, expression.length);

                    next = new TYProcedure(action, parameters.getMandatoryParameters(), parameters.getOptionalParameters(), parameters.getBlockParameter(), parameters.getOverflowParameter(), false);

                } else {

                    next = new TYProcedure(action, false);
                }
            }

            return interpretCompoundExpression(expression, location, errorClass, method, next);
        }

        return null;
    }

    /**
     * Returns {@code true} when the parenthesis opened at index 0 closes only
     * at the final token — i.e. the parentheses wrap the whole expression
     * rather than just its head (distinguishes {@code (a + b)} from
     * {@code (a) + (b)}).  Assumes {@code tokens[0]} is a left parenthesis.
     */
    private static boolean checkWrappingOnFirst(TokenInfo[] tokens) {

        int level = 0;
        for (int i = 0; i < tokens.length; i++) {

            Token t = tokens[i].getToken();
            if (t == Token.LEFT_PARENTHESIS) {

                level++;

            } else if (t == Token.RIGHT_PARENTHESIS) {

                level--;
            }

            // Level returning to 0 before the last token means the opening
            // parenthesis closed early.
            if (level == 0 && i < tokens.length - 1) {

                return false;
            }
        }

        return true;
    }

    /**
     * Interprets a compound (non-keyword) expression into an instruction set.
     * Dispatches, in precedence order, on: assignment, ternary, logical
     * operators, binary operators, ranges, unary operators, dot access,
     * single-token keywords/literals, and finally the bracketed forms
     * (index access, parentheses, array/map literals, variables, method calls).
     *
     * @param next block procedure attached to the expression, or {@code null}
     * @return the interpreted instruction set, or {@code null} after a
     *         reported syntax error
     */
    public static InstructionSet interpretCompoundExpression(TokenInfo[] tokens, Location location, String errorClass, String method, TYProcedure next) {

        Token[] logicalOperators = LogicalOperator.getOperators().toArray(new Token[LogicalOperator.getOperators().size()]);
        if (containsOnFirstLevel(tokens, AssignmentOperators.getAssignmentTokens())) {
            // Signifies assignment (ex. x = 10)

            if (containsOnFirstLevel(tokens, Token.COMMA)) {

                // Comma-separated assignments become sibling instruction sets.
                InstructionSet[] sets = splitExpressions(tokens, Token.COMMA, location, errorClass, method, next);
                return new InstructionSet(sets, location);

            } else {

                // Split into left-hand side and value around the (last
                // top-level occurrence of the) assignment operator.
                Token token = findOnFirstLevel(tokens, AssignmentOperators.getAssignmentTokens());
                List<List<TokenInfo>> tokenSets = splitTokens(tokens, token);
                TokenInfo[] assignmentTokens = tokenSets.get(0).toArray(new TokenInfo[tokenSets.get(0).size()]);
                TokenInfo[] valueTokens = tokenSets.get(1).toArray(new TokenInfo[tokenSets.get(1).size()]);
                InstructionSet value = interpretCompoundExpression(valueTokens, location, errorClass, method, next);

                if (assignmentTokens[assignmentTokens.length - 1].getToken() == Token.RIGHT_SQUARE_BRACKET) {

                    // Indexed assignment, e.g. x[i] = value.
                    // NOTE(review): the bracket search scans the FULL 'tokens'
                    // array (which includes the value after the assignment
                    // operator) while the index tokens are copied from
                    // 'assignmentTokens'.  If the value itself ends in a
                    // bracketed expression (e.g. x[i] = arr[j]) the backward
                    // scan would find that bracket instead — confirm whether
                    // 'assignmentTokens' was intended here.
                    int loc = findBracketBeginning(tokens, location);
                    TokenInfo[] strippedTokens = new TokenInfo[assignmentTokens.length - loc - 2];
                    System.arraycopy(assignmentTokens, loc + 1, strippedTokens, 0, strippedTokens.length);
                    InstructionSet[] indices = splitExpressions(strippedTokens, Token.COMMA, location, errorClass, method, null);
                    TokenInfo[] objectTokens = new TokenInfo[loc];
                    System.arraycopy(tokens, 0, objectTokens, 0, objectTokens.length);
                    InstructionSet object = interpretCompoundExpression(objectTokens, location, errorClass, method, null);

                    return new InstructionSet(new Instruction[]{new IndexAssignmentInstruction(token, object, indices, value, location)}, location);

                } else {

                    // Plain assignment.  Interpret the left side, peel off its
                    // final instruction (the variable being assigned) and turn
                    // it into a location retriever; the rest stays as the
                    // object expression the variable is resolved against.
                    InstructionSet assignmentObject = interpretCompoundExpression(assignmentTokens, location, errorClass, method, null);
                    Instruction[] instructions = assignmentObject.getInstructions();
                    Instruction[] remainder = new Instruction[instructions.length - 1];
                    System.arraycopy(instructions, 0, remainder, 0, remainder.length);
                    Instruction end = instructions[instructions.length - 1];
                    // Unwrap a single-instruction nested set.
                    if (end instanceof InstructionSet && ((InstructionSet) end).getInstructions().length == 1) {

                        end = ((InstructionSet) end).getInstructions()[0];
                    }
                    InstructionSet remainderSet = new InstructionSet(remainder, assignmentObject.getLocation());

                    VariableLocRetriever retriever = null;
                    if (end instanceof GlobalVariableInstruction) {

                        retriever = new GlobalVariableLocRetriever(((GlobalVariableInstruction) end).getName());

                    } else if (end instanceof SingleTokenInstruction) {

                        retriever = new SingleTokenVariableLocRetriever(((SingleTokenInstruction) end).getContents());

                    } else {

                        Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, "Invalid left-hand expression.", location.getFileName(), location.getLineNumber());
                    }

                    return new InstructionSet(new Instruction[]{new AssignmentInstruction(token, remainderSet, retriever, value, location)}, location);
                }
            }

        } else if (containsOnFirstLevelSequentially(tokens, Token.QUESTION_MARK, Token.COLON)) {
            // Signifies ternary operator usage (ex. x ? y : z)

            List<List<TokenInfo>> firstSplit = splitTokens(tokens, Token.QUESTION_MARK);
            List<TokenInfo> firstHalfList = firstSplit.get(0);
            TokenInfo[] firstHalf = firstHalfList.toArray(new TokenInfo[firstHalfList.size()]);
            List<TokenInfo> secondHalfList = firstSplit.get(1);
            TokenInfo[] secondHalf = secondHalfList.toArray(new TokenInfo[secondHalfList.size()]);
            InstructionSet first = interpretCompoundExpression(firstHalf, location, errorClass, method, null);
            InstructionSet[] secondSplit = splitByToken(secondHalf, Token.COLON, location, errorClass, method, next);

            return new InstructionSet(new Instruction[]{new TernaryOperatorInstruction(first, secondSplit[0], secondSplit[1], location)}, location);

        } else if (containsOnFirstLevel(tokens, logicalOperators)) {
            // Signifies binary logical operator usage (ex. x && y)

            Token token = findOnFirstLevel(tokens, logicalOperators);
            InstructionSet[] sets = splitByToken(tokens, token, location, errorClass, method, next);
            return new InstructionSet(new Instruction[]{sets[0], new LogicalOperatorInstruction(LogicalOperator.getOperator(token), sets[1], location)}, location);

        } else if (containsOperatorOnFirstLevel(tokens)) {
            // Signifies binary operator usage (ex. 1 + 2)

            Token token = findOperatorOnFirstLevel(tokens);
            InstructionSet[] sets = splitByTokenWithOperators(tokens, token, location, errorClass, method, next);
            return new InstructionSet(new Instruction[]{sets[0], new BinaryOperatorInstruction(BinaryOperator.getOperator(token), sets[1], location)}, location);

        } else if (containsOnFirstLevel(tokens, Token.DOUBLE_DOT, Token.TRIPLE_DOT)) {
            // Signifies range initialization (ex. 1..10)

            Token token = findOnFirstLevel(tokens, Token.DOUBLE_DOT, Token.TRIPLE_DOT);
            InstructionSet[] sets = splitByToken(tokens, token, location, errorClass, method, next);
            return new InstructionSet(new Instruction[]{new RangeCreationInstruction(token, sets[0], sets[1], location)}, location);

        } else if (tokens.length > 0 && UnaryOperator.getOperators().contains(tokens[0].getToken())) {
            // Signifies unary operator use (ex. -10)

            Token token = tokens[0].getToken();
            TokenInfo[] strippedTokens = new TokenInfo[tokens.length - 1];
            System.arraycopy(tokens, 1, strippedTokens, 0, strippedTokens.length);
            InstructionSet set = interpretCompoundExpression(strippedTokens, location, errorClass, method, next);
            return new InstructionSet(new Instruction[]{set, new UnaryOperatorInstruction(UnaryOperator.getOperator(token), location)}, location);

        } else if (containsOnFirstLevel(tokens, Token.DOT_OPERATOR)) {
            // Signifies expressions separated by a dot operator (ex. X.y)

            InstructionSet[] sets = splitByToken(tokens, Token.DOT_OPERATOR, location, errorClass, method, next);
            return new InstructionSet(new Instruction[]{sets[0], sets[1]}, location);

        } else if (tokens.length == 1 && Keywords.getTokens().contains(tokens[0].getToken())) {
            // Signifies keywords (super, nil, etc.), as well as literal and numeric strings

            return new InstructionSet(new Instruction[]{new KeywordInstruction(tokens[0], location)}, location);

        } else {

            int loc;
            if (tokens.length > 0 && tokens[tokens.length - 1].getToken() == Token.RIGHT_SQUARE_BRACKET && ((loc = findBracketBeginning(tokens, location)) > 0)) {
                // Signifies a [] call on an object

                TokenInfo[] strippedTokens = new TokenInfo[tokens.length - loc - 2];
                System.arraycopy(tokens, loc + 1, strippedTokens, 0, strippedTokens.length);
                InstructionSet[] indices = splitExpressions(strippedTokens, Token.COMMA, location, errorClass, method, null);
                TokenInfo[] objectTokens = new TokenInfo[loc];
                System.arraycopy(tokens, 0, objectTokens, 0, objectTokens.length);
                InstructionSet object = interpretCompoundExpression(objectTokens, location, errorClass, method, null);

                return new InstructionSet(new Instruction[]{object, new IndexAccessInstruction(indices, next, location)}, location);

            } else if (tokens.length > 0 && tokens[0].getToken() == Token.LEFT_PARENTHESIS) {
                // Signifies an expression wrapped in parentheses

                checkWrapping(tokens, Token.LEFT_PARENTHESIS, Token.RIGHT_PARENTHESIS, "Unmatched parentheses.", location);

                TokenInfo[] strippedTokens = new TokenInfo[tokens.length - 2];
                System.arraycopy(tokens, 1, strippedTokens, 0, strippedTokens.length);

                // NOTE(review): the attached block procedure 'next' is dropped
                // here (null is passed) — confirm a block after a fully
                // parenthesized expression is meant to be discarded.
                return interpretCompoundExpression(strippedTokens, location, errorClass, method, null);

            } else if (tokens.length > 0 && tokens[0].getToken() == Token.LEFT_SQUARE_BRACKET) {
                // Signifies array initialization

                checkWrapping(tokens, Token.LEFT_SQUARE_BRACKET, Token.RIGHT_SQUARE_BRACKET, "Unmatched brackets.", location);

                TokenInfo[] strippedTokens = new TokenInfo[tokens.length - 2];
                System.arraycopy(tokens, 1, strippedTokens, 0, strippedTokens.length);

                InstructionSet[] arrayComponents = splitExpressions(strippedTokens, Token.COMMA, location, errorClass, method, null);
                return new InstructionSet(new Instruction[]{new ArrayInitializationInstruction(arrayComponents, location)}, location);

            } else if (tokens.length > 0 && tokens[0].getToken() == Token.LEFT_CURLY_BRACKET) {
                // Signifies map initialization

                checkWrapping(tokens, Token.LEFT_CURLY_BRACKET, Token.RIGHT_CURLY_BRACKET, "Unmatched brackets.", location);

                TokenInfo[] strippedTokens = new TokenInfo[tokens.length - 2];
                System.arraycopy(tokens, 1, strippedTokens, 0, strippedTokens.length);

                // Each comma-separated entry is split once more on ':' into a
                // key/value pair of instruction sets.
                List<List<TokenInfo>> tokenSets = splitTokens(strippedTokens, Token.COMMA);
                List<InstructionSet[]> mapComponents = new ArrayList<>();
                for (List<TokenInfo> set : tokenSets) {

                    TokenInfo[] array = set.toArray(new TokenInfo[set.size()]);
                    mapComponents.add(splitByToken(array, Token.COLON, location, errorClass, method, null));
                }

                return new InstructionSet(new Instruction[]{new MapInitializationInstruction(mapComponents, location)}, location);

            } else if (tokens.length == 2 && tokens[0].getToken() == Token.GLOBAL_VAR && tokens[1].getToken() == Token.NON_TOKEN_STRING) {
                // Signifies global variable

                return new InstructionSet(new Instruction[]{new GlobalVariableInstruction(tokens[1].getContents(), location)}, location);

            } else if (tokens.length == 1 && tokens[0].getToken() == Token.NON_TOKEN_STRING) {
                // Signifies single-token name (class/module names, variables, etc.)

                return new InstructionSet(new Instruction[]{new SingleTokenInstruction(tokens[0].getContents(), location)}, location);

            } else if (tokens.length > 1 && tokens[1].getToken() == Token.LEFT_PARENTHESIS) {
                // Signifies a method call

                TokenInfo[] paramTokens = new TokenInfo[tokens.length - 1];
                System.arraycopy(tokens, 1, paramTokens, 0, paramTokens.length);
                checkWrapping(paramTokens, Token.LEFT_PARENTHESIS, Token.RIGHT_PARENTHESIS, "Unmatched parentheses.", location);

                TokenInfo[] strippedTokens = new TokenInfo[paramTokens.length - 2];
                System.arraycopy(paramTokens, 1, strippedTokens, 0, strippedTokens.length);

                InstructionSet[] params = splitExpressions(strippedTokens, Token.COMMA, location, errorClass, method, null);
                return new InstructionSet(new Instruction[]{new MethodCallInstruction(tokens[0].getContents(), params, next, location)}, location);

            } else {

                Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, "Unrecognized token.", location.getFileName(), location.getLineNumber());
            }
        }

        return null;
    }

    /**
     * Scans backwards for the top-level {@code [} that opens the trailing
     * bracketed section of {@code tokens}.  Reports a syntax error (and
     * returns 0) when none is found.
     * <p>
     * Note: because the scan runs right-to-left, closing brackets DECREASE
     * the level below zero before their matching opener restores it.
     */
    private static int findBracketBeginning(TokenInfo[] tokens, Location location) {

        int level = 0;
        for (int i = tokens.length - 1; i >= 0; i--) {

            TokenInfo info = tokens[i];
            if (isLevelUpToken(info.getToken())) {

                level++;

            } else if (isLevelDownToken(info.getToken())) {

                level--;
            }

            if (level == 0 && info.getToken() == Token.LEFT_SQUARE_BRACKET) {

                return i;
            }
        }

        Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, "Unmatched brackets.", location.getFileName(), location.getLineNumber());
        return 0;
    }

    /**
     * Verifies that {@code tokens} starts with {@code left}, ends with
     * {@code right}, and that bracketing tokens balance overall; otherwise
     * reports {@code errorMessage} as a syntax error.
     * <p>
     * Note: the balance check counts all three bracket kinds together, so it
     * detects imbalance but not cross-type mismatches.
     */
    private static void checkWrapping(TokenInfo[] tokens, Token left, Token right, String errorMessage, Location location) {

        if (tokens[0].getToken() != left || tokens[tokens.length - 1].getToken() != right) {

            Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, errorMessage, location.getFileName(), location.getLineNumber());
        }

        // Check for imbalanced brackets
        int level = 0;
        for (TokenInfo info : tokens) {

            if (isLevelUpToken(info.getToken())) {

                level++;

            } else if (isLevelDownToken(info.getToken())) {

                level--;
            }
        }

        if (level != 0) {

            Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, errorMessage, location.getFileName(), location.getLineNumber());
        }
    }

    /**
     * Splits {@code tokens} on every top-level {@code delimiter} and
     * interprets each segment as its own compound expression.
     * Note that the same {@code next} procedure is passed to EVERY segment.
     */
    private static InstructionSet[] splitExpressions(TokenInfo[] tokens, Token delimiter, Location location, String errorClass, String method, TYProcedure next) {

        List<List<TokenInfo>> tokenSets = splitTokens(tokens, delimiter);
        List<InstructionSet> sets = new ArrayList<>();

        for (List<TokenInfo> list : tokenSets) {

            TokenInfo[] listArr = list.toArray(new TokenInfo[list.size()]);
            sets.add(interpretCompoundExpression(listArr, location, errorClass, method, next));
        }

        return sets.toArray(new InstructionSet[sets.size()]);
    }

    /**
     * Splits {@code tokens} into segments on every occurrence of
     * {@code delimiter} that sits at bracket level 0.  The delimiter tokens
     * themselves are discarded; a trailing empty segment is dropped.
     */
    public static List<List<TokenInfo>> splitTokens(TokenInfo[] tokens, Token delimiter) {

        List<List<TokenInfo>> sets = new ArrayList<>();
        List<TokenInfo> current = new ArrayList<>();
        int level = 0;

        for (TokenInfo info : tokens) {

            if (level == 0 && info.getToken() == delimiter) {

                // Snapshot the current segment; 'current' is reused.
                List<TokenInfo> tempSet = new ArrayList<>();
                tempSet.addAll(current);
                sets.add(tempSet);
                current.clear();

            } else {

                if (isLevelUpToken(info.getToken())) {

                    level++;

                } else if (isLevelDownToken(info.getToken())) {

                    level--;
                }

                current.add(info);
            }
        }

        if (!current.isEmpty()) {

            sets.add(current);
        }

        return sets;
    }

    /**
     * Like {@link #splitTokens(TokenInfo[], Token)}, but only splits at
     * positions where {@link #checkOperator(TokenInfo[], int, Token)} accepts
     * the delimiter as a binary operator (skipping unary uses such as the
     * '-' in {@code x * -y}).
     */
    private static List<List<TokenInfo>> splitTokensWithOperators(TokenInfo[] tokens, Token delimiter) {

        List<List<TokenInfo>> sets = new ArrayList<>();
        List<TokenInfo> current = new ArrayList<>();
        int level = 0;

        for (int i = 0; i < tokens.length; i++) {

            TokenInfo info = tokens[i];
            if (level == 0 && info.getToken() == delimiter && checkOperator(tokens, i, delimiter)) {

                List<TokenInfo> tempSet = new ArrayList<>();
                tempSet.addAll(current);
                sets.add(tempSet);
                current.clear();

            } else {

                if (isLevelUpToken(info.getToken())) {

                    level++;

                } else if (isLevelDownToken(info.getToken())) {

                    level--;
                }

                current.add(info);
            }
        }

        if (!current.isEmpty()) {

            sets.add(current);
        }

        return sets;
    }

    /**
     * Returns {@code true} if the token at index {@code i} should be treated
     * as a binary operator.  A token that doubles as a unary operator is
     * rejected when it is the first token or directly follows a binary
     * operator.
     */
    private static boolean checkOperator(TokenInfo[] tokens, int i, Token token) {

        if (UnaryOperator.getOperators().contains(token)) {

            // NOTE(review): 'i - 1 > 0' skips the previous-token check when
            // i == 1, so a unary operator at index 1 preceded by a binary
            // operator at index 0 still passes as binary.  'i - 1 >= 0'
            // (redundant with the i == 0 test) looks intended — confirm
            // before changing.
            if (i == 0 || (i - 1 > 0 && BinaryOperator.getOperators().contains(tokens[i - 1].getToken()))) {

                return false;
            }
        }

        return true;
    }

    /**
     * Splits {@code tokens} at the LAST top-level occurrence of
     * {@code delimiter}: all earlier segments are re-joined (with the
     * delimiter restored between them) and interpreted as the first
     * expression; the final segment becomes the second.  The attached
     * {@code next} procedure goes to the second expression only.
     */
    private static InstructionSet[] splitByToken(TokenInfo[] tokens, Token delimiter, Location location, String errorClass, String method, TYProcedure next) {

        List<List<TokenInfo>> tokenSets = splitTokens(tokens, delimiter);

        List<TokenInfo> firstExpression = new ArrayList<>();
        for (int i = 0; i < tokenSets.size() - 1; i++) {

            firstExpression.addAll(tokenSets.get(i));

            // Re-insert the delimiter between (not after) rejoined segments.
            if (i < tokenSets.size() - 2) {

                firstExpression.add(new TokenInfo(delimiter, delimiter.getLiteral()));
            }
        }

        List<TokenInfo> secondExpression = tokenSets.get(tokenSets.size() - 1);

        InstructionSet first = interpretCompoundExpression(firstExpression.toArray(new TokenInfo[firstExpression.size()]), location, errorClass, method, null);
        InstructionSet second = interpretCompoundExpression(secondExpression.toArray(new TokenInfo[secondExpression.size()]), location, errorClass, method, next);

        return new InstructionSet[]{first, second};
    }

    /**
     * Operator-aware variant of
     * {@link #splitByToken(TokenInfo[], Token, Location, String, String, TYProcedure)}:
     * uses {@link #splitTokensWithOperators(TokenInfo[], Token)} so unary uses
     * of {@code delimiter} do not count as split points.
     */
    private static InstructionSet[] splitByTokenWithOperators(TokenInfo[] tokens, Token delimiter, Location location, String errorClass, String method, TYProcedure next) {

        List<List<TokenInfo>> tokenSets = splitTokensWithOperators(tokens, delimiter);

        List<TokenInfo> firstExpression = new ArrayList<>();
        for (int i = 0; i < tokenSets.size() - 1; i++) {

            firstExpression.addAll(tokenSets.get(i));

            if (i < tokenSets.size() - 2) {

                firstExpression.add(new TokenInfo(delimiter, delimiter.getLiteral()));
            }
        }

        List<TokenInfo> secondExpression = tokenSets.get(tokenSets.size() - 1);

        InstructionSet first = interpretCompoundExpression(firstExpression.toArray(new TokenInfo[firstExpression.size()]), location, errorClass, method, null);
        InstructionSet second = interpretCompoundExpression(secondExpression.toArray(new TokenInfo[secondExpression.size()]), location, errorClass, method, next);

        return new InstructionSet[]{first, second};
    }

    /**
     * Parses a comma-separated parameter list into its four kinds:
     * <ul>
     * <li>{@code name} — mandatory parameter</li>
     * <li>{@code name = expr} — optional parameter with a default-value action</li>
     * <li>block-prefix + {@code name} — the block parameter</li>
     * <li>{@code ...} + {@code name} — the overflow (varargs) parameter</li>
     * </ul>
     * Anything else reports an "Unrecognized expression." syntax error.
     */
    public static Parameters interpretParameters(TokenInfo[] tokens, Location location, String errorClass, String method) {

        List<String> mandatory = new ArrayList<>();
        Map<String, ProcedureAction> optional = new LinkedHashMap<>();
        String block = null;
        String overflow = null;

        List<List<TokenInfo>> tokenSets = splitTokens(tokens, Token.COMMA);

        for (List<TokenInfo> list : tokenSets) {

            if (list.size() == 1 && list.get(0).getToken() == Token.NON_TOKEN_STRING) {

                mandatory.add(list.get(0).getContents());

            } else if (list.size() > 2 && list.get(0).getToken() == Token.NON_TOKEN_STRING && list.get(1).getToken() == Token.ASSIGNMENT_OPERATOR) {

                // Default value: interpret everything after 'name =' lazily
                // via an ArgumentProcedureAction.
                TokenInfo[] fullExp = list.toArray(new TokenInfo[list.size()]);
                TokenInfo[] optionalValue = new TokenInfo[list.size() - 2];
                System.arraycopy(fullExp, 2, optionalValue, 0, optionalValue.length);

                InstructionSet value = interpretCompoundExpression(optionalValue, location, errorClass, method, null);
                ProcedureAction action = new ArgumentProcedureAction(value);

                optional.put(list.get(0).getContents(), action);

            } else if (list.size() == 2 && list.get(1).getToken() == Token.NON_TOKEN_STRING) {

                String contents = list.get(1).getContents();
                if (list.get(0).getToken() == Token.BLOCK_PREFIX) {

                    block = contents;

                } else if (list.get(0).getToken() == Token.TRIPLE_DOT) {

                    overflow = contents;
                }

            } else {

                Errors.throwSyntaxError(Errors.Classes.SYNTAX_ERROR, "Unrecognized expression.", location.getFileName(), location.getLineNumber());
            }
        }

        return new Parameters(mandatory, optional, block, overflow);
    }

    // True for any opening bracket: ( [ {
    private static boolean isLevelUpToken(Token t) {

        return t == Token.LEFT_PARENTHESIS || t == Token.LEFT_SQUARE_BRACKET || t == Token.LEFT_CURLY_BRACKET;
    }

    // True for any closing bracket: ) ] }
    private static boolean isLevelDownToken(Token t) {

        return t == Token.RIGHT_PARENTHESIS || t == Token.RIGHT_SQUARE_BRACKET || t == Token.RIGHT_CURLY_BRACKET;
    }

    /**
     * Returns {@code true} if any of {@code delimiters} occurs outside all
     * brackets.  (A delimiter that is itself a bracket token can never match,
     * since the level branches take precedence.)
     */
    private static boolean containsOnFirstLevel(TokenInfo[] tokens, Token... delimiters) {

        List<Token> tokenList = Arrays.asList(delimiters);
        int level = 0;

        for (TokenInfo info : tokens) {

            if (isLevelUpToken(info.getToken())) {

                level++;

            } else if (isLevelDownToken(info.getToken())) {

                level--;

            } else if (level == 0 && tokenList.contains(info.getToken())) {

                return true;
            }
        }

        return false;
    }

    /**
     * Returns the first of {@code delimiters} found outside all brackets, or
     * {@code null} if none occurs at the top level.
     */
    private static Token findOnFirstLevel(TokenInfo[] tokens, Token... delimiters) {

        List<Token> tokenList = Arrays.asList(delimiters);
        int level = 0;

        for (TokenInfo info : tokens) {

            if (isLevelUpToken(info.getToken())) {

                level++;

            } else if (isLevelDownToken(info.getToken())) {

                level--;

            } else if (level == 0 && tokenList.contains(info.getToken())) {

                return info.getToken();
            }
        }

        return null;
    }

    /**
     * Returns {@code true} if {@code first} occurs at the top level and is
     * later followed, also at the top level, by {@code second} (used to
     * detect the ternary {@code ? :} pair).
     */
    private static boolean containsOnFirstLevelSequentially(TokenInfo[] tokens, Token first, Token second) {

        boolean foundFirst = false;
        int level = 0;

        for (TokenInfo info : tokens) {

            if (isLevelUpToken(info.getToken())) {

                level++;

            } else if (isLevelDownToken(info.getToken())) {

                level--;

            } else if (level == 0 && !foundFirst && info.getToken() == first) {

                foundFirst = true;

            } else if (level == 0 && foundFirst && info.getToken() == second) {

                return true;
            }
        }

        return false;
    }

    /**
     * Returns {@code true} if a binary operator (in its binary role, per
     * {@link #checkOperator(TokenInfo[], int, Token)}) occurs at the top
     * level.
     */
    private static boolean containsOperatorOnFirstLevel(TokenInfo[] tokens) {

        List<Token> tokenList = BinaryOperator.getOperators();
        int level = 0;

        for (int i = 0; i < tokens.length; i++) {

            TokenInfo info = tokens[i];
            if (isLevelUpToken(info.getToken())) {

                level++;

            } else if (isLevelDownToken(info.getToken())) {

                level--;

            } else if (level == 0 && tokenList.contains(info.getToken()) && checkOperator(tokens, i, info.getToken())) {

                return true;
            }
        }

        return false;
    }

    /**
     * Returns the top-level binary operator with the highest precedence,
     * where precedence is the operator's position in
     * {@code BinaryOperator.getOperators()} (lower index wins), or
     * {@code null} when none is present.
     */
    private static Token findOperatorOnFirstLevel(TokenInfo[] tokens) {

        List<Token> tokenList = BinaryOperator.getOperators();
        int level = 0;
        int precedenceIndex = tokenList.size();

        for (int i = 0; i < tokens.length; i++) {

            TokenInfo info = tokens[i];
            if (isLevelUpToken(info.getToken())) {

                level++;

            } else if (isLevelDownToken(info.getToken())) {

                level--;

            } else if (level == 0 && tokenList.contains(info.getToken()) && checkOperator(tokens, i, info.getToken())) {

                // Order of operations
                int index = tokenList.indexOf(info.getToken());
                if (index < precedenceIndex) {

                    precedenceIndex = index;
                }
            }
        }

        if (precedenceIndex < tokenList.size()) {

            return tokenList.get(precedenceIndex);
        }

        return null;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.fieldcaps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.action.ActionType;
import org.elasticsearch.action.NoShardAvailableActionException;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.ChannelActionListener;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.block.ClusterBlockLevel;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.RuntimeFieldType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.search.SearchService;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.internal.AliasFilter;
import org.elasticsearch.search.internal.ShardSearchRequest;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestHandler;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.function.Predicate;
import static org.elasticsearch.action.support.TransportActions.isShardNotAvailableException;
public class TransportFieldCapabilitiesIndexAction
extends HandledTransportAction<FieldCapabilitiesIndexRequest, FieldCapabilitiesIndexResponse> {
private static final Logger logger = LogManager.getLogger(TransportFieldCapabilitiesIndexAction.class);
private static final String ACTION_NAME = FieldCapabilitiesAction.NAME + "[index]";
private static final String ACTION_SHARD_NAME = ACTION_NAME + "[s]";
public static final ActionType<FieldCapabilitiesIndexResponse> TYPE =
new ActionType<>(ACTION_NAME, FieldCapabilitiesIndexResponse::new);
private final ClusterService clusterService;
private final TransportService transportService;
private final SearchService searchService;
private final IndicesService indicesService;
private final Executor executor;
    /**
     * Creates the transport action, wires its service dependencies, and
     * registers the per-shard request handler under {@link #ACTION_SHARD_NAME}.
     *
     * @param clusterService              cluster-state access for shard routing
     * @param transportService            transport layer the shard handler is registered on
     * @param indicesService              access to index shards and field filters
     * @param searchService               retained for shard-level request handling
     *                                    (used outside this constructor)
     * @param threadPool                  supplies the MANAGEMENT-pool executor stored
     *                                    in {@link #executor}
     * @param actionFilters               standard action filters passed to the superclass
     * @param indexNameExpressionResolver injected but not stored here
     */
    @Inject
    public TransportFieldCapabilitiesIndexAction(ClusterService clusterService, TransportService transportService,
                                                 IndicesService indicesService, SearchService searchService, ThreadPool threadPool,
                                                 ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) {
        super(ACTION_NAME, transportService, actionFilters, FieldCapabilitiesIndexRequest::new);
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.searchService = searchService;
        this.indicesService = indicesService;
        // Shard-level field-caps work runs on the management pool.
        this.executor = threadPool.executor(ThreadPool.Names.MANAGEMENT);
        // The handler is registered on SAME, so it must hand off any
        // non-trivial work itself (presumably to 'executor' — see handler).
        transportService.registerRequestHandler(ACTION_SHARD_NAME, ThreadPool.Names.SAME,
            FieldCapabilitiesIndexRequest::new, new ShardTransportHandler());
    }
@Override
protected void doExecute(Task task, FieldCapabilitiesIndexRequest request, ActionListener<FieldCapabilitiesIndexResponse> listener) {
new AsyncShardsAction(request, listener).start();
}
private FieldCapabilitiesIndexResponse shardOperation(final FieldCapabilitiesIndexRequest request) throws IOException {
final ShardId shardId = request.shardId();
final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
final IndexShard indexShard = indexService.getShard(request.shardId().getId());
try (Engine.Searcher searcher = indexShard.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE)) {
final SearchExecutionContext searchExecutionContext = indexService.newSearchExecutionContext(shardId.id(), 0,
searcher, request::nowInMillis, null, Collections.emptyMap());
if (canMatchShard(request, searchExecutionContext) == false) {
return new FieldCapabilitiesIndexResponse(request.index(), Collections.emptyMap(), false);
}
Set<String> fieldNames = new HashSet<>();
for (String field : request.fields()) {
fieldNames.addAll(searchExecutionContext.simpleMatchToIndexNames(field));
}
Predicate<String> fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName());
Map<String, IndexFieldCapabilities> responseMap = new HashMap<>();
for (String field : fieldNames) {
MappedFieldType ft = searchExecutionContext.getFieldType(field);
if (ft != null) {
if (searchExecutionContext.isMetadataField(field)
|| fieldPredicate.test(ft.name())) {
IndexFieldCapabilities fieldCap = new IndexFieldCapabilities(field, ft.familyTypeName(),
ft.isSearchable(), ft.isAggregatable(), ft.meta());
responseMap.put(field, fieldCap);
} else {
continue;
}
// Check the ancestor of the field to find nested and object fields.
// Runtime fields are excluded since they can override any path.
if (ft instanceof RuntimeFieldType == false) {
int dotIndex = ft.name().lastIndexOf('.');
while (dotIndex > -1) {
String parentField = ft.name().substring(0, dotIndex);
if (responseMap.containsKey(parentField)) {
// we added this path on another field already
break;
}
// checks if the parent field contains sub-fields
if (searchExecutionContext.getFieldType(parentField) == null) {
// no field type, it must be an object field
ObjectMapper mapper = searchExecutionContext.getObjectMapper(parentField);
String type = mapper.nested().isNested() ? "nested" : "object";
IndexFieldCapabilities fieldCap = new IndexFieldCapabilities(parentField, type,
false, false, Collections.emptyMap());
responseMap.put(parentField, fieldCap);
}
dotIndex = parentField.lastIndexOf('.');
}
}
}
}
return new FieldCapabilitiesIndexResponse(request.index(), responseMap, true);
}
}
private boolean canMatchShard(FieldCapabilitiesIndexRequest req, SearchExecutionContext searchExecutionContext) throws IOException {
if (req.indexFilter() == null || req.indexFilter() instanceof MatchAllQueryBuilder) {
return true;
}
assert req.nowInMillis() != 0L;
ShardSearchRequest searchRequest = new ShardSearchRequest(req.shardId(), req.nowInMillis(), AliasFilter.EMPTY);
searchRequest.source(new SearchSourceBuilder().query(req.indexFilter()));
return SearchService.queryStillMatchesAfterRewrite(searchRequest, searchExecutionContext);
}
private ClusterBlockException checkGlobalBlock(ClusterState state) {
return state.blocks().globalBlockedException(ClusterBlockLevel.READ);
}
private ClusterBlockException checkRequestBlock(ClusterState state, String concreteIndex) {
return state.blocks().indexBlockedException(ClusterBlockLevel.READ, concreteIndex);
}
/**
* An action that executes on each shard sequentially until it finds one that can match the provided
* {@link FieldCapabilitiesIndexRequest#indexFilter()}. In which case the shard is used
* to create the final {@link FieldCapabilitiesIndexResponse}.
*/
class AsyncShardsAction {
private final FieldCapabilitiesIndexRequest request;
private final DiscoveryNodes nodes;
private final ActionListener<FieldCapabilitiesIndexResponse> listener;
private final GroupShardsIterator<ShardIterator> shardsIt;
private volatile int shardIndex = 0;
private AsyncShardsAction(FieldCapabilitiesIndexRequest request, ActionListener<FieldCapabilitiesIndexResponse> listener) {
this.listener = listener;
ClusterState clusterState = clusterService.state();
if (logger.isTraceEnabled()) {
logger.trace("executing [{}] based on cluster state version [{}]", request, clusterState.version());
}
nodes = clusterState.nodes();
ClusterBlockException blockException = checkGlobalBlock(clusterState);
if (blockException != null) {
throw blockException;
}
this.request = request;
blockException = checkRequestBlock(clusterState, request.index());
if (blockException != null) {
throw blockException;
}
shardsIt = clusterService.operationRouting().searchShards(clusterService.state(),
new String[]{request.index()}, null, null, null, null);
}
public void start() {
tryNext(null, true);
}
private void onFailure(ShardRouting shardRouting, Exception e) {
if (e != null) {
logger.trace(() -> new ParameterizedMessage("{}: failed to execute [{}]", shardRouting, request), e);
}
tryNext(e, false);
}
private ShardRouting nextRoutingOrNull() {
if (shardsIt.size() == 0 || shardIndex >= shardsIt.size()) {
return null;
}
ShardRouting next = shardsIt.get(shardIndex).nextOrNull();
if (next != null) {
return next;
}
moveToNextShard();
return nextRoutingOrNull();
}
private void moveToNextShard() {
++ shardIndex;
}
private void tryNext(@Nullable final Exception lastFailure, boolean canMatchShard) {
ShardRouting shardRouting = nextRoutingOrNull();
if (shardRouting == null) {
if (canMatchShard == false) {
listener.onResponse(new FieldCapabilitiesIndexResponse(request.index(), Collections.emptyMap(), false));
} else {
if (lastFailure == null || isShardNotAvailableException(lastFailure)) {
listener.onFailure(new NoShardAvailableActionException(null,
LoggerMessageFormat.format("No shard available for [{}]", request), lastFailure));
} else {
logger.debug(() -> new ParameterizedMessage("{}: failed to execute [{}]", null, request), lastFailure);
listener.onFailure(lastFailure);
}
}
return;
}
DiscoveryNode node = nodes.get(shardRouting.currentNodeId());
if (node == null) {
onFailure(shardRouting, new NoShardAvailableActionException(shardRouting.shardId()));
} else {
request.shardId(shardRouting.shardId());
if (logger.isTraceEnabled()) {
logger.trace(
"sending request [{}] on node [{}]",
request,
node
);
}
transportService.sendRequest(node, ACTION_SHARD_NAME, request,
new TransportResponseHandler<FieldCapabilitiesIndexResponse>() {
@Override
public FieldCapabilitiesIndexResponse read(StreamInput in) throws IOException {
return new FieldCapabilitiesIndexResponse(in);
}
@Override
public void handleResponse(final FieldCapabilitiesIndexResponse response) {
if (response.canMatch()) {
listener.onResponse(response);
} else {
moveToNextShard();
tryNext(null, false);
}
}
@Override
public void handleException(TransportException exp) {
onFailure(shardRouting, exp);
}
});
}
}
}
private class ShardTransportHandler implements TransportRequestHandler<FieldCapabilitiesIndexRequest> {
@Override
public void messageReceived(final FieldCapabilitiesIndexRequest request,
final TransportChannel channel,
Task task) throws Exception {
if (logger.isTraceEnabled()) {
logger.trace("executing [{}]", request);
}
ActionListener<FieldCapabilitiesIndexResponse> listener = new ChannelActionListener<>(channel, ACTION_SHARD_NAME, request);
executor.execute(ActionRunnable.supply(listener, () -> shardOperation(request)));
}
}
}
| |
/*
* The Plaid API
* The Plaid REST API. Please see https://plaid.com/docs/api for more details.
*
* The version of the OpenAPI document: 2020-09-14_1.79.0
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
package com.plaid.client.model;
import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import com.plaid.client.model.PaymentInitiationConsent;
import com.plaid.client.model.PaymentInitiationConsentConstraints;
import com.plaid.client.model.PaymentInitiationConsentScope;
import com.plaid.client.model.PaymentInitiationConsentStatus;
import com.plaid.client.model.PaymentInitiationRecipientGetResponseAllOf;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
/**
 * Response model for the payment initiation consent "get" endpoint
 * (`/payment_initation/consent/get`, as named by the API spec).
 *
 * <p>Generated-style gson model: each field carries a {@link SerializedName}
 * binding to its wire name, and fluent mutators return {@code this} for chaining.
 */
@ApiModel(description = "PaymentInitiationConsentGetResponse defines the response schema for `/payment_initation/consent/get`")
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaClientCodegen", date = "2022-03-01T23:05:06.764Z[GMT]")
public class PaymentInitiationConsentGetResponse {
    public static final String SERIALIZED_NAME_CONSENT_ID = "consent_id";
    @SerializedName(SERIALIZED_NAME_CONSENT_ID)
    private String consentId;

    public static final String SERIALIZED_NAME_STATUS = "status";
    @SerializedName(SERIALIZED_NAME_STATUS)
    private PaymentInitiationConsentStatus status;

    public static final String SERIALIZED_NAME_CREATED_AT = "created_at";
    @SerializedName(SERIALIZED_NAME_CREATED_AT)
    private OffsetDateTime createdAt;

    public static final String SERIALIZED_NAME_RECIPIENT_ID = "recipient_id";
    @SerializedName(SERIALIZED_NAME_RECIPIENT_ID)
    private String recipientId;

    public static final String SERIALIZED_NAME_REFERENCE = "reference";
    @SerializedName(SERIALIZED_NAME_REFERENCE)
    private String reference;

    public static final String SERIALIZED_NAME_CONSTRAINTS = "constraints";
    @SerializedName(SERIALIZED_NAME_CONSTRAINTS)
    private PaymentInitiationConsentConstraints constraints;

    public static final String SERIALIZED_NAME_SCOPES = "scopes";
    @SerializedName(SERIALIZED_NAME_SCOPES)
    private List<PaymentInitiationConsentScope> scopes = new ArrayList<>();

    public static final String SERIALIZED_NAME_REQUEST_ID = "request_id";
    @SerializedName(SERIALIZED_NAME_REQUEST_ID)
    private String requestId;

    /** Fluent variant of {@link #setConsentId(String)}. */
    public PaymentInitiationConsentGetResponse consentId(String consentId) {
        setConsentId(consentId);
        return this;
    }

    /**
     * The consent ID.
     * @return consentId
     **/
    @ApiModelProperty(required = true, value = "The consent ID.")
    public String getConsentId() {
        return consentId;
    }

    public void setConsentId(String consentId) {
        this.consentId = consentId;
    }

    /** Fluent variant of {@link #setStatus(PaymentInitiationConsentStatus)}. */
    public PaymentInitiationConsentGetResponse status(PaymentInitiationConsentStatus status) {
        setStatus(status);
        return this;
    }

    /**
     * Current status of the consent.
     * @return status
     **/
    @ApiModelProperty(required = true, value = "")
    public PaymentInitiationConsentStatus getStatus() {
        return status;
    }

    public void setStatus(PaymentInitiationConsentStatus status) {
        this.status = status;
    }

    /** Fluent variant of {@link #setCreatedAt(OffsetDateTime)}. */
    public PaymentInitiationConsentGetResponse createdAt(OffsetDateTime createdAt) {
        setCreatedAt(createdAt);
        return this;
    }

    /**
     * Consent creation timestamp, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format.
     * @return createdAt
     **/
    @ApiModelProperty(required = true, value = "Consent creation timestamp, in [ISO 8601](https://wikipedia.org/wiki/ISO_8601) format.")
    public OffsetDateTime getCreatedAt() {
        return createdAt;
    }

    public void setCreatedAt(OffsetDateTime createdAt) {
        this.createdAt = createdAt;
    }

    /** Fluent variant of {@link #setRecipientId(String)}. */
    public PaymentInitiationConsentGetResponse recipientId(String recipientId) {
        setRecipientId(recipientId);
        return this;
    }

    /**
     * The ID of the recipient the payment consent is for.
     * @return recipientId
     **/
    @ApiModelProperty(required = true, value = "The ID of the recipient the payment consent is for.")
    public String getRecipientId() {
        return recipientId;
    }

    public void setRecipientId(String recipientId) {
        this.recipientId = recipientId;
    }

    /** Fluent variant of {@link #setReference(String)}. */
    public PaymentInitiationConsentGetResponse reference(String reference) {
        setReference(reference);
        return this;
    }

    /**
     * A reference for the payment consent.
     * @return reference
     **/
    @ApiModelProperty(required = true, value = "A reference for the payment consent.")
    public String getReference() {
        return reference;
    }

    public void setReference(String reference) {
        this.reference = reference;
    }

    /** Fluent variant of {@link #setConstraints(PaymentInitiationConsentConstraints)}. */
    public PaymentInitiationConsentGetResponse constraints(PaymentInitiationConsentConstraints constraints) {
        setConstraints(constraints);
        return this;
    }

    /**
     * Limits that apply to payments made under this consent.
     * @return constraints
     **/
    @ApiModelProperty(required = true, value = "")
    public PaymentInitiationConsentConstraints getConstraints() {
        return constraints;
    }

    public void setConstraints(PaymentInitiationConsentConstraints constraints) {
        this.constraints = constraints;
    }

    /** Fluent variant of {@link #setScopes(List)}. */
    public PaymentInitiationConsentGetResponse scopes(List<PaymentInitiationConsentScope> scopes) {
        setScopes(scopes);
        return this;
    }

    /** Appends a single scope to the current list and returns {@code this}. */
    public PaymentInitiationConsentGetResponse addScopesItem(PaymentInitiationConsentScope scopesItem) {
        this.scopes.add(scopesItem);
        return this;
    }

    /**
     * An array of payment consent scopes.
     * @return scopes
     **/
    @ApiModelProperty(required = true, value = "An array of payment consent scopes.")
    public List<PaymentInitiationConsentScope> getScopes() {
        return scopes;
    }

    public void setScopes(List<PaymentInitiationConsentScope> scopes) {
        this.scopes = scopes;
    }

    /** Fluent variant of {@link #setRequestId(String)}. */
    public PaymentInitiationConsentGetResponse requestId(String requestId) {
        setRequestId(requestId);
        return this;
    }

    /**
     * A unique identifier for the request, which can be used for troubleshooting. This identifier, like all Plaid identifiers, is case sensitive.
     * @return requestId
     **/
    @ApiModelProperty(required = true, value = "A unique identifier for the request, which can be used for troubleshooting. This identifier, like all Plaid identifiers, is case sensitive.")
    public String getRequestId() {
        return requestId;
    }

    public void setRequestId(String requestId) {
        this.requestId = requestId;
    }

    /** Value equality over every serialized field; exact-class comparison as in all generated models. */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        PaymentInitiationConsentGetResponse other = (PaymentInitiationConsentGetResponse) o;
        return Objects.equals(this.consentId, other.consentId)
                && Objects.equals(this.status, other.status)
                && Objects.equals(this.createdAt, other.createdAt)
                && Objects.equals(this.recipientId, other.recipientId)
                && Objects.equals(this.reference, other.reference)
                && Objects.equals(this.constraints, other.constraints)
                && Objects.equals(this.scopes, other.scopes)
                && Objects.equals(this.requestId, other.requestId);
    }

    @Override
    public int hashCode() {
        return Objects.hash(consentId, status, createdAt, recipientId, reference, constraints, scopes, requestId);
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("class PaymentInitiationConsentGetResponse {\n");
        builder.append(" consentId: ").append(toIndentedString(consentId)).append("\n");
        builder.append(" status: ").append(toIndentedString(status)).append("\n");
        builder.append(" createdAt: ").append(toIndentedString(createdAt)).append("\n");
        builder.append(" recipientId: ").append(toIndentedString(recipientId)).append("\n");
        builder.append(" reference: ").append(toIndentedString(reference)).append("\n");
        builder.append(" constraints: ").append(toIndentedString(constraints)).append("\n");
        builder.append(" scopes: ").append(toIndentedString(scopes)).append("\n");
        builder.append(" requestId: ").append(toIndentedString(requestId)).append("\n");
        builder.append("}");
        return builder.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(Object value) {
        if (value == null) {
            return "null";
        }
        return value.toString().replace("\n", "\n ");
    }
}
| |
package com.alfarabi.alfalibs.helper;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
/**
* Created by shayanpourvatan on 4/16/17.
*/
/**
 * Attaches a scroll listener to a {@link RecyclerView} and fires
 * {@link PaginationInterface#onLoadMore} when the user scrolls within {@code offset}
 * items of the list edge, in the configured {@link Direction}.
 *
 * <p>Build instances via {@link Builder}; the no-arg constructor is intentionally unusable.
 *
 * <p>Fix over the previous revision: for {@link StaggeredGridLayoutManager} the old code
 * read only element {@code [0]} of the per-span position arrays, i.e. it looked at a single
 * span. The overall last visible position is the maximum across spans and the overall first
 * visible position is the minimum, so pagination could fail to trigger (or trigger late) on
 * staggered grids. The span count is now also propagated to {@code columnCount}, matching
 * the {@link GridLayoutManager} branch, so the bottom-offset math is consistent.
 */
public class PaginationHandler {
    private RecyclerView recyclerView;
    private RecyclerView.OnScrollListener onScrollListener;
    private PaginationInterface paginationInterface;
    private RecyclerView.LayoutManager layoutManager;
    private RecyclerView.Adapter adapter;
    private FindLastItemInLayoutManagerInterface findLastItemInLayoutManagerInterface;
    private FindFirstItemInLayoutManagerInterface findFirstItemInLayoutManagerInterface;
    // Number of columns; multiplied into the offset for bottom-direction checks.
    private int columnCount;
    private
    @Direction.PaginationDirection
    int direction;
    private PaginationState paginationState = PaginationState.IDLE;
    // How many items from the edge a load-more should be triggered.
    private int offset;

    /** Lifecycle of a load request. */
    private enum PaginationState {
        IDLE,    // ready to trigger the next load
        LOADING, // a load is currently in flight
        DONE     // use when you receive all pages.
    }

    private PaginationHandler() {
        throw new AssertionError("Create object via builder class");
    }

    private PaginationHandler(RecyclerView recyclerView,
                              RecyclerView.LayoutManager layoutManager,
                              RecyclerView.Adapter adapter,
                              int offsetCount,
                              PaginationInterface paginationInterface,
                              FindLastItemInLayoutManagerInterface findLastItemInLayoutManagerInterface,
                              FindFirstItemInLayoutManagerInterface findFirstItemInLayoutManagerInterface,
                              int columnCount, @Direction.PaginationDirection int direction) {
        this.recyclerView = recyclerView;
        this.adapter = adapter;
        this.layoutManager = layoutManager;
        this.offset = offsetCount;
        this.paginationInterface = paginationInterface;
        this.findLastItemInLayoutManagerInterface = findLastItemInLayoutManagerInterface;
        this.findFirstItemInLayoutManagerInterface = findFirstItemInLayoutManagerInterface;
        this.columnCount = columnCount;
        this.direction = direction;
        initOnScrollListener();
    }

    /**
     * Creates and registers the scroll listener that decides when to trigger a load.
     * Position detection order: caller-supplied interfaces first, then the known
     * layout-manager types; unknown managers throw.
     */
    private void initOnScrollListener() {
        onScrollListener = new RecyclerView.OnScrollListener() {
            @Override
            public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
                // you must set interface to handle pagination
                if (paginationInterface == null) {
                    return;
                }
                // safety check
                if (adapter == null || layoutManager == null) {
                    return;
                }
                // pagination is on progress or is finished.
                if (paginationState != PaginationState.IDLE) {
                    return;
                }
                int lastVisibleItemPosition = 0;
                int firstVisibleItemPosition = adapter.getItemCount();
                if (findLastItemInLayoutManagerInterface != null && direction == Direction.LOAD_FROM_BOTTOM) {
                    lastVisibleItemPosition = findLastItemInLayoutManagerInterface.findLastVisibleItemPosition();
                } else if (findFirstItemInLayoutManagerInterface != null && direction == Direction.LOAD_FROM_TOP) {
                    firstVisibleItemPosition = findFirstItemInLayoutManagerInterface.findFirstVisibleItemPosition();
                } else if (layoutManager instanceof GridLayoutManager) {
                    GridLayoutManager gridLayoutManager = (GridLayoutManager) layoutManager;
                    lastVisibleItemPosition = gridLayoutManager.findLastCompletelyVisibleItemPosition();
                    firstVisibleItemPosition = gridLayoutManager.findFirstVisibleItemPosition();
                    columnCount = gridLayoutManager.getSpanCount();
                } else if (layoutManager instanceof StaggeredGridLayoutManager) {
                    StaggeredGridLayoutManager staggeredGridLayoutManager = (StaggeredGridLayoutManager) layoutManager;
                    int[] lastPositions = staggeredGridLayoutManager.findLastVisibleItemPositions(null);
                    int[] firstPositions = staggeredGridLayoutManager.findFirstVisibleItemPositions(null);
                    // Each span reports its own first/last position. The list-wide last
                    // visible item is the MAX across spans; the first is the MIN.
                    // Reading only [0] (the previous behavior) ignored all other spans.
                    if (lastPositions != null && lastPositions.length > 0) {
                        lastVisibleItemPosition = lastPositions[0];
                        for (int position : lastPositions) {
                            if (position > lastVisibleItemPosition) {
                                lastVisibleItemPosition = position;
                            }
                        }
                    }
                    if (firstPositions != null && firstPositions.length > 0) {
                        firstVisibleItemPosition = firstPositions[0];
                        for (int position : firstPositions) {
                            if (position < firstVisibleItemPosition) {
                                firstVisibleItemPosition = position;
                            }
                        }
                    }
                    // Keep columnCount in sync, as the GridLayoutManager branch does.
                    columnCount = staggeredGridLayoutManager.getSpanCount();
                } else if (layoutManager instanceof LinearLayoutManager) {
                    LinearLayoutManager linearLayoutManager = (LinearLayoutManager) layoutManager;
                    lastVisibleItemPosition = linearLayoutManager.findLastVisibleItemPosition();
                    firstVisibleItemPosition = linearLayoutManager.findFirstVisibleItemPosition();
                } else {
                    throw new IllegalArgumentException("this library don't support custom layoutManager, you must setFindLastItemInLayoutManagerInterface to handle other layoutManager ");
                }
                boolean needLoadMore = needLoadMore(firstVisibleItemPosition, lastVisibleItemPosition);
                if (needLoadMore && paginationState == PaginationState.IDLE) {
                    paginationState = PaginationState.LOADING;
                    paginationInterface.onLoadMore(new PaginationCompletionInterface() {
                        @Override
                        public void handledDataComplete(boolean isLast) {
                            // DONE permanently disables the listener; IDLE re-arms it.
                            if (isLast) {
                                paginationState = PaginationState.DONE;
                            } else {
                                paginationState = PaginationState.IDLE;
                            }
                        }
                    });
                }
            }
        };
        recyclerView.addOnScrollListener(onScrollListener);
    }

    /**
     * Triggers a load explicitly, bypassing the scroll check.
     * NOTE(review): unlike the scroll path this does not check the current state first,
     * so it can fire even while LOADING or after DONE — confirm intended.
     */
    public void loadMore(PaginationCompletionInterface paginationCompletionInterface) {
        if (paginationInterface != null) {
            paginationState = PaginationState.LOADING;
            paginationInterface.onLoadMore(new PaginationCompletionInterface() {
                @Override
                public void handledDataComplete(boolean isLast) {
                    paginationCompletionInterface.handledDataComplete(isLast);
                    if (isLast) {
                        paginationState = PaginationState.DONE;
                    } else {
                        paginationState = PaginationState.IDLE;
                    }
                }
            });
        }
    }

    /**
     * @return true when the visible window is within {@code offset} items
     *         (scaled by {@code columnCount} for bottom loading) of the relevant edge.
     */
    private boolean needLoadMore(int firstVisibleItemPosition, int lastVisibleItemPosition) {
        switch (direction) {
            case Direction.LOAD_FROM_TOP:
                return firstVisibleItemPosition <= offset;
            case Direction.LOAD_FROM_BOTTOM:
                return adapter.getItemCount() - (lastVisibleItemPosition + columnCount * offset) <= 0;
            default:
                throw new IllegalArgumentException("invalid direction");
        }
    }

    /** Fluent builder; {@code recyclerView} with a layout manager and adapter is mandatory. */
    public static class Builder {
        private int offsetCount = 3;
        private int columnCount = 1;
        private RecyclerView recyclerView;
        private PaginationInterface paginationInterface;
        private FindLastItemInLayoutManagerInterface findLastItemInLayoutManagerInterface;
        private FindFirstItemInLayoutManagerInterface findFirstItemInLayoutManagerInterface;
        private
        @Direction.PaginationDirection
        int direction = Direction.LOAD_FROM_BOTTOM;

        public Builder setOffsetCount(int offsetCount) {
            this.offsetCount = offsetCount;
            return this;
        }

        public Builder setRecyclerView(RecyclerView recyclerView) {
            this.recyclerView = recyclerView;
            return this;
        }

        public Builder setLoadMoreListener(PaginationInterface paginationInterface) {
            this.paginationInterface = paginationInterface;
            return this;
        }

        public Builder setFindLastItemInLayoutManagerInterface(FindLastItemInLayoutManagerInterface findLastItemInLayoutManagerInterface) {
            this.findLastItemInLayoutManagerInterface = findLastItemInLayoutManagerInterface;
            return this;
        }

        public Builder setFindFirstItemInLayoutManagerInterface(FindFirstItemInLayoutManagerInterface findFirstItemInLayoutManagerInterface) {
            this.findFirstItemInLayoutManagerInterface = findFirstItemInLayoutManagerInterface;
            return this;
        }

        public Builder setDirection(@Direction.PaginationDirection int direction) {
            this.direction = direction;
            return this;
        }

        public Builder setColumncount(int columnCount) {
            this.columnCount = columnCount;
            return this;
        }

        public PaginationHandler build() {
            if (this.recyclerView == null) {
                throw new AssertionError("recyclerView must be initialized in PaginationHandler");
            }
            if (this.recyclerView.getLayoutManager() == null) {
                throw new AssertionError("layoutManager must be set in recyclerView");
            }
            if (this.recyclerView.getAdapter() == null) {
                throw new AssertionError("adapter must be initialized in PaginationHandler");
            }
            return new PaginationHandler(recyclerView,
                    recyclerView.getLayoutManager(),
                    recyclerView.getAdapter(),
                    offsetCount,
                    paginationInterface,
                    findLastItemInLayoutManagerInterface,
                    findFirstItemInLayoutManagerInterface,
                    columnCount,
                    direction);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connectors.hive;
import org.apache.flink.util.Preconditions;
import org.apache.flink.shaded.guava30.com.google.common.io.Resources;
import com.klarna.hiverunner.HiveServerContainer;
import com.klarna.hiverunner.HiveServerContext;
import com.klarna.hiverunner.HiveShell;
import com.klarna.hiverunner.HiveShellContainer;
import com.klarna.hiverunner.annotations.HiveProperties;
import com.klarna.hiverunner.annotations.HiveResource;
import com.klarna.hiverunner.annotations.HiveRunnerSetup;
import com.klarna.hiverunner.annotations.HiveSQL;
import com.klarna.hiverunner.annotations.HiveSetupScript;
import com.klarna.hiverunner.builder.HiveShellBuilder;
import com.klarna.hiverunner.config.HiveRunnerConfig;
import com.klarna.reflection.ReflectionUtils;
import org.junit.Ignore;
import org.junit.internal.AssumptionViolatedException;
import org.junit.internal.runners.model.EachTestNotifier;
import org.junit.rules.ExternalResource;
import org.junit.rules.TemporaryFolder;
import org.junit.rules.TestRule;
import org.junit.runner.Description;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.FrameworkMethod;
import org.junit.runners.model.InitializationError;
import org.junit.runners.model.Statement;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.reflections.ReflectionUtils.withAnnotation;
/**
* JUnit 4 runner that runs hive sql on a HiveServer residing in this JVM. No external dependencies
* needed. Inspired by StandaloneHiveRunner.java (almost copied).
*/
public class FlinkEmbeddedHiveRunner extends BlockJUnit4ClassRunner {
private static final Logger LOGGER = LoggerFactory.getLogger(FlinkEmbeddedHiveRunner.class);
private HiveShellContainer container;
private final HiveRunnerConfig config = new HiveRunnerConfig();
protected HiveServerContext context;
/**
 * Creates the runner for the given test class; validation is delegated to
 * {@link BlockJUnit4ClassRunner}.
 *
 * @throws InitializationError when the test class is malformed
 */
public FlinkEmbeddedHiveRunner(Class<?> clazz) throws InitializationError {
    super(clazz);
}
/**
 * Adds two class-level rules on top of the inherited ones: an {@link ExternalResource}
 * that creates/tears down the embedded HiveServer container around the class, and the
 * {@link TemporaryFolder} backing the Hive server context.
 *
 * <p>Order matters: the runner config must be loaded from the test class BEFORE the
 * context is constructed, since the context is initialized from that config.
 */
@Override
protected List<TestRule> classRules() {
    // need to load hive runner config before the context is inited
    loadAnnotatesHiveRunnerConfig(getTestClass().getJavaClass());
    final TemporaryFolder temporaryFolder = new TemporaryFolder();
    context = new FlinkEmbeddedHiveServerContext(temporaryFolder, config);
    List<TestRule> rules = super.classRules();
    ExternalResource hiveShell =
            new ExternalResource() {
                @Override
                protected void before() throws Throwable {
                    // Builds and (optionally) starts the shell; stored so after()/tearDown()
                    // can shut it down even when a test fails.
                    container =
                            createHiveServerContainer(getTestClass().getJavaClass(), context);
                }

                @Override
                protected void after() {
                    tearDown();
                }
            };
    rules.add(hiveShell);
    rules.add(temporaryFolder);
    return rules;
}
/**
 * Runs a single test method, honoring {@link Ignore} and always firing the
 * started/finished notifications in pairs.
 */
@Override
protected void runChild(final FrameworkMethod method, RunNotifier notifier) {
    final Description description = describeChild(method);
    // Ignored tests are reported as such and never executed.
    if (method.getAnnotation(Ignore.class) != null) {
        notifier.fireTestIgnored(description);
        return;
    }
    final EachTestNotifier eachNotifier = new EachTestNotifier(notifier, description);
    eachNotifier.fireTestStarted();
    try {
        runTestMethod(method, eachNotifier);
    } finally {
        // Guarantee the finished event even when the method block throws.
        eachNotifier.fireTestFinished();
    }
}
/** Runs a {@link Statement} that represents a leaf (aka atomic) test. */
private void runTestMethod(FrameworkMethod method, EachTestNotifier notifier) {
    Statement statement = methodBlock(method);
    try {
        statement.evaluate();
    } catch (AssumptionViolatedException e) {
        // Violated assumptions are reported as skipped assumptions, not failures.
        notifier.addFailedAssumption(e);
    } catch (Throwable e) {
        // Anything else (including Errors) counts as a test failure.
        notifier.addFailure(e);
    }
}
/**
 * Shuts down the Hive shell container if one was created; failures are logged
 * rather than rethrown so tear-down never masks a test result.
 */
private void tearDown() {
    if (container == null) {
        return;
    }
    LOGGER.info("Tearing down {}", getName());
    try {
        container.tearDown();
    } catch (Throwable e) {
        LOGGER.warn("Tear down failed: " + e.getMessage(), e);
    }
}
/**
 * Traverses the test class annotations. Will inject a HiveShell in the test case that envelopes
 * the HiveServer.
 *
 * <p>The build sequence is order-sensitive: the context must be initialized before the
 * server container is created, the scripts-under-test field must be resolved before the
 * builder is wired to the container, and the shell is injected into the test class before
 * it is (optionally) auto-started.
 */
private HiveShellContainer createHiveServerContainer(
        final Class testClass, HiveServerContext context) throws Exception {
    context.init();
    final HiveServerContainer hiveServerContainer = new HiveServerContainer(context);
    HiveShellBuilder hiveShellBuilder = new HiveShellBuilder();
    // Shim handles version differences in command-shell emulation config.
    HiveRunnerShim hiveRunnerShim = HiveRunnerShimLoader.load();
    hiveRunnerShim.setCommandShellEmulation(hiveShellBuilder, config);
    HiveShellField shellSetter = loadScriptsUnderTest(testClass, hiveShellBuilder);
    hiveShellBuilder.setHiveServerContainer(hiveServerContainer);
    // Feed the builder everything declared via annotations on the test class.
    loadAnnotatedResources(testClass, hiveShellBuilder);
    loadAnnotatedProperties(testClass, hiveShellBuilder);
    loadAnnotatedSetupScripts(testClass, hiveShellBuilder);
    // Build shell
    final HiveShellContainer shell = hiveShellBuilder.buildShell();
    // Set shell
    shellSetter.setShell(shell);
    if (shellSetter.isAutoStart()) {
        shell.start();
    }
    return shell;
}
/**
 * Merges an optional {@link HiveRunnerSetup}-annotated {@link HiveRunnerConfig} field of
 * the test class into this runner's {@link #config}.
 *
 * <p>Fix: the precondition message previously read "Exact one field … should to be
 * annotated", which was both ungrammatical and wrong — the check ({@code fields.size()
 * <= 1}) allows zero fields. The message now states the actual contract.
 *
 * @param testClass the JUnit test class being scanned
 * @throws IllegalStateException when more than one annotated field exists or the
 *         annotated field is not a HiveRunnerConfig
 */
private void loadAnnotatesHiveRunnerConfig(Class testClass) {
    Set<Field> fields =
            ReflectionUtils.getAllFields(testClass, withAnnotation(HiveRunnerSetup.class));
    // The annotation is optional, but at most one field may carry it.
    Preconditions.checkState(
            fields.size() <= 1,
            "At most one field of type HiveRunnerConfig should be annotated with @HiveRunnerSetup");
    // Override the config with test case config. Taking care to not replace the config instance
    // since it has been passed around and referenced by some of the other test rules.
    if (!fields.isEmpty()) {
        Field field = fields.iterator().next();
        Preconditions.checkState(
                ReflectionUtils.isOfType(field, HiveRunnerConfig.class),
                "Field annotated with @HiveRunnerSetup should be of type HiveRunnerConfig");
        config.override(
                ReflectionUtils.getStaticFieldValue(
                        testClass, field.getName(), HiveRunnerConfig.class));
    }
}
/**
 * Resolves the single {@link HiveSQL}-annotated field of the test class: collects its
 * script files (which must exist on the classpath), configures the builder with them,
 * and returns a {@link HiveShellField} used later to inject the built shell into the
 * test class and to signal whether it should auto-start.
 *
 * <p>Fix: the precondition message previously contained the typo "should to be
 * annotated"; it now reads correctly.
 *
 * @throws IllegalArgumentException wrapping any resolution failure (missing field,
 *         missing script file, bad encoding, ...)
 */
private HiveShellField loadScriptsUnderTest(
        final Class testClass, HiveShellBuilder hiveShellBuilder) {
    try {
        Set<Field> fields =
                ReflectionUtils.getAllFields(testClass, withAnnotation(HiveSQL.class));
        Preconditions.checkState(
                fields.size() == 1, "Exactly one field should be annotated with @HiveSQL");
        final Field field = fields.iterator().next();
        List<Path> scripts = new ArrayList<>();
        HiveSQL annotation = field.getAnnotation(HiveSQL.class);
        // Every declared script must resolve to an existing classpath resource.
        for (String scriptFilePath : annotation.files()) {
            Path file = Paths.get(Resources.getResource(scriptFilePath).toURI());
            Preconditions.checkState(Files.exists(file), "File " + file + " does not exist");
            scripts.add(file);
        }
        // Empty encoding string means "use the platform default charset".
        Charset charset =
                annotation.encoding().equals("")
                        ? Charset.defaultCharset()
                        : Charset.forName(annotation.encoding());
        final boolean isAutoStart = annotation.autoStart();
        hiveShellBuilder.setScriptsUnderTest(scripts, charset);
        return new HiveShellField() {
            @Override
            public void setShell(HiveShell shell) {
                ReflectionUtils.setStaticField(testClass, field.getName(), shell);
            }

            @Override
            public boolean isAutoStart() {
                return isAutoStart;
            }
        };
    } catch (Throwable t) {
        throw new IllegalArgumentException(
                "Failed to init field annotated with @HiveSQL: " + t.getMessage(), t);
    }
}
/**
 * Registers every {@link HiveSetupScript}-annotated field of the test class with the
 * builder. String fields are used verbatim; File/Path fields are read from disk.
 */
private void loadAnnotatedSetupScripts(Class testClass, HiveShellBuilder hiveShellBuilder) {
    for (Field scriptField :
            ReflectionUtils.getAllFields(testClass, withAnnotation(HiveSetupScript.class))) {
        if (ReflectionUtils.isOfType(scriptField, String.class)) {
            // Inline script: the field value IS the script text.
            hiveShellBuilder.addSetupScript(
                    ReflectionUtils.getStaticFieldValue(
                            testClass, scriptField.getName(), String.class));
        } else if (ReflectionUtils.isOfType(scriptField, File.class)
                || ReflectionUtils.isOfType(scriptField, Path.class)) {
            // Script on disk: resolve the path (must exist) and load its contents.
            hiveShellBuilder.addSetupScript(readAll(getMandatoryPathFromField(testClass, scriptField)));
        } else {
            throw new IllegalArgumentException(
                    "Field annotated with @HiveSetupScript currently only supports type String, File and Path");
        }
    }
}
/** Reads the whole file at {@code path} and returns its contents decoded as UTF-8. */
private static String readAll(Path path) {
    try {
        byte[] rawBytes = Files.readAllBytes(path);
        return new String(rawBytes, StandardCharsets.UTF_8);
    } catch (IOException ioException) {
        throw new IllegalStateException(
                "Unable to read " + path + ": " + ioException.getMessage(), ioException);
    }
}
/**
 * Registers every static field annotated with {@code @HiveResource} as a resource in the
 * shell, placed at the annotation's {@code targetFile} path. String fields supply the data
 * inline; File/Path fields reference a file whose contents are used.
 *
 * @param testClass the test class to scan for annotated static fields
 * @param workFlowBuilder builder that receives the resources
 * @throws IOException if a resource cannot be transferred
 * @throws IllegalArgumentException if an annotated field has an unsupported type
 */
private void loadAnnotatedResources(Class<?> testClass, HiveShellBuilder workFlowBuilder)
        throws IOException {
    Set<Field> fields =
            ReflectionUtils.getAllFields(testClass, withAnnotation(HiveResource.class));
    for (Field resourceField : fields) {
        HiveResource annotation = resourceField.getAnnotation(HiveResource.class);
        String targetFile = annotation.targetFile();
        if (ReflectionUtils.isOfType(resourceField, String.class)) {
            // Resource data held directly in the field.
            String data =
                    ReflectionUtils.getStaticFieldValue(
                            testClass, resourceField.getName(), String.class);
            workFlowBuilder.addResource(targetFile, data);
        } else if (ReflectionUtils.isOfType(resourceField, File.class)
                || ReflectionUtils.isOfType(resourceField, Path.class)) {
            // Resource data stored in a file referenced by the field.
            Path dataFile = getMandatoryPathFromField(testClass, resourceField);
            workFlowBuilder.addResource(targetFile, dataFile);
        } else {
            throw new IllegalArgumentException(
                    "Fields annotated with @HiveResource currently only supports field type String, File or Path");
        }
    }
}
/**
 * Resolves the {@link Path} held by a static File or Path field and asserts that the
 * referenced file exists on disk.
 *
 * @param testClass class declaring the field
 * @param resourceField static field of type File or Path
 * @return the path the field points at
 * @throws IllegalArgumentException if the field is of any other type, or if the file
 *         does not exist (via Preconditions)
 */
private Path getMandatoryPathFromField(Class<?> testClass, Field resourceField) {
    Path path;
    if (ReflectionUtils.isOfType(resourceField, File.class)) {
        File dataFile =
                ReflectionUtils.getStaticFieldValue(
                        testClass, resourceField.getName(), File.class);
        path = Paths.get(dataFile.toURI());
    } else if (ReflectionUtils.isOfType(resourceField, Path.class)) {
        path =
                ReflectionUtils.getStaticFieldValue(
                        testClass, resourceField.getName(), Path.class);
    } else {
        throw new IllegalArgumentException(
                "Only Path or File type is allowed on annotated field " + resourceField);
    }
    Preconditions.checkArgument(Files.exists(path), "File %s does not exist", path);
    return path;
}
/**
 * Merges every static field annotated with {@code @HiveProperties} — which must be of type
 * {@code Map<String, String>} — into the shell's Hive configuration.
 *
 * @param testClass the test class to scan for annotated static fields
 * @param workFlowBuilder builder that receives the properties
 */
private void loadAnnotatedProperties(Class<?> testClass, HiveShellBuilder workFlowBuilder) {
    for (Field hivePropertyField :
            ReflectionUtils.getAllFields(testClass, withAnnotation(HiveProperties.class))) {
        Preconditions.checkState(
                ReflectionUtils.isOfType(hivePropertyField, Map.class),
                "Field annotated with @HiveProperties should be of type Map<String, String>");
        workFlowBuilder.putAllProperties(
                ReflectionUtils.getStaticFieldValue(
                        testClass, hivePropertyField.getName(), Map.class));
    }
}
/**
 * Used as a handle for the HiveShell field in the test case so that we may set it once the
 * HiveShell has been instantiated.
 */
interface HiveShellField {
    /** Injects the instantiated shell into the annotated field of the test class. */
    void setShell(HiveShell shell);

    /** Whether the shell should be started automatically before the test runs. */
    boolean isAutoStart();
}
}
| |
package org.hisp.dhis.api.mobile.model.LWUITmodel;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.hisp.dhis.api.mobile.model.DataStreamSerializable;
import org.hisp.dhis.api.mobile.model.PatientAttribute;
/**
 * Mobile (LWUIT) transfer model of a tracked entity instance ("patient"), including its
 * attribute values, program enrollments and relationships.
 *
 * <p>The object is serialized field-by-field over a DataOutputStream; the write order in
 * {@link #serialize(DataOutputStream)} and the read order in
 * {@link #deSerialize(DataInputStream)} define the wire format and must stay in sync.
 *
 * @author Nguyen Kim Lai
 */
public class Patient
    implements DataStreamSerializable
{
    // Mobile client version. NOTE(review): never written by serialize(), so it does not
    // cross the wire; presumably set server-side only — confirm against callers.
    private String clientVersion;

    private int id;

    private String organisationUnitName;

    private String trackedEntityName;

    // Attribute values of this person.
    private List<PatientAttribute> attributes = new ArrayList<>();

    // Active program enrollments.
    private List<ProgramInstance> enrollmentPrograms = new ArrayList<>();

    // Program enrollments that have been completed.
    private List<ProgramInstance> completedPrograms = new ArrayList<>();

    private List<Relationship> relationships = new ArrayList<>();

    // Ids used when the client requests creation of a new relationship.
    private int idToAddRelative;

    private int relTypeIdToAdd;

    // Optional relationship created as part of an enrollment; may be null.
    private Relationship enrollmentRelationship;

    // -------------------------------------------------------------------------
    // Getters and setters
    // -------------------------------------------------------------------------

    public List<Relationship> getRelationships()
    {
        return relationships;
    }

    public void setRelationships( List<Relationship> relationships )
    {
        this.relationships = relationships;
    }

    public List<PatientAttribute> getAttributes()
    {
        return attributes;
    }

    public void setAttributes( List<PatientAttribute> attributes )
    {
        this.attributes = attributes;
    }

    public int getId()
    {
        return id;
    }

    public void setId( int id )
    {
        this.id = id;
    }

    public String getClientVersion()
    {
        return clientVersion;
    }

    public void setClientVersion( String clientVersion )
    {
        this.clientVersion = clientVersion;
    }

    public String getOrganisationUnitName()
    {
        return organisationUnitName;
    }

    public void setOrganisationUnitName( String organisationUnitName )
    {
        this.organisationUnitName = organisationUnitName;
    }

    public String getTrackedEntityName()
    {
        return trackedEntityName;
    }

    public void setTrackedEntityName( String trackedEntityName )
    {
        this.trackedEntityName = trackedEntityName;
    }

    public List<ProgramInstance> getEnrollmentPrograms()
    {
        return enrollmentPrograms;
    }

    public void setEnrollmentPrograms( List<ProgramInstance> enrollmentPrograms )
    {
        this.enrollmentPrograms = enrollmentPrograms;
    }

    public List<ProgramInstance> getCompletedPrograms()
    {
        return completedPrograms;
    }

    public void setCompletedPrograms( List<ProgramInstance> completedPrograms )
    {
        this.completedPrograms = completedPrograms;
    }

    public int getIdToAddRelative()
    {
        return idToAddRelative;
    }

    public void setIdToAddRelative( int idToAddRelative )
    {
        this.idToAddRelative = idToAddRelative;
    }

    public int getRelTypeIdToAdd()
    {
        return relTypeIdToAdd;
    }

    public void setRelTypeIdToAdd( int relTypeIdToAdd )
    {
        this.relTypeIdToAdd = relTypeIdToAdd;
    }

    public Relationship getEnrollmentRelationship()
    {
        return enrollmentRelationship;
    }

    public void setEnrollmentRelationship( Relationship enrollmentRelationship )
    {
        this.enrollmentRelationship = enrollmentRelationship;
    }

    // -------------------------------------------------------------------------
    // Override Methods
    // -------------------------------------------------------------------------

    /**
     * Writes this object to {@code dout}. Wire order: id, org unit name, tracked entity
     * name, attribute count + attributes, enrollment count + enrollments, completed count
     * + completed programs, relationship count + relationships, idToAddRelative,
     * relTypeIdToAdd, enrollment-relationship presence flag (1/0) followed by the
     * relationship when present. Must mirror {@link #deSerialize(DataInputStream)}.
     *
     * NOTE(review): writeUTF throws NullPointerException if organisationUnitName or
     * trackedEntityName is null — callers must populate both before serializing.
     */
    @Override
    public void serialize( DataOutputStream dout )
        throws IOException
    {
        // ByteArrayOutputStream bout = new ByteArrayOutputStream();
        // DataOutputStream dout = new DataOutputStream( bout );

        dout.writeInt( this.getId() );
        dout.writeUTF( this.getOrganisationUnitName() );
        dout.writeUTF( this.getTrackedEntityName() );

        // Write Patient Attribute
        dout.writeInt( attributes.size() );
        for ( PatientAttribute patientAtt : attributes )
        {
            patientAtt.serialize( dout );
        }

        // Write Enrollment Programs
        dout.writeInt( enrollmentPrograms.size() );
        for ( ProgramInstance program : enrollmentPrograms )
        {
            program.serialize( dout );
        }

        // Write completed Programs
        dout.writeInt( completedPrograms.size() );
        for ( ProgramInstance each : completedPrograms )
        {
            each.serialize( dout );
        }

        // Write Relationships
        dout.writeInt( relationships.size() );
        for ( Relationship each : relationships )
        {
            each.serialize( dout );
        }

        dout.writeInt( this.getIdToAddRelative() );
        dout.writeInt( this.getRelTypeIdToAdd() );

        // Presence flag for the optional enrollment relationship.
        if(enrollmentRelationship!=null)
        {
            dout.writeInt( 1 );
            enrollmentRelationship.serialize( dout );
        }
        else
        {
            dout.writeInt( 0 );
        }
        // bout.flush();
        // bout.writeTo( out );
    }

    /**
     * Reads this object from {@code din} in exactly the order written by
     * {@link #serialize(DataOutputStream)}. For each list, a count of zero leaves the
     * existing (initially empty) list untouched rather than replacing it.
     */
    @Override
    public void deSerialize( DataInputStream din )
        throws IOException, EOFException
    {
        this.setId( din.readInt() );
        this.setOrganisationUnitName( din.readUTF() );
        this.setTrackedEntityName( din.readUTF() );

        // Read Attribute
        int attsNumb = din.readInt();
        if ( attsNumb > 0 )
        {
            attributes = new ArrayList<>();
            for ( int j = 0; j < attsNumb; j++ )
            {
                PatientAttribute pa = new PatientAttribute();
                pa.deSerialize( din );
                attributes.add( pa );
            }
        }

        // Read enrollment programs
        int numbEnrollmentPrograms = din.readInt();
        if ( numbEnrollmentPrograms > 0 )
        {
            this.enrollmentPrograms = new ArrayList<>();
            for ( int i = 0; i < numbEnrollmentPrograms; i++ )
            {
                ProgramInstance program = new ProgramInstance();
                program.deSerialize( din );
                this.enrollmentPrograms.add( program );
            }
        }

        // Read completed programs
        int numbCompletedPrograms = din.readInt();
        if ( numbCompletedPrograms > 0 )
        {
            this.completedPrograms = new ArrayList<>();
            for ( int i = 0; i < numbCompletedPrograms; i++ )
            {
                ProgramInstance program = new ProgramInstance();
                program.deSerialize( din );
                this.completedPrograms.add( program );
            }
        }

        // Read relationships
        int numbRelationships = din.readInt();
        if ( numbRelationships > 0 )
        {
            this.relationships = new ArrayList<>();
            for ( int i = 0; i < numbRelationships; i++ )
            {
                Relationship relationship = new Relationship();
                relationship.deSerialize( din );
                this.relationships.add( relationship );
            }
        }

        this.setIdToAddRelative( din.readInt() );
        this.setRelTypeIdToAdd( din.readInt() );

        // Presence flag (1/0) written by serialize(); local intentionally shadows the field.
        int numEnrollmentRelationships = din.readInt();
        if(numEnrollmentRelationships > 0)
        {
            Relationship enrollmentRelationship = new Relationship();
            enrollmentRelationship.deSerialize( din );
            this.setEnrollmentRelationship( enrollmentRelationship );
        }
    }

    // Versioned serialization is not implemented for this model.
    @Override
    public void serializeVersion2_8( DataOutputStream out )
        throws IOException
    {
        // TODO Auto-generated method stub
    }

    @Override
    public void serializeVersion2_9( DataOutputStream dout )
        throws IOException
    {
        // TODO Auto-generated method stub
    }

    @Override
    public void serializeVersion2_10( DataOutputStream dataOutputStream )
        throws IOException
    {
        // TODO Auto-generated method stub
    }
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.util;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import org.camunda.bpm.engine.ProcessEngine;
import org.camunda.bpm.engine.TaskService;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.impl.jobexecutor.JobExecutor;
import org.camunda.bpm.engine.impl.util.ClockUtil;
import org.camunda.bpm.engine.repository.Deployment;
import org.camunda.bpm.engine.repository.DeploymentBuilder;
import org.camunda.bpm.engine.repository.ProcessDefinition;
import org.camunda.bpm.engine.runtime.CaseInstance;
import org.camunda.bpm.engine.runtime.Job;
import org.camunda.bpm.engine.runtime.ProcessInstance;
import org.camunda.bpm.engine.task.Task;
import org.camunda.bpm.engine.test.ProcessEngineRule;
import org.camunda.bpm.model.bpmn.BpmnModelInstance;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import junit.framework.AssertionFailedError;
/**
 * JUnit rule with convenience helpers for process-engine tests: deployments (auto-cleaned
 * via {@code processEngineRule.manageDeployment}), end-state assertions, job execution and
 * simple task/message/signal interactions.
 *
 * <p>The wrapped {@link ProcessEngineRule} supplies the engine; this rule caches it in
 * {@link #starting(Description)} and releases it in {@link #finished(Description)}.
 */
public class ProcessEngineTestRule extends TestWatcher {

    public static final String DEFAULT_BPMN_RESOURCE_NAME = "process.bpmn20.xml";

    protected ProcessEngineRule processEngineRule;
    protected ProcessEngine processEngine;

    public ProcessEngineTestRule(ProcessEngineRule processEngineRule) {
        this.processEngineRule = processEngineRule;
    }

    @Override
    protected void starting(Description description) {
        this.processEngine = processEngineRule.getProcessEngine();
    }

    @Override
    protected void finished(Description description) {
        this.processEngine = null;
    }

    /** Asserts that no running process instance with the given id exists anymore. */
    public void assertProcessEnded(String processInstanceId) {
        ProcessInstance processInstance = processEngine
            .getRuntimeService()
            .createProcessInstanceQuery()
            .processInstanceId(processInstanceId)
            .singleResult();

        assertThat("Process instance with id " + processInstanceId + " is not finished",
            processInstance, is(nullValue()));
    }

    /** Asserts that no running case instance with the given id exists anymore. */
    public void assertCaseEnded(String caseInstanceId) {
        CaseInstance caseInstance = processEngine
            .getCaseService()
            .createCaseInstanceQuery()
            .caseInstanceId(caseInstanceId)
            .singleResult();

        assertThat("Case instance with id " + caseInstanceId + " is not finished",
            caseInstance, is(nullValue()));
    }

    /** Deploys the given model instances under generated resource names. */
    public Deployment deploy(BpmnModelInstance... bpmnModelInstances) {
        return deploy(createDeploymentBuilder(), Arrays.asList(bpmnModelInstances), Collections.<String> emptyList());
    }

    /** Deploys the given classpath resources. */
    public Deployment deploy(String... resources) {
        return deploy(createDeploymentBuilder(), Collections.<BpmnModelInstance> emptyList(), Arrays.asList(resources));
    }

    /** Deploys one model instance plus one classpath resource in a single deployment. */
    public Deployment deploy(BpmnModelInstance bpmnModelInstance, String resource) {
        return deploy(createDeploymentBuilder(), Collections.singletonList(bpmnModelInstance), Collections.singletonList(resource));
    }

    /** Same as {@link #deploy(BpmnModelInstance...)} but scoped to the given tenant. */
    public Deployment deployForTenant(String tenantId, BpmnModelInstance... bpmnModelInstances) {
        return deploy(createDeploymentBuilder().tenantId(tenantId), Arrays.asList(bpmnModelInstances), Collections.<String> emptyList());
    }

    /** Same as {@link #deploy(String...)} but scoped to the given tenant. */
    public Deployment deployForTenant(String tenantId, String... resources) {
        return deploy(createDeploymentBuilder().tenantId(tenantId), Collections.<BpmnModelInstance> emptyList(), Arrays.asList(resources));
    }

    /** Same as {@link #deploy(BpmnModelInstance, String)} but scoped to the given tenant. */
    public Deployment deployForTenant(String tenant, BpmnModelInstance bpmnModelInstance, String resource) {
        return deploy(createDeploymentBuilder().tenantId(tenant), Collections.singletonList(bpmnModelInstance), Collections.singletonList(resource));
    }

    /** Deploys the model (no tenant) and returns the resulting process definition. */
    public ProcessDefinition deployAndGetDefinition(BpmnModelInstance bpmnModel) {
        return deployForTenantAndGetDefinition(null, bpmnModel);
    }

    /** Deploys the model for the tenant and returns the resulting process definition. */
    public ProcessDefinition deployForTenantAndGetDefinition(String tenant, BpmnModelInstance bpmnModel) {
        Deployment deployment = deploy(createDeploymentBuilder().tenantId(tenant), Collections.singletonList(bpmnModel), Collections.<String>emptyList());

        return processEngineRule.getRepositoryService()
            .createProcessDefinitionQuery()
            .deploymentId(deployment.getId())
            .singleResult();
    }

    /**
     * Performs the actual deployment and registers it with the engine rule so it is
     * removed again after the test. Model instances are added under
     * "{index}_{@value #DEFAULT_BPMN_RESOURCE_NAME}" to keep resource names unique.
     */
    protected Deployment deploy(DeploymentBuilder deploymentBuilder, List<BpmnModelInstance> bpmnModelInstances, List<String> resources) {
        int i = 0;
        for (BpmnModelInstance bpmnModelInstance : bpmnModelInstances) {
            deploymentBuilder.addModelInstance(i + "_" + DEFAULT_BPMN_RESOURCE_NAME, bpmnModelInstance);
            i++;
        }

        for (String resource : resources) {
            deploymentBuilder.addClasspathResource(resource);
        }

        Deployment deployment = deploymentBuilder.deploy();

        processEngineRule.manageDeployment(deployment);

        return deployment;
    }

    protected DeploymentBuilder createDeploymentBuilder() {
        return processEngine.getRepositoryService().createDeployment();
    }

    /** Waits with a default (job-executor-derived) time limit. */
    public void waitForJobExecutorToProcessAllJobs() {
        waitForJobExecutorToProcessAllJobs(0);
    }

    /**
     * Starts the job executor and polls once a second until no executable jobs remain or
     * {@code maxMillisToWait} elapses. A watchdog {@link Timer} interrupts this thread when
     * the limit is reached, which breaks the sleep; the executor is always shut down.
     *
     * @param maxMillisToWait upper bound in ms; raised to twice the executor's wait time
     *        if smaller, so at least one executor cycle can complete
     * @throws AssertionError if jobs are still available when the time limit is exceeded
     */
    public void waitForJobExecutorToProcessAllJobs(long maxMillisToWait) {
        ProcessEngineConfigurationImpl processEngineConfiguration = (ProcessEngineConfigurationImpl) processEngine.getProcessEngineConfiguration();
        JobExecutor jobExecutor = processEngineConfiguration.getJobExecutor();
        jobExecutor.start();
        long intervalMillis = 1000;

        int jobExecutorWaitTime = jobExecutor.getWaitTimeInMillis() * 2;
        if(maxMillisToWait < jobExecutorWaitTime) {
            maxMillisToWait = jobExecutorWaitTime;
        }

        try {
            Timer timer = new Timer();
            InterruptTask task = new InterruptTask(Thread.currentThread());
            timer.schedule(task, maxMillisToWait);
            boolean areJobsAvailable = true;
            try {
                while (areJobsAvailable && !task.isTimeLimitExceeded()) {
                    Thread.sleep(intervalMillis);
                    try {
                        areJobsAvailable = areJobsAvailable();
                    } catch(Throwable t) {
                        // Ignore, possible that exception occurs due to locking/updating of table on MSSQL when
                        // isolation level doesn't allow READ of the table
                    }
                }
            } catch (InterruptedException e) {
                // Deliberately swallowed: the interrupt comes from InterruptTask when the
                // time limit expires, and the loop exits via isTimeLimitExceeded().
                // NOTE(review): the thread's interrupt flag may remain set afterwards.
            } finally {
                timer.cancel();
            }
            if (areJobsAvailable) {
                throw new AssertionError("time limit of " + maxMillisToWait + " was exceeded");
            }

        } finally {
            jobExecutor.shutdown();
        }
    }

    /** Returns true if any job is currently executable (not suspended, retries left, due). */
    protected boolean areJobsAvailable() {
        List<Job> list = processEngine.getManagementService().createJobQuery().list();
        for (Job job : list) {
            if (!job.isSuspended() && job.getRetries() > 0 && (job.getDuedate() == null || ClockUtil.getCurrentTime().after(job.getDuedate()))) {
                return true;
            }
        }
        return false;
    }

    /**
     * Execute all available jobs recursively till no more jobs found.
     */
    public void executeAvailableJobs() {
        executeAvailableJobs(0, Integer.MAX_VALUE);
    }

    /**
     * Execute all available jobs recursively till no more jobs found or the number of executions is higher than expected.
     *
     * @param expectedExecutions number of expected job executions
     *
     * @throws AssertionFailedError when execute less or more jobs than expected
     *
     * @see #executeAvailableJobs()
     */
    public void executeAvailableJobs(int expectedExecutions){
        executeAvailableJobs(0, expectedExecutions);
    }

    private void executeAvailableJobs(int jobsExecuted, int expectedExecutions) {
        List<Job> jobs = processEngine.getManagementService().createJobQuery().withRetriesLeft().list();

        if (jobs.isEmpty()) {
            if (expectedExecutions != Integer.MAX_VALUE) {
                assertThat("executed less jobs than expected.", jobsExecuted, is(expectedExecutions));
            }
            return;
        }

        for (Job job : jobs) {
            try {
                processEngine.getManagementService().executeJob(job.getId());
                jobsExecuted += 1;
            } catch (Exception e) {}
            // Failures are intentionally ignored: the job's retry count is decremented by
            // the engine, and the recursion below re-queries withRetriesLeft() until the
            // job either succeeds or runs out of retries.
        }

        assertThat("executed more jobs than expected.",
            jobsExecuted, lessThanOrEqualTo(expectedExecutions));

        executeAvailableJobs(jobsExecuted, expectedExecutions);
    }

    /** Completes the single task with the given definition key; fails if absent or ambiguous. */
    public void completeTask(String taskKey) {
        TaskService taskService = processEngine.getTaskService();
        Task task = taskService.createTaskQuery().taskDefinitionKey(taskKey).singleResult();
        assertNotNull("Expected a task with key '" + taskKey + "' to exist", task);
        taskService.complete(task.getId());
    }

    /** Completes any one task with the given definition key; fails if none exist. */
    public void completeAnyTask(String taskKey) {
        TaskService taskService = processEngine.getTaskService();
        List<Task> tasks = taskService.createTaskQuery().taskDefinitionKey(taskKey).list();
        assertTrue(!tasks.isEmpty());
        taskService.complete(tasks.get(0).getId());
    }

    public void correlateMessage(String messageName) {
        processEngine.getRuntimeService().createMessageCorrelation(messageName).correlate();
    }

    public void sendSignal(String signalName) {
        processEngine.getRuntimeService().signalEventReceived(signalName);
    }

    /**
     * Watchdog task: when the timer fires, flags the time limit as exceeded and interrupts
     * the waiting thread so its sleep terminates promptly.
     */
    protected static class InterruptTask extends TimerTask {

        protected boolean timeLimitExceeded = false;
        protected Thread thread;

        public InterruptTask(Thread thread) {
            this.thread = thread;
        }
        public boolean isTimeLimitExceeded() {
            return timeLimitExceeded;
        }
        @Override
        public void run() {
            timeLimitExceeded = true;
            thread.interrupt();
        }
    }

}
| |
package org.sakaiproject.evaluation.tool.reporting;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
import java.util.List;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CreationHelper;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.sakaiproject.evaluation.constant.EvalConstants;
import org.sakaiproject.evaluation.logic.EvalCommonLogic;
import org.sakaiproject.evaluation.logic.EvalDeliveryService;
import org.sakaiproject.evaluation.logic.EvalEvaluationService;
import org.sakaiproject.evaluation.logic.model.EvalUser;
import org.sakaiproject.evaluation.model.EvalAnswer;
import org.sakaiproject.evaluation.model.EvalEvaluation;
import org.sakaiproject.evaluation.tool.utils.EvalResponseAggregatorUtil;
import org.sakaiproject.evaluation.utils.EvalUtils;
import org.sakaiproject.evaluation.utils.TemplateItemDataList;
import org.sakaiproject.evaluation.utils.TemplateItemDataList.DataTemplateItem;
import uk.org.ponder.messageutil.MessageLocator;
import uk.org.ponder.util.UniversalRuntimeException;
/**
 * Exports evaluation results as an Excel workbook: header rows of metadata, one column per
 * template item (plus an extra column per item that allows comments) and one row per
 * response.
 *
 * <p>NOTE(review): {@link #buildReport} mutates the instance fields {@code dateCellStyle}
 * and {@code creationHelper}, so one exporter instance is not safe for concurrent exports.
 *
 * @author Steven Githens
 * @author Aaron Zeckoski (aaronz@vt.edu)
 */
public class XLSReportExporter implements ReportExporter {

    // Fixed header-row indices (rows 0-2 hold title, response rate and group list).
    private static final short QUESTION_CAT_ROW = 3; // Course, Instructor, etc
    private static final short QUESTION_TYPE_ROW = 4;
    private static final short QUESTION_TEXT_ROW = 5;
    private static final short FIRST_ANSWER_ROW = 6;

    private EvalCommonLogic commonLogic;
    public void setCommonLogic(EvalCommonLogic commonLogic) {
        this.commonLogic = commonLogic;
    }

    private EvalResponseAggregatorUtil responseAggregator;
    public void setEvalResponseAggregatorUtil(EvalResponseAggregatorUtil bean) {
        this.responseAggregator = bean;
    }

    private EvalEvaluationService evaluationService;
    public void setEvaluationService(EvalEvaluationService evaluationService) {
        this.evaluationService = evaluationService;
    }

    private EvalDeliveryService deliveryService;
    public void setDeliveryService(EvalDeliveryService deliveryService) {
        this.deliveryService = deliveryService;
    }

    private MessageLocator messageLocator;
    public void setMessageLocator(MessageLocator locator) {
        this.messageLocator = locator;
    }

    // Per-export state, (re)initialized at the top of buildReport().
    CellStyle dateCellStyle;
    CreationHelper creationHelper;

    /*
     * (non-Javadoc)
     *
     * @see
     * org.sakaiproject.evaluation.tool.reporting.ReportExporter#buildReport(org.sakaiproject.evaluation
     * .model.EvalEvaluation, java.lang.String[], java.io.OutputStream)
     */
    public void buildReport(EvalEvaluation evaluation, String[] groupIds, OutputStream outputStream) {

        /*
         * Logic for creating this view 1) make tidl 2) get DTIs for this eval from tidl 3) use DTIs
         * to make the headers 4) get responseIds from tidl 5) loop over response ids 6) loop over
         * DTIs 7) check answersmap for an answer, if there put in cell, if missing, insert blank 8)
         * done
         */

        // FIX: the previous unboxing cast "(boolean) evaluation.getInstructorViewAllResults()"
        // threw NullPointerException when the flag was unset; treat null as false instead.
        boolean instructorViewAllResults = Boolean.TRUE.equals( evaluation.getInstructorViewAllResults() );
        String currentUserId = commonLogic.getCurrentUserId();
        String evalOwner = evaluation.getOwner();

        // 1 Make TIDL
        TemplateItemDataList tidl = getEvalTIDL(evaluation, groupIds);
        // 2: get DTIs for this eval from tidl
        List<DataTemplateItem> dtiList = tidl.getFlatListOfDataTemplateItems(true);

        // HSSF (.xls) supports at most 256 columns; fall back to XSSF for wide reports.
        Workbook wb;
        if(dtiList.size() < 256){
            wb = new HSSFWorkbook();
        }else{
            // allow columns greater than 255 - EVALSYS-775
            wb = new XSSFWorkbook();
        }
        creationHelper = wb.getCreationHelper();
        Sheet sheet = wb.createSheet(messageLocator.getMessage("reporting.xls.sheetname"));

        // Title Style
        Font font = wb.createFont();
        font.setFontHeightInPoints((short) 12);
        font.setBoldweight(Font.BOLDWEIGHT_BOLD);
        CellStyle mainTitleStyle = wb.createCellStyle();
        mainTitleStyle.setFont(font);

        // Bold header style
        font = wb.createFont();
        font.setFontHeightInPoints((short) 10);
        font.setBoldweight(Font.BOLDWEIGHT_BOLD);
        CellStyle boldHeaderStyle = wb.createCellStyle();
        boldHeaderStyle.setFont(font);

        // Italic meta header style
        font = wb.createFont();
        font.setFontHeightInPoints((short) 10);
        font.setItalic(true);
        CellStyle italicMiniHeaderStyle = wb.createCellStyle();
        italicMiniHeaderStyle.setFont(font);

        // Date meta Style
        dateCellStyle = wb.createCellStyle();
        // TODO FIXME HELPME To properly
        // String dateCellFormat = ((SimpleDateFormat)DateFormat.getDateInstance(DateFormat.MEDIUM,
        // localeGetter.get())).toLocalizedPattern();
        // http://poi.apache.org/apidocs/org/apache/poi/hssf/usermodel/HSSFDataFormat.html
        dateCellStyle.setDataFormat((short) 0x16);

        // Evaluation Title
        Row row1 = sheet.createRow(0);
        Cell cellA1 = row1.createCell((short) 0);
        setPlainStringCell(cellA1, evaluation.getTitle());
        cellA1.setCellStyle(mainTitleStyle);

        // calculate the response rate
        // int responsesCount = deliveryService.countResponses(evaluation.getId(), null, true);
        int responsesCount = evaluationService.countResponses(null, new Long[] {evaluation.getId()}, groupIds, null);
        int enrollmentsCount = evaluationService.countParticipantsForEval(evaluation.getId(), groupIds);

        Row row2 = sheet.createRow(1);
        Cell cellA2 = row2.createCell((short) 0);
        cellA2.setCellStyle(boldHeaderStyle);
        setPlainStringCell(cellA2, EvalUtils.makeResponseRateStringFromCounts(responsesCount,
                enrollmentsCount));

        // dates
        setPlainStringCell(row1.createCell((short) 2), messageLocator
                .getMessage("evalsettings.start.date.header"));
        setDateCell(row2.createCell((short) 2), evaluation.getStartDate());
        if (evaluation.getDueDate() != null) {
            setPlainStringCell(row1.createCell((short) 3), messageLocator
                    .getMessage("evalsettings.due.date.header"));
            setDateCell(row2.createCell((short) 3), evaluation.getDueDate());
        }

        // add in list of groups
        if (groupIds.length > 0) {
            Row row3 = sheet.createRow(2);
            Cell cellA3 = row3.createCell((short) 0);
            setPlainStringCell(cellA3, messageLocator.getMessage("reporting.xls.participants",
                    new Object[] { responseAggregator.getCommaSeparatedGroupNames(groupIds) }));
        }

        // 3 use DTIs to make the headers
        Row questionCatRow = sheet.createRow(QUESTION_CAT_ROW);
        Row questionTypeRow = sheet.createRow(QUESTION_TYPE_ROW);
        Row questionTextRow = sheet.createRow(QUESTION_TEXT_ROW);
        short headerCount = 1;
        for (DataTemplateItem dti : dtiList) {

            if (!instructorViewAllResults // If the eval is so configured,
              && !commonLogic.isUserAdmin(currentUserId) // and currentUser is not an admin
              && !currentUserId.equals(evalOwner) // and currentUser is not the eval creator
              && !EvalConstants.ITEM_CATEGORY_COURSE.equals(dti.associateType)
              && !currentUserId.equals(commonLogic.getEvalUserById(dti.associateId).userId) ) {
                // skip items that aren't for the current user
                continue;
            }

            Cell cell = questionTypeRow.createCell(headerCount);
            setPlainStringCell(cell, responseAggregator.getHeaderLabelForItemType(dti
                    .getTemplateItemType()));
            cell.setCellStyle(italicMiniHeaderStyle);

            Cell questionText = questionTextRow.createCell(headerCount);
            setPlainStringCell(questionText, commonLogic.makePlainTextFromHTML(dti.templateItem
                    .getItem().getItemText()));

            // Category header: who/what the item is associated with.
            Cell questionCat = questionCatRow.createCell(headerCount);
            if (EvalConstants.ITEM_CATEGORY_INSTRUCTOR.equals(dti.associateType)) {
                EvalUser user = commonLogic.getEvalUserById( dti.associateId );
                String instructorMsg = messageLocator.getMessage("reporting.spreadsheet.instructor",
                        new Object[] {user.displayName});
                setPlainStringCell(questionCat, instructorMsg );
            } else if (EvalConstants.ITEM_CATEGORY_ASSISTANT.equals(dti.associateType)) {
                EvalUser user = commonLogic.getEvalUserById( dti.associateId );
                String assistantMsg = messageLocator.getMessage("reporting.spreadsheet.ta",
                        new Object[] {user.displayName});
                setPlainStringCell(questionCat, assistantMsg );
            } else if (EvalConstants.ITEM_CATEGORY_COURSE.equals(dti.associateType)) {
                setPlainStringCell(questionCat, messageLocator
                        .getMessage("reporting.spreadsheet.course"));
            } else {
                setPlainStringCell(questionCat, messageLocator.getMessage("unknown.caps"));
            }

            headerCount++;

            if (dti.usesComments()) {
                // add an extra column for comments
                setPlainStringCell(questionTypeRow.createCell(headerCount),
                        messageLocator.getMessage("viewreport.comments.header")).setCellStyle(
                        italicMiniHeaderStyle);
                headerCount++;
            }
        }

        // 4) get responseIds from tidl
        List<Long> responseIds = tidl.getResponseIdsForAnswers();

        // 5) loop over response ids
        // NOTE(review): counter is a short — reports beyond 32767 responses would overflow.
        short responseIdCounter = 0;
        for (Long responseId : responseIds) {
            Row row = sheet.createRow(responseIdCounter + FIRST_ANSWER_ROW);
            Cell indexCell = row.createCell((short) 0);
            indexCell.setCellValue(responseIdCounter + 1);
            indexCell.setCellStyle(boldHeaderStyle);
            // 6) loop over DTIs
            short dtiCounter = 1;
            for (DataTemplateItem dti : dtiList) {

                if (!instructorViewAllResults // If the eval is so configured,
                  && !commonLogic.isUserAdmin(currentUserId) // and currentUser is not an admin
                  && !currentUserId.equals(evalOwner) // and currentUser is not the eval creator
                  && !EvalConstants.ITEM_CATEGORY_COURSE.equals(dti.associateType)
                  && !currentUserId.equals(commonLogic.getEvalUserById(dti.associateId).userId) ) {
                    //skip instructor items that aren't for the current user
                    continue;
                }

                // 7) check answersmap for an answer, if there put in cell, if missing, insert blank
                EvalAnswer answer = dti.getAnswer(responseId);
                Cell responseCell = row.createCell(dtiCounter);
                // In Eval, users can leave questions blank, in which case this will be null
                if (answer != null) {
                    setPlainStringCell(responseCell, responseAggregator.formatForSpreadSheet(answer
                            .getTemplateItem(), answer));
                }
                if (dti.usesComments()) {
                    // put comment in the extra column
                    dtiCounter++;
                    setPlainStringCell(row.createCell(dtiCounter), (answer == null || EvalUtils
                            .isBlank(answer.getComment())) ? "" : answer.getComment());
                }
                dtiCounter++;
            }
            responseIdCounter++;
        }

        // dump the output to the response stream
        try {
            wb.write(outputStream);
        } catch (IOException e) {
            throw UniversalRuntimeException.accumulate(e,
                    "Could not get Writer to dump output to xls");
        }
    }

    /** Builds the item/answer data structure for the evaluation restricted to the groups. */
    private TemplateItemDataList getEvalTIDL(EvalEvaluation evaluation,
            String[] groupIds) {
        return responseAggregator.prepareTemplateItemDataStructure(evaluation.getId(), groupIds);
    }

    /**
     * The regular set string value method in POI is deprecated, and this preferred way is much more
     * bulky, so this is a convenience method.
     *
     * @param cell
     * @param value
     * @return the same cell, for chaining
     */
    private Cell setPlainStringCell(Cell cell, String value) {
        cell.setCellValue( creationHelper.createRichTextString(value) );
        return cell;
    }

    /**
     * Sets the cell contents to the date (requires extra work because Excel stores dates as
     * numbers.
     *
     * @param cell
     * @param date
     * @return the same cell, for chaining
     */
    private Cell setDateCell(Cell cell, Date date) {
        cell.setCellStyle(dateCellStyle);
        cell.setCellValue(date);
        return cell;
    }

    public String getContentType() {
        return "application/vnd.ms-excel";
    }

    /** Returns the number of data template items (i.e. result columns) for the evaluation. */
    public int getEvalTDIsize(EvalEvaluation evaluation,
            String[] groupIds) {
        List<DataTemplateItem> dtiList = getEvalTIDL(evaluation, groupIds).getFlatListOfDataTemplateItems(true);
        return dtiList == null? 0 : dtiList.size();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams.processor.internals;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.StreamsMetrics;
import org.apache.kafka.streams.errors.TopologyBuilderException;
import org.apache.kafka.streams.processor.StateRestoreCallback;
import org.apache.kafka.streams.processor.StateStore;
import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.state.internals.ThreadCache;
import java.io.File;
import java.util.List;
import java.util.Map;
/**
 * Processor context implementation backing a running {@link StreamTask}.
 *
 * <p>Wires together the task's record collector, state manager, metrics, config
 * and thread cache, and tracks the record and processor node currently being
 * processed so that forwarding and state-store access can be validated.
 *
 * <p>NOTE(review): not thread-safe — {@code recordContext} and {@code currentNode}
 * are plain mutable fields; presumably this context is confined to the stream
 * thread that owns the task (confirm against the caller).
 */
public class ProcessorContextImpl implements InternalProcessorContext, RecordCollector.Supplier {

    // Sentinel topic name used for records with no real source topic;
    // topic() translates it back to null for callers.
    public static final String NONEXIST_TOPIC = "__null_topic__";

    private final TaskId id;
    private final StreamTask task;
    private final StreamsMetrics metrics;
    private final RecordCollector collector;
    private final ProcessorStateManager stateMgr;
    private final StreamsConfig config;
    private final Serde<?> keySerde;
    private final Serde<?> valSerde;
    private final ThreadCache cache;
    // Flipped to true by initialized(); afterwards register() is rejected.
    private boolean initialized;
    // Metadata (topic/partition/offset/timestamp) of the record currently being
    // processed; null whenever no record is in flight.
    private RecordContext recordContext;
    // The node currently processing a record; used by getStateStore() access
    // checks and swapped/restored around child dispatch in forward().
    private ProcessorNode currentNode;

    public ProcessorContextImpl(TaskId id,
                                StreamTask task,
                                StreamsConfig config,
                                RecordCollector collector,
                                ProcessorStateManager stateMgr,
                                StreamsMetrics metrics,
                                final ThreadCache cache) {
        this.id = id;
        this.task = task;
        this.metrics = metrics;
        this.collector = collector;
        this.stateMgr = stateMgr;
        this.config = config;
        // Serdes are resolved once from the config at construction time.
        this.keySerde = config.keySerde();
        this.valSerde = config.valueSerde();
        this.cache = cache;
        this.initialized = false;
    }

    /** Marks initialization complete; state stores may no longer be registered. */
    public void initialized() {
        this.initialized = true;
    }

    public ProcessorStateManager getStateMgr() {
        return stateMgr;
    }

    @Override
    public TaskId taskId() {
        return id;
    }

    @Override
    public String applicationId() {
        return task.applicationId();
    }

    @Override
    public RecordCollector recordCollector() {
        return this.collector;
    }

    @Override
    public Serde<?> keySerde() {
        return this.keySerde;
    }

    @Override
    public Serde<?> valueSerde() {
        return this.valSerde;
    }

    @Override
    public File stateDir() {
        return stateMgr.baseDir();
    }

    @Override
    public StreamsMetrics metrics() {
        return metrics;
    }

    /**
     * Registers a state store with the task's state manager.
     *
     * @throws IllegalStateException if this method is called after {@link #initialized()}
     *         (stores may only be created during initialization)
     */
    @Override
    public void register(StateStore store, boolean loggingEnabled, StateRestoreCallback stateRestoreCallback) {
        if (initialized)
            throw new IllegalStateException("Can only create state stores during initialization.");

        stateMgr.register(store, loggingEnabled, stateRestoreCallback);
    }

    /**
     * Looks up a state store by name, enforcing that the current node declared
     * access to it in the topology.
     *
     * @throws TopologyBuilderException if an attempt is made to access this state store from an unknown node
     */
    @Override
    public StateStore getStateStore(String name) {
        if (currentNode == null)
            throw new TopologyBuilderException("Accessing from an unknown node");

        // Only stores declared by the current node in the topology are visible.
        if (!currentNode.stateStores.contains(name)) {
            throw new TopologyBuilderException("Processor " + currentNode.name() + " has no access to StateStore " + name);
        }

        return stateMgr.getStore(name);
    }

    @Override
    public ThreadCache getCache() {
        return cache;
    }

    /**
     * @return the source topic of the record being processed, or null when the
     *         record carries the {@link #NONEXIST_TOPIC} sentinel
     * @throws IllegalStateException if the task's record is null
     */
    @Override
    public String topic() {
        if (recordContext == null)
            throw new IllegalStateException("This should not happen as topic() should only be called while a record is processed");

        String topic = recordContext.topic();

        // Translate the sentinel back to null for callers.
        if (topic.equals(NONEXIST_TOPIC))
            return null;
        else
            return topic;
    }

    /**
     * @throws IllegalStateException if partition is null
     */
    @Override
    public int partition() {
        if (recordContext == null)
            throw new IllegalStateException("This should not happen as partition() should only be called while a record is processed");

        return recordContext.partition();
    }

    /**
     * @throws IllegalStateException if offset is null
     */
    @Override
    public long offset() {
        if (recordContext == null)
            throw new IllegalStateException("This should not happen as offset() should only be called while a record is processed");

        return recordContext.offset();
    }

    /**
     * @throws IllegalStateException if timestamp is null
     */
    @Override
    public long timestamp() {
        if (recordContext == null)
            throw new IllegalStateException("This should not happen as timestamp() should only be called while a record is processed");

        return recordContext.timestamp();
    }

    /**
     * Forwards a key/value pair to every child of the current node, temporarily
     * making each child the "current node" while it processes, and restoring the
     * previous node afterwards (even if a child throws).
     */
    @Override
    public <K, V> void forward(K key, V value) {
        ProcessorNode previousNode = currentNode;
        try {
            for (ProcessorNode child : (List<ProcessorNode>) currentNode.children()) {
                currentNode = child;
                child.process(key, value);
            }
        } finally {
            currentNode = previousNode;
        }
    }

    /**
     * Forwards a key/value pair to the child at the given index only.
     */
    @Override
    public <K, V> void forward(K key, V value, int childIndex) {
        ProcessorNode previousNode = currentNode;
        final ProcessorNode child = (ProcessorNode<K, V>) currentNode.children().get(childIndex);
        currentNode = child;
        try {
            child.process(key, value);
        } finally {
            currentNode = previousNode;
        }
    }

    /**
     * Forwards a key/value pair to the child with the given name only.
     * NOTE(review): silently does nothing if no child matches childName.
     */
    @Override
    public <K, V> void forward(K key, V value, String childName) {
        for (ProcessorNode child : (List<ProcessorNode<K, V>>) currentNode.children()) {
            if (child.name().equals(childName)) {
                ProcessorNode previousNode = currentNode;
                currentNode = child;
                try {
                    child.process(key, value);
                    return;
                } finally {
                    currentNode = previousNode;
                }
            }
        }
    }

    /** Requests that the task commit at the next opportunity. */
    @Override
    public void commit() {
        task.needCommit();
    }

    /** Schedules a punctuation on the owning task with the given interval. */
    @Override
    public void schedule(long interval) {
        task.schedule(interval);
    }

    @Override
    public Map<String, Object> appConfigs() {
        return config.originals();
    }

    @Override
    public Map<String, Object> appConfigsWithPrefix(String prefix) {
        return config.originalsWithPrefix(prefix);
    }

    @Override
    public void setRecordContext(final RecordContext recordContext) {
        this.recordContext = recordContext;
    }

    @Override
    public RecordContext recordContext() {
        return this.recordContext;
    }

    @Override
    public void setCurrentNode(final ProcessorNode currentNode) {
        this.currentNode = currentNode;
    }

    @Override
    public ProcessorNode currentNode() {
        return currentNode;
    }
}
| |
/*
Copyright 2011-2012 Selenium committers
Copyright 2011-2012 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.remote.service;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static java.util.concurrent.TimeUnit.SECONDS;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.openqa.selenium.Beta;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.io.FileHandler;
import org.openqa.selenium.net.PortProber;
import org.openqa.selenium.net.UrlChecker;
import org.openqa.selenium.os.CommandLine;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;
/**
 * Manages the life and death of a native executable driver server.
 *
 * It is expected that the driver server implements the
 * <a href="http://code.google.com/p/selenium/wiki/JsonWireProtocol">WebDriver Wire Protocol</a>.
 * In particular, it should implement /status command that is used to check if the server is alive.
 * In addition to this, it is supposed that the driver server implements /shutdown hook that is
 * used to stop the server.
 */
public class DriverService {

  /**
   * The base URL for the managed server.
   */
  private final URL url;

  /**
   * Controls access to {@link #process}.
   */
  private final ReentrantLock lock = new ReentrantLock();

  /**
   * A reference to the current child process. Will be {@code null} whenever this service is not
   * running. Protected by {@link #lock}.
   */
  private CommandLine process = null;

  private final String executable;
  private final ImmutableList<String> args;
  private final ImmutableMap<String, String> environment;

  /**
   * @param executable The driver executable.
   * @param port Which port to start the driver server on.
   * @param args The arguments to the launched server.
   * @param environment The environment for the launched server.
   * @throws IOException If an I/O error occurs.
   */
  protected DriverService(File executable, int port, ImmutableList<String> args,
      ImmutableMap<String, String> environment) throws IOException {
    this.executable = executable.getCanonicalPath();
    url = new URL(String.format("http://localhost:%d", port));
    this.args = args;
    this.environment = environment;
  }

  /**
   * @return The base URL for the managed driver server.
   */
  public URL getUrl() {
    return url;
  }

  /**
   * Locates the driver executable, preferring an explicit system property over a PATH lookup.
   *
   * @param exeName Name of the executable file to look for in PATH
   * @param exeProperty Name of a system property that specifies the path to the executable file
   * @param exeDocs The link to the driver documentation page
   * @param exeDownload The link to the driver download page
   *
   * @return The driver executable as a {@link File} object
   * @throws IllegalStateException If the executable not found or cannot be executed
   */
  protected static File findExecutable(String exeName, String exeProperty, String exeDocs,
      String exeDownload) {
    String defaultPath = CommandLine.find(exeName);
    String exePath = System.getProperty(exeProperty, defaultPath);
    checkState(exePath != null,
        "The path to the driver executable must be set by the %s system property;"
            + " for more information, see %s. "
            + "The latest version can be downloaded from %s",
        exeProperty, exeDocs, exeDownload);

    File exe = new File(exePath);
    checkExecutable(exe);
    return exe;
  }

  /**
   * Verifies that the given file exists, is not a directory, and is executable.
   *
   * @throws IllegalStateException if any of the checks fail.
   */
  protected static void checkExecutable(File exe) {
    checkState(exe.exists(),
        "The driver executable does not exist: %s", exe.getAbsolutePath());
    checkState(!exe.isDirectory(),
        "The driver executable is a directory: %s", exe.getAbsolutePath());
    checkState(FileHandler.canExecute(exe),
        "The driver is not executable: %s", exe.getAbsolutePath());
  }

  /**
   * Checks whether the driver child process is currently running.
   *
   * @return Whether the driver child process is still running.
   */
  public boolean isRunning() {
    lock.lock();
    try {
      if (process == null) {
        return false;
      }
      return process.isRunning();
    } catch (IllegalThreadStateException e) {
      // The process has not yet terminated.
      return true;
    } finally {
      lock.unlock();
    }
  }

  /**
   * Starts this service if it is not already running. This method will block until the server has
   * been fully started and is ready to handle commands.
   *
   * @throws IOException If an error occurs while spawning the child process.
   * @see #stop()
   */
  public void start() throws IOException {
    lock.lock();
    try {
      if (process != null) {
        // Already running; starting is idempotent.
        return;
      }
      process = new CommandLine(this.executable, args.toArray(new String[] {}));
      process.setEnvironmentVariables(environment);
      process.copyOutputTo(System.err);
      process.executeAsync();

      // Poll the server's /status endpoint until it responds, or time out.
      URL status = new URL(url.toString() + "/status");
      new UrlChecker().waitUntilAvailable(120, SECONDS, status);
    } catch (UrlChecker.TimeoutException e) {
      // Surface any startup error from the child before reporting the timeout.
      process.checkForError();
      throw new WebDriverException("Timed out waiting for driver server to start.", e);
    } finally {
      lock.unlock();
    }
  }

  /**
   * Stops this service if it is currently running. This method will attempt to block until the
   * server has been fully shutdown.
   *
   * <p>The child process is destroyed in the {@code finally} block so that a shutdown timeout
   * (or a malformed shutdown URL) cannot leak a running child process: previously the process
   * reference was dropped without {@code destroy()} being called on those error paths.
   *
   * @see #start()
   */
  public void stop() {
    lock.lock();
    try {
      if (process == null) {
        // Not running; stopping is idempotent.
        return;
      }
      // Ask the server to shut itself down via its /shutdown hook, then wait
      // briefly for it to go away before forcibly destroying the process.
      URL killUrl = new URL(url.toString() + "/shutdown");
      new UrlChecker().waitUntilUnavailable(3, SECONDS, killUrl);
    } catch (MalformedURLException e) {
      throw new WebDriverException(e);
    } catch (UrlChecker.TimeoutException e) {
      throw new WebDriverException("Timed out waiting for driver server to shutdown.", e);
    } finally {
      if (process != null) {
        // Always reap the child, even when the graceful shutdown failed.
        process.destroy();
        process = null;
      }
      lock.unlock();
    }
  }

  /**
   * Fluent builder for {@link DriverService} subclasses. Subclasses supply the
   * executable lookup and argument construction; this base class manages port,
   * environment and log-file configuration.
   */
  public static abstract class Builder<DS extends DriverService, B extends Builder> {

    private int port = 0;
    private File exe = null;
    private ImmutableMap<String, String> environment = ImmutableMap.of();
    private File logFile;

    /**
     * Sets which driver executable the builder will use.
     *
     * @param file The executable to use.
     * @return A self reference.
     */
    public B usingDriverExecutable(File file) {
      checkNotNull(file);
      checkExecutable(file);
      this.exe = file;
      return (B) this;
    }

    /**
     * Sets which port the driver server should be started on. A value of 0 indicates that any
     * free port may be used.
     *
     * @param port The port to use; must be non-negative.
     * @return A self reference.
     */
    public B usingPort(int port) {
      checkArgument(port >= 0, "Invalid port number: %d", port);
      this.port = port;
      return (B) this;
    }

    protected int getPort() {
      return port;
    }

    /**
     * Configures the driver server to start on any available port.
     *
     * @return A self reference.
     */
    public B usingAnyFreePort() {
      this.port = 0;
      return (B) this;
    }

    /**
     * Defines the environment for the launched driver server. These
     * settings will be inherited by every browser session launched by the
     * server.
     *
     * @param environment A map of the environment variables to launch the
     *     server with.
     * @return A self reference.
     */
    @Beta
    public B withEnvironment(Map<String, String> environment) {
      this.environment = ImmutableMap.copyOf(environment);
      return (B) this;
    }

    /**
     * Configures the driver server to write log to the given file.
     *
     * @param logFile A file to write log to.
     * @return A self reference.
     */
    public B withLogFile(File logFile) {
      this.logFile = logFile;
      return (B) this;
    }

    protected File getLogFile() {
      return logFile;
    }

    /**
     * Creates a new service to manage the driver server. Before creating a new service, the
     * builder will find a port for the server to listen to.
     *
     * @return The new service object.
     */
    public DS build() {
      if (port == 0) {
        port = PortProber.findFreePort();
      }
      if (exe == null) {
        exe = findDefaultExecutable();
      }
      ImmutableList<String> args = createArgs();
      return createDriverService(exe, port, args, environment);
    }

    protected abstract File findDefaultExecutable();

    protected abstract ImmutableList<String> createArgs();

    protected abstract DS createDriverService(File exe, int port, ImmutableList<String> args,
        ImmutableMap<String, String> environment);
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hive.hcatalog.cli.SemanticAnalysis;
import java.io.IOException;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.exec.DDLTask;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.HiveSemanticAnalyzerHookContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.StorageFormat;
import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
import org.apache.hadoop.hive.ql.security.authorization.Privilege;
import org.apache.hive.hcatalog.common.HCatConstants;
import org.apache.hive.hcatalog.common.HCatUtil;
/**
 * Semantic-analyzer hook that validates CREATE TABLE statements against
 * HCatalog's restrictions (no CTAS, string-only partition columns, complete
 * STORED AS clause), then — after Hive's analysis — validates any storage
 * handler, authorizes the create when authorization is enabled, and records
 * the created table's name in the configuration.
 */
final class CreateTableHook extends HCatSemanticAnalyzerBase {

    // Unescaped name of the table being created; captured in preAnalyze and
    // published to the conf in postAnalyze.
    private String tableName;

    /**
     * Validates the CREATE TABLE AST before Hive's own analysis runs.
     *
     * @return the (unmodified) AST
     * @throws SemanticException on CTAS, non-string partition columns, a
     *         missing/incomplete STORED AS clause, or metastore access failure
     */
    @Override
    public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context,
        ASTNode ast) throws SemanticException {

        Hive db;
        try {
            db = context.getHive();
        } catch (HiveException e) {
            throw new SemanticException(
                "Couldn't get Hive DB instance in semantic analysis phase.",
                e);
        }

        // Analyze and create tbl properties object
        int numCh = ast.getChildCount();

        tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast
            .getChild(0));
        boolean likeTable = false;
        StorageFormat format = new StorageFormat(context.getConf());
        for (int num = 1; num < numCh; num++) {
            ASTNode child = (ASTNode) ast.getChild(num);
            if (format.fillStorageFormat(child)) {
                // A custom storage handler manages its own formats; nothing
                // further for us to validate here.
                if (org.apache.commons.lang.StringUtils
                    .isNotEmpty(format.getStorageHandler())) {
                    return ast;
                }
                continue;
            }
            switch (child.getToken().getType()) {

            case HiveParser.TOK_QUERY: // CTAS
                throw new SemanticException(
                    "Operation not supported. Create table as " +
                        "Select is not a valid operation.");

            case HiveParser.TOK_ALTERTABLE_BUCKETS:
                break;

            case HiveParser.TOK_LIKETABLE:
                likeTable = true;
                break;

            case HiveParser.TOK_IFNOTEXISTS:
                try {
                    // If the table already exists there is nothing to check;
                    // short-circuit the remaining validation.
                    List<String> tables = db.getTablesByPattern(tableName);
                    if (tables != null && tables.size() > 0) { // table
                        // exists
                        return ast;
                    }
                } catch (HiveException e) {
                    throw new SemanticException(e);
                }
                break;

            case HiveParser.TOK_TABLEPARTCOLS:
                // HCatalog only supports string partition columns.
                List<FieldSchema> partCols = BaseSemanticAnalyzer
                    .getColumns((ASTNode) child.getChild(0), false);
                for (FieldSchema fs : partCols) {
                    if (!fs.getType().equalsIgnoreCase("string")) {
                        throw new SemanticException(
                            "Operation not supported. HCatalog only " +
                                "supports partition columns of type string. "
                                + "For column: "
                                + fs.getName()
                                + " Found type: " + fs.getType());
                    }
                }
                break;
            }
        }

        if (!likeTable && (format.getInputFormat() == null || format.getOutputFormat() == null)) {
            throw new SemanticException(
                "STORED AS specification is either incomplete or incorrect.");
        }

        return ast;
    }

    /**
     * After Hive's analysis: validates the storage handler, authorizes the
     * create if authorization is enabled, and records the table name and
     * properties.
     *
     * @throws SemanticException on storage-handler or authorization failure
     */
    @Override
    public void postAnalyze(HiveSemanticAnalyzerHookContext context,
        List<Task<? extends Serializable>> rootTasks)
        throws SemanticException {

        if (rootTasks.size() == 0) {
            // There will be no DDL task created in case if its CREATE TABLE IF
            // NOT EXISTS
            return;
        }
        CreateTableDesc desc = ((DDLTask) rootTasks.get(rootTasks.size() - 1))
            .getWork().getCreateTblDesc();
        if (desc == null) {
            // Desc will be null if its CREATE TABLE LIKE. Desc will be
            // contained in CreateTableLikeDesc. Currently, HCat disallows CTLT in
            // pre-hook. So, desc can never be null.
            return;
        }
        Map<String, String> tblProps = desc.getTblProps();
        if (tblProps == null) {
            // tblProps will be null if user didnt use tblprops in his CREATE
            // TABLE cmd.
            tblProps = new HashMap<String, String>();
        }

        // First check if we will allow the user to create table. Instantiating
        // the storage handler validates it; authorization checks are performed
        // by storageHandler.getAuthorizationProvider() when
        // StorageDelegationAuthorizationProvider is used. (This replaces the
        // original empty-then-branch on isEmpty and the unused local that held
        // the handler instance.)
        String storageHandler = desc.getStorageHandler();
        if (StringUtils.isNotEmpty(storageHandler)) {
            try {
                HCatUtil.getStorageHandler(context.getConf(),
                    desc.getStorageHandler(),
                    desc.getSerName(),
                    desc.getInputFormat(),
                    desc.getOutputFormat());
            } catch (IOException e) {
                throw new SemanticException(e);
            }
        }

        // desc is guaranteed non-null here because of the early return above,
        // so the original redundant `if (desc != null)` wrapper was dropped.
        try {
            Table table = context.getHive().newTable(desc.getTableName());
            if (desc.getLocation() != null) {
                table.setDataLocation(new Path(desc.getLocation()));
            }
            if (desc.getStorageHandler() != null) {
                table.setProperty(
                    org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE,
                    desc.getStorageHandler());
            }
            for (Map.Entry<String, String> prop : tblProps.entrySet()) {
                table.setProperty(prop.getKey(), prop.getValue());
            }
            for (Map.Entry<String, String> prop : desc.getSerdeProps().entrySet()) {
                table.setSerdeParam(prop.getKey(), prop.getValue());
            }
            //TODO: set other Table properties as needed

            //authorize against the table operation so that location permissions can be checked if any
            if (HCatAuthUtil.isAuthorizationEnabled(context.getConf())) {
                authorize(table, Privilege.CREATE);
            }
        } catch (HiveException ex) {
            throw new SemanticException(ex);
        }

        desc.setTblProps(tblProps);
        context.getConf().set(HCatConstants.HCAT_CREATE_TBL_NAME, tableName);
    }
}
| |
package org.appenders.log4j2.elasticsearch.hc.jmh;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.CompositeByteBuf;
import io.netty.buffer.UnpooledByteBufAllocator;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.appenders.log4j2.elasticsearch.AsyncBatchEmitter;
import org.appenders.log4j2.elasticsearch.BatchOperations;
import org.appenders.log4j2.elasticsearch.BulkEmitter;
import org.appenders.log4j2.elasticsearch.ByteBufBoundedSizeLimitPolicy;
import org.appenders.log4j2.elasticsearch.ByteBufItemSource;
import org.appenders.log4j2.elasticsearch.ByteBufPooledObjectOps;
import org.appenders.log4j2.elasticsearch.ItemSource;
import org.appenders.log4j2.elasticsearch.ItemSourceFactory;
import org.appenders.log4j2.elasticsearch.JacksonJsonLayout;
import org.appenders.log4j2.elasticsearch.JacksonSerializer;
import org.appenders.log4j2.elasticsearch.Log4j2Lookup;
import org.appenders.log4j2.elasticsearch.PooledItemSourceFactory;
import org.appenders.log4j2.elasticsearch.ReleaseCallback;
import org.appenders.log4j2.elasticsearch.Serializer;
import org.appenders.log4j2.elasticsearch.UnlimitedResizePolicy;
import org.appenders.log4j2.elasticsearch.ValueResolver;
import org.appenders.log4j2.elasticsearch.hc.BatchRequest;
import org.appenders.log4j2.elasticsearch.hc.HCBatchOperations;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.TearDown;
import org.openjdk.jmh.infra.Blackhole;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Queue;
import java.util.Random;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import static org.appenders.log4j2.elasticsearch.QueueFactory.getQueueFactoryInstance;
/**
 * JMH benchmark measuring throughput of {@link AsyncBatchEmitter} driven by
 * {@link HCBatchOperations}, across a grid of item pool sizes and item sizes.
 * Each invocation enqueues a full batch of pre-built items and triggers one emit.
 */
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.SECONDS)
@State(Scope.Thread)
@Fork(jvmArgsAppend = {
        "-ea",
        "-Xmx40g",
        "-Xms40g",
        "-XX:+AlwaysPreTouch",
        "-Djmh.pinned=true"
}
)
public class AsyncBatchEmitterWithBatchOperationsTest {

    // Random index name so repeated runs don't collide.
    public static final String INDEX_NAME = UUID.randomUUID().toString();

    private final ByteBufAllocator byteBufAllocator = UnpooledByteBufAllocator.DEFAULT;

    // Number of pooled items; also used as the batch size (see prepare()).
    @Param(value = {
            "1",
            "10",
            "1000",
            "10000",
            "100000",
    })
    public int itemPoolSize;

    // Size of each serialized payload in bytes.
    @Param(value = {
            "512",
            "2048",
            "4096",
            "8192",
            "16384",
    })
    public int itemSizeInBytes;

    private int batchSize;
    private AsyncBatchEmitter<BatchRequest> emitter;

    // MPSC queue shared with the emitter; initial size taken from a system property.
    private final Queue<Object> itemsQueue = getQueueFactoryInstance(BulkEmitter.class.getSimpleName()).tryCreateMpscQueue(
            Integer.parseInt(System.getProperty("appenders." + BulkEmitter.class.getSimpleName() + ".initialSize", "65536")));

    // Pre-built batch items, re-enqueued on every benchmark invocation.
    private final Collection<Object> items = new ArrayList<>();

    // Total bytes serialized across the run; reported in teardown().
    private final AtomicLong bytesSerialized = new AtomicLong();

    /**
     * Builds the item and batch buffer pools, pre-creates one batch worth of
     * items filled with random bytes, and starts the emitter with a listener
     * that serializes each batch and records the byte count.
     */
    @Setup
    public void prepare() {

        batchSize = itemPoolSize;

        final byte[] bytes = new byte[itemSizeInBytes];
        new Random().nextBytes(bytes);

        final PooledItemSourceFactory<byte[], ByteBuf> itemPool = new PooledItemSourceFactory.Builder<byte[], ByteBuf>()
                .withPooledObjectOps(new ByteBufPooledObjectOps(UnpooledByteBufAllocator.DEFAULT,
                        new ByteBufBoundedSizeLimitPolicy(itemSizeInBytes, itemSizeInBytes * 2)) {
                    @Override
                    public void reset(ItemSource<ByteBuf> pooled) {
                        // don't remove test items content
                    }
                    @Override
                    public ByteBufItemSource createItemSource(ReleaseCallback<ByteBuf> releaseCallback) {
                        CompositeByteBuf buffer = new CompositeByteBuf(byteBufAllocator, true, 2);
                        return new ByteBufItemSource(buffer, releaseCallback) {
                            @Override
                            public void release() {
                                buffer.readerIndex(0); // simply prepare the item for the next test
                            }
                        };
                    }
                })
                .withInitialPoolSize(itemPoolSize)
                .withPoolName("itemPool")
                .build();

        // Pool sized to hold one whole serialized batch.
        final PooledItemSourceFactory<byte[], ByteBuf> batchPool = new PooledItemSourceFactory.Builder<byte[], ByteBuf>()
                .withPooledObjectOps(new ByteBufPooledObjectOps(UnpooledByteBufAllocator.DEFAULT,
                        new ByteBufBoundedSizeLimitPolicy(itemSizeInBytes * batchSize, itemSizeInBytes * batchSize * 2)))
                .withInitialPoolSize(1)
                .withMonitored(false)
                .withMonitorTaskInterval(1000)
                .withPoolName("batchPool")
                .withResizePolicy(UnlimitedResizePolicy.newBuilder().withResizeFactor(0.5).build())
                .withResizeTimeout(100)
                .build();

        final BatchOperations<BatchRequest> batchOperations = new HCBatchOperations(batchPool);

        // Listener invoked per emitted batch: serialize, count bytes, complete.
        final Function<BatchRequest, Boolean> listener = batchRequest -> {
            try {
                final ItemSource<ByteBuf> serialize = batchRequest.serialize();
                bytesSerialized.addAndGet(serialize.getSource().writerIndex());
                batchRequest.completed();
                return true;
            } catch (IOException e) {
                e.printStackTrace();
                return false;
            }
        };

        itemPool.start();

        final Serializer<byte[]> serializer = new JacksonSerializer<>(new JhmJacksonJsonLayout.Builder()
                .withSingleThread(true)
                .createConfiguredWriter());

        for (int i = 0; i < batchSize; i++) {
            ItemSource<ByteBuf> itemSource = itemPool.create(bytes, serializer);
            items.add(batchOperations.createBatchItem(INDEX_NAME, itemSource));
        }

        emitter = new AsyncBatchEmitter<>(batchSize, 100000, batchOperations, itemsQueue);
        emitter.addListener(listener);

        batchPool.start();
        emitter.start();
    }

    /**
     * The measured operation: enqueue one batch worth of items and emit.
     */
    @Benchmark
    public void smokeTest(Blackhole fox) {
        itemsQueue.addAll(items);
        final boolean emit = emitter.emit(batchSize);
        fox.consume(emit);
    }

    /**
     * Layout subclass exposing protected builder hooks for benchmark use.
     */
    private static class JhmJacksonJsonLayout extends JacksonJsonLayout {
        protected JhmJacksonJsonLayout(Configuration config, ObjectWriter configuredWriter, ItemSourceFactory itemSourceFactory) {
            super(config, configuredWriter, itemSourceFactory);
        }
        static class Builder extends JacksonJsonLayout.Builder {
            @Override
            protected ValueResolver createValueResolver() {
                return new Log4j2Lookup(LoggerContext.getContext().getConfiguration().getStrSubstitutor());
            }
            @Override
            protected ObjectWriter createConfiguredWriter() {
                return super.createConfiguredWriter();
            }
            @Override
            protected ObjectMapper createDefaultObjectMapper() {
                return super.createDefaultObjectMapper();
            }
            @Override
            public Builder withSingleThread(boolean singleThread) {
                super.withSingleThread(singleThread);
                return this;
            }
        }
    }

    /**
     * Reports (and resets) the total number of bytes serialized during the run.
     */
    @TearDown
    public void teardown() {
        System.out.println("Serialised batch bytes: " + bytesSerialized.getAndSet(0));
    }
}
| |
package com.lehman.ic9.net;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
import java.util.Vector;
import javax.script.ScriptException;
import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.ChannelSftp.LsEntry;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SftpATTRS;
import com.jcraft.jsch.SftpException;
import com.lehman.ic9.ic9engine;
import com.lehman.ic9.ic9exception;
/**
* Implements SFTP connection functionality.
* @author Austin Lehman
*/
public class sftp
{
    // Hosting script-engine instance; stored by the constructor (not referenced
    // by the methods visible in this file).
    private ic9engine eng = null;
    // JSch entry point used to create SSH sessions.
    private JSch con = null;
    // Active SSH session; null whenever disconnected.
    private Session sess = null;
    // Active SFTP channel on top of the session; null whenever disconnected.
    private ChannelSftp chan = null;

    // Connection information.
    private String host = "localhost";
    private String userName = "";
    private String password = "";
    private int port = 22;

    // Settings
    // Timeout in milliseconds applied to both session and channel connects.
    private int connectTimeoutMills = 30000;
    // When true, unknown host keys cause the connection to fail.
    private boolean strictHostKeyChecking = false;
/**
 * Creates a new SFTP client with the supplied connection details. No network
 * activity occurs until {@link #connect()} is called.
 * @param Eng Is an instance of the ic9engine.
 * @param Host Is a String with the host to connect to.
 * @param UserName Is a String with the user name to connect with.
 * @param Password Is a String with the password to connect with.
 * @param Port Is an int with the port number to connect to. (22)
 */
public sftp(ic9engine Eng, String Host, String UserName, String Password, int Port) {
    this.eng = Eng;
    this.host = Host;
    this.port = Port;
    this.userName = UserName;
    this.password = Password;
    this.con = new JSch();
}
/**
 * Opens the SSH session and SFTP channel to the remote host using the
 * credentials supplied at construction time.
 * @throws ic9exception Exception
 */
public void connect() throws ic9exception {
    try {
        this.sess = this.con.getSession(this.userName, this.host, this.port);
        this.sess.setPassword(this.password);
        // Map the boolean setting onto JSch's yes/no config value.
        this.sess.setConfig("StrictHostKeyChecking", this.strictHostKeyChecking ? "yes" : "no");
        this.sess.connect(this.connectTimeoutMills);
        Channel tchan = this.sess.openChannel("sftp");
        tchan.connect(this.connectTimeoutMills);
        this.chan = (ChannelSftp) tchan;
    } catch (JSchException e) {
        throw new ic9exception("sftp.connect(): Failed to establish SSH connection. " + e.getMessage());
    }
}
/**
 * Tears down the SFTP channel and the SSH session, if open. Safe to call
 * repeatedly; each handle is nulled after it is closed.
 */
public void disconnect() {
    if (this.chan != null) {
        this.chan.disconnect();
        this.chan = null;
    }
    if (this.sess != null) {
        this.sess.disconnect();
        this.sess = null;
    }
}
/**
 * Changes the local working directory.
 * @param NewDir Is a String with the new local directory.
 * @throws ic9exception Exception
 */
public void lcd(String NewDir) throws ic9exception {
    try {
        this.chan.lcd(NewDir);
    } catch (SftpException e) {
        throw new ic9exception("sftp.lcd(): " + e.getMessage());
    }
}
/**
 * Changes the remote working directory.
 * @param NewDir Is a String with the new remote directory.
 * @throws ic9exception Exception
 */
public void cd(String NewDir) throws ic9exception {
    try {
        this.chan.cd(NewDir);
    } catch (SftpException e) {
        throw new ic9exception("sftp.cd(): " + e.getMessage());
    }
}
/**
 * Creates a remote directory at the provided path.
 * @param DirToMake Is a String with the path to create the new directory.
 * @throws ic9exception Exception
 */
public void mkdir(String DirToMake) throws ic9exception {
    try {
        this.chan.mkdir(DirToMake);
    } catch (SftpException e) {
        throw new ic9exception("sftp.mkdir(): " + e.getMessage());
    }
}
/**
 * Removes the remote directory at the provided path.
 * @param DirToRemove Is a String with the remote path to remove.
 * @throws ic9exception Exception
 */
public void rmdir(String DirToRemove) throws ic9exception {
    try {
        this.chan.rmdir(DirToRemove);
    } catch (SftpException e) {
        throw new ic9exception("sftp.rmdir(): " + e.getMessage());
    }
}
/**
 * Uploads a local file with the provided source file name to the
 * remote with the destination file name.
 *
 * Uses try-with-resources so both streams are always closed; note that a
 * failure while closing now surfaces as an ic9exception rather than being
 * silently ignored as in the previous hand-rolled finally block.
 *
 * @param SrcFileName Is a String with the relative or absolute path of
 * the local file to upload.
 * @param DestFileName Is a String with the relative or absolute path to
 * upload the file to.
 * @throws ic9exception Exception
 */
public void put(String SrcFileName, String DestFileName) throws ic9exception {
    File f = new File(SrcFileName);
    if (!f.exists()) {
        throw new ic9exception("sftp.put(): Source file '" + SrcFileName + "' not found.");
    }
    try (OutputStream os = this.chan.put(DestFileName);
         InputStream is = new FileInputStream(SrcFileName)) {
        // Stream the file across in 32 KiB chunks.
        byte[] buff = new byte[32768];
        int read;
        while ((read = is.read(buff)) != -1) {
            os.write(buff, 0, read);
        }
    } catch (IOException | SftpException e) {
        // FileNotFoundException is an IOException, so the original's three
        // identical catch blocks collapse into this multi-catch.
        throw new ic9exception("sftp.put(): " + e.getMessage());
    }
}
/**
 * Downloads a remote file with the provided source file name to the local
 * file system at the destination file name.
 * @param SrcFileName Is a String with the remote relative or absolute file name
 * to download.
 * @param DestFileName Is a String with the local relative or absolute file name
 * to save the downloaded file to.
 * @throws ic9exception Exception
 */
public void get(String SrcFileName, String DestFileName) throws ic9exception
{
    // try-with-resources guarantees both streams are closed even on error.
    // The previous explicit finally block silently swallowed close()
    // failures on the local output stream, which can hide an incomplete
    // download; now any close failure surfaces as an IOException.
    try (InputStream is = this.chan.get(SrcFileName);
         OutputStream os = new FileOutputStream(DestFileName))
    {
        byte[] buff = new byte[32768];
        int read;
        while ((read = is.read(buff)) != -1)
        {
            os.write(buff, 0, read);
        }
    }
    catch (IOException | SftpException e)
    {
        // FileNotFoundException is an IOException, so all three original
        // catch branches are covered with the same message prefix.
        throw new ic9exception("sftp.get(): " + e.getMessage());
    }
}
/**
 * Changes the owning group of the file or directory at the provided
 * path to the provided group ID.
 * @param GroupId Is an int with the new group ID.
 * @param Path Is a String with the file or directory to change
 * the owner group.
 * @throws ic9exception Exception
 */
public void chgrp(int GroupId, String Path) throws ic9exception {
    try { this.chan.chgrp(GroupId, Path); }
    catch (SftpException ex) { throw new ic9exception("sftp.chgrp(): " + ex.getMessage()); }
}
/**
 * Changes the permissions of the file or directory of the
 * provided path.
 * @param Perms Is an int with the new permissions.
 * @param Path Is the file or directory to apply the permissions to.
 * @throws ic9exception Exception
 */
public void chmod(int Perms, String Path) throws ic9exception
{
    try
    {
        this.chan.chmod(Perms, Path);
    }
    catch (SftpException e)
    {
        // Fixed copy-paste error: the message previously said "sftp.chgrp()".
        throw new ic9exception("sftp.chmod(): " + e.getMessage());
    }
}
/**
 * Changes the owner of the file or directory of the
 * provided path.
 * @param Uid Is an int with the new user ID to apply.
 * @param Path Is the file or directory to apply the new ownership.
 * @throws ic9exception Exception
 */
public void chown(int Uid, String Path) throws ic9exception
{
    try
    {
        this.chan.chown(Uid, Path);
    }
    catch (SftpException e)
    {
        // Fixed copy-paste error: the message previously said "sftp.chgrp()".
        throw new ic9exception("sftp.chown(): " + e.getMessage());
    }
}
/**
 * Renames a remote file or directory to the new provided destination name.
 * @param SrcPath Is a String with the source file or directory name.
 * @param DestPath Is a String with the destination file or directory name.
 * @throws ic9exception Exception
 */
public void rename(String SrcPath, String DestPath) throws ic9exception {
    try { this.chan.rename(SrcPath, DestPath); }
    catch (SftpException ex) { throw new ic9exception("sftp.rename(): " + ex.getMessage()); }
}
/**
 * Sets an environment variable for the remote session.
 * @param Key Is a String with the variable name.
 * @param Val Is a String with the variable value.
 */
public void setEnv(String Key, String Val) {
    this.chan.setEnv(Key, Val);
}
/**
 * Creates a symlink on the remote system.
 * @param SrcPath Is the remote path of the link target relative to the current directory.
 * @param DestPath Is the remote path of the link to be created relative to the current directory.
 * @throws ic9exception Exception
 */
public void symlink(String SrcPath, String DestPath) throws ic9exception {
    try { this.chan.symlink(SrcPath, DestPath); }
    catch (SftpException ex) { throw new ic9exception("sftp.symlink(): " + ex.getMessage()); }
}
/**
 * Removes the provided remote file.
 * @param Path Is a String with the relative or absolute path of the file to remove.
 * @throws ic9exception Exception
 */
public void rm(String Path) throws ic9exception {
    try { this.chan.rm(Path); }
    catch (SftpException ex) { throw new ic9exception("sftp.rm(): " + ex.getMessage()); }
}
/**
 * Checks to see if the SFTP session is connected.
 * @return A boolean with true for connected and false for not.
 */
public boolean isConnected() {
    // Equivalent to the original nested null/connected checks.
    return this.sess != null && this.sess.isConnected();
}
/**
 * Checks to see if the SFTP channel is connected.
 * @return A boolean with true for channel connected
 * and false for not.
 */
public boolean isChannelConnected() {
    // Equivalent to the original nested null/connected checks.
    return this.chan != null && this.chan.isConnected();
}
/**
 * Gets the absolute path of the home directory on the remote system.
 * @return A String with the absolute path.
 * @throws ic9exception Exception
 */
public String getHome() throws ic9exception {
    try { return this.chan.getHome(); }
    catch (SftpException ex) { throw new ic9exception("sftp.getHome(): " + ex.getMessage()); }
}
/**
 * Gets the absolute path of the current remote working directory.
 * @return A String with the working directory.
 * @throws ic9exception Exception
 */
public String pwd() throws ic9exception {
    try { return this.chan.pwd(); }
    catch (SftpException ex) { throw new ic9exception("sftp.pwd(): " + ex.getMessage()); }
}
/**
 * Gets the absolute path of the current local working directory.
 * @return A String with the working directory.
 */
public String lpwd() {
    return this.chan.lpwd();
}
/**
 * Lists the contents of the directory at the provided path.
 * This method returns a JS object with key-value pairs where the
 * key is the file name and the value is a DirRec object with item
 * details.
 * @param Path Is a String with the path to list contents of, or null
 * to list the current remote working directory.
 * @return A JS object with key-value pairs.
 * @throws NoSuchMethodException Exception
 * @throws ScriptException Exception
 * @throws ic9exception Exception
 */
public Map<String, Object> ls(String Path) throws NoSuchMethodException, ScriptException, ic9exception
{
    try
    {
        // Default to the current remote directory when no path is supplied.
        String dir = (Path == null) ? this.chan.pwd() : Path;
        @SuppressWarnings("unchecked")
        Vector<LsEntry> entries = this.chan.ls(dir);
        Map<String, Object> listing = this.eng.newObj();
        for (LsEntry ent : entries)
        {
            // Build the attribute object first, then the DirRec wrapper.
            SftpATTRS attrs = ent.getAttrs();
            Map<String, Object> attrMap = this.eng.newObj();
            attrMap.put("aTime", this.eng.newDate(((long) attrs.getATime()) * 1000));
            attrMap.put("mTime", this.eng.newDate(((long) attrs.getMTime()) * 1000));
            attrMap.put("aTimeStr", attrs.getAtimeString());
            attrMap.put("mTimeStr", attrs.getMtimeString());
            @SuppressWarnings("unchecked")
            Map<String, Object> extended = (Map<String, Object>) this.eng.newList();
            String[] extItems = attrs.getExtended();
            if (extItems != null)
            {
                for (String ext : extItems)
                {
                    // Append each extended attribute to the JS list.
                    this.eng.invokeMethod(extended, "push", ext);
                }
            }
            attrMap.put("extended", extended);
            attrMap.put("gid", attrs.getGId());
            attrMap.put("uid", attrs.getUId());
            attrMap.put("perms", attrs.getPermissions());
            attrMap.put("permsStr", attrs.getPermissionsString());
            attrMap.put("size", attrs.getSize());
            attrMap.put("isDir", attrs.isDir());
            attrMap.put("isLink", attrs.isLink());
            Map<String, Object> rec = this.eng.newObj("DirRec");
            rec.put("name", ent.getFilename());
            rec.put("longName", ent.getLongname());
            rec.put("attr", attrMap);
            listing.put(ent.getFilename(), rec);
        }
        return listing;
    }
    catch (SftpException e)
    {
        throw new ic9exception("sftp.ls(): " + e.getMessage());
    }
}
/**
 * Gets the server's protocol version.
 * @return An integer with the server protocol version.
 * @throws ic9exception Exception
 */
public int getServerVersion() throws ic9exception {
    try { return this.chan.getServerVersion(); }
    catch (SftpException ex) { throw new ic9exception("sftp.getServerVersion(): " + ex.getMessage()); }
}
/**
 * Reads a symbolic link and returns its target.
 * @param Path Is a String with the symlink to get.
 * @return A String with the target of the provided symlink.
 * @throws ic9exception Exception
 */
public String readLink(String Path) throws ic9exception {
    try { return this.chan.readlink(Path); }
    catch (SftpException ex) { throw new ic9exception("sftp.readLink(): " + ex.getMessage()); }
}
/**
 * Converts a remote path to its absolute version.
 * @param Path Is a String with the path to convert.
 * @return A String with the absolute path.
 * @throws ic9exception Exception
 */
public String realPath(String Path) throws ic9exception {
    try { return this.chan.realpath(Path); }
    catch (SftpException ex) { throw new ic9exception("sftp.realPath(): " + ex.getMessage()); }
}
/**
 * Sets the connect timeout in milliseconds. (Default is 3000.) This must
 * be set prior to calling connect or it has no effect.
 * @param TimeoutMills Is an int with the connect timeout in milliseconds.
 */
public void setConnectTimeout(int TimeoutMills) {
    this.connectTimeoutMills = TimeoutMills;
}
/**
 * Gets the connect timeout in milliseconds.
 * @return An int with the connect timeout in milliseconds.
 */
public int getConnectTimeout() {
    return this.connectTimeoutMills;
}
/**
 * Sets the strict host key checking flag. True is for yes and
 * false is for no. (Default is no.) This must be set prior to calling
 * connect or it has no effect.
 * @param StrictHostKeyChecking Is a boolean with true for strict checking and false for not.
 */
public void setStrictHostKeyChecking(boolean StrictHostKeyChecking) {
    this.strictHostKeyChecking = StrictHostKeyChecking;
}
/**
 * Gets the strict host key checking flag. True is for yes and
 * false is for no. (Default is no.)
 * @return A boolean with true for yes and false for no.
 */
public boolean getStrictHostKeyChecking() {
    return this.strictHostKeyChecking;
}
}
| |
/**
* Licensed to the Sakai Foundation (SF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.sakaiproject.nakamura.activity;
import static org.sakaiproject.nakamura.api.activity.ActivityConstants.ACTIVITY_FEED_RESOURCE_TYPE;
import static org.sakaiproject.nakamura.api.activity.ActivityConstants.ACTIVITY_SOURCE_ITEM_RESOURCE_TYPE;
import static org.sakaiproject.nakamura.api.activity.ActivityConstants.ACTIVITY_ITEM_RESOURCE_TYPE;
import static org.sakaiproject.nakamura.api.activity.ActivityConstants.ACTIVITY_STORE_RESOURCE_TYPE;
import static org.sakaiproject.nakamura.api.activity.ActivityConstants.PARAM_APPLICATION_ID;
import static org.sakaiproject.nakamura.api.activity.ActivityConstants.PARAM_TEMPLATE_ID;
import org.apache.felix.scr.annotations.Properties;
import org.apache.felix.scr.annotations.Property;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.sling.SlingServlet;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import org.apache.sling.api.request.RequestParameter;
import org.apache.sling.api.request.RequestPathInfo;
import org.apache.sling.api.servlets.SlingAllMethodsServlet;
import org.apache.sling.api.wrappers.SlingHttpServletRequestWrapper;
import org.sakaiproject.nakamura.api.doc.BindingType;
import org.sakaiproject.nakamura.api.doc.ServiceBinding;
import org.sakaiproject.nakamura.api.doc.ServiceDocumentation;
import org.sakaiproject.nakamura.api.doc.ServiceMethod;
import org.sakaiproject.nakamura.api.doc.ServiceParameter;
import org.sakaiproject.nakamura.api.doc.ServiceResponse;
import org.sakaiproject.nakamura.api.doc.ServiceSelector;
import org.sakaiproject.nakamura.api.lite.Session;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.StorageClientUtils;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.api.lite.content.Content;
import org.sakaiproject.nakamura.api.resource.lite.SparseContentResource;
import org.sakaiproject.nakamura.util.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
/**
* In addition to the required parameters, properties should be included that will be used
* to fill the bundle (i.e. macro expansions).
* <p>
* Required parameters:<br/>
* applicationId: (i.e. used to locate the bundles)<br/>
* templateId: (i.e. you know - the templateId. Locale will be appended to the templateId
* for resolution.)
*/
@SlingServlet(selectors = { "activity" }, methods = { "POST" }, resourceTypes = { "sparse/Content" }, generateService = true, generateComponent = true)
@Properties(value = {
@Property(name = "service.description", value = "Records the activity related to a particular content"),
@Property(name = "service.vendor", value = "The Sakai Foundation") })
@ServiceDocumentation(name = "ActivityCreateServlet", okForVersion = "1.2",
shortDescription = "Record activity related to a specific node.",
description = "Record activity related to a specific node.",
bindings = @ServiceBinding(type = BindingType.TYPE, bindings = "sparse/Content", selectors = @ServiceSelector(name = "activity")),
methods = {
@ServiceMethod(name = "POST", description = "Perform a post to a particular resource to record activity related to it.",
parameters = {
@ServiceParameter(name = "sakai:activity-appid", description = "i.e. used to locate the bundles"),
@ServiceParameter(name = "sakai:activity-templateid", description = "The id of the template that will be used for text and macro expansion."
+ "Locale will be appended to the templateId for resolution"),
@ServiceParameter(name = "*", description = "You should also include any parameters necessary to fill the template specified in sakai:activity-templateid.")
},
response = {
@ServiceResponse(code = 400, description = "if(applicationId == null || templateId == null || request.getRemoteUser() == null)"),
@ServiceResponse(code = HttpServletResponse.SC_PRECONDITION_FAILED, description = "Cannot record activities on activity content!"),
@ServiceResponse(code = 404, description = "The node was not found.")
})
})
public class LiteActivityCreateServlet extends SlingAllMethodsServlet {
private static final long serialVersionUID = -5367330873214708635L;
private static final Logger LOG = LoggerFactory
.getLogger(LiteActivityCreateServlet.class);
// Service that creates the activity node and invokes the callback below.
@Reference
private ActivityService activityService;
/**
 * Validates the request (application id, template id, authenticated user,
 * non-reserved target resource), then asks the ActivityService to create an
 * activity node and re-dispatches the original POST onto that node.
 *
 * @see org.apache.sling.api.servlets.SlingAllMethodsServlet#doPost(org.apache.sling.api.SlingHttpServletRequest,
 *      org.apache.sling.api.SlingHttpServletResponse)
 */
@Override
protected void doPost(final SlingHttpServletRequest request, final SlingHttpServletResponse response)
throws ServletException, IOException {
// Let's perform some validation on the request parameters.
// Do we have the minimum required?
RequestParameter applicationId = request.getRequestParameter(PARAM_APPLICATION_ID);
if (applicationId == null || "".equals(applicationId.toString())) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"The applicationId parameter must not be null");
return;
}
RequestParameter templateId = request.getRequestParameter(PARAM_TEMPLATE_ID);
if (templateId == null || "".equals(templateId.toString())) {
response.sendError(HttpServletResponse.SC_BAD_REQUEST,
"The templateId parameter must not be null");
return;
}
final String currentUser = request.getRemoteUser();
if (currentUser == null || "".equals(currentUser)) {
response.sendError(HttpServletResponse.SC_UNAUTHORIZED,
"CurrentUser could not be determined, user must be identifiable");
return;
}
// Create or verify that an ActivityStore exists for content node
// An activity store will be created for each node where a .activity gets executed.
Content location = request.getResource().adaptTo(Content.class);
if (location == null) {
response.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
final String resourceType = (String) location.getProperty("sling:resourceType");
// Do not allow for reserved activity resource types
// (recording activity on activity content would recurse).
if (ACTIVITY_STORE_RESOURCE_TYPE.equals(resourceType)
|| ACTIVITY_FEED_RESOURCE_TYPE.equals(resourceType)
|| ACTIVITY_ITEM_RESOURCE_TYPE.equals(resourceType)
|| ACTIVITY_SOURCE_ITEM_RESOURCE_TYPE.equals(resourceType)) {
LOG.info(
"Denied attempt to record an activity against a reserved resourceType: {}",
resourceType);
response.sendError(HttpServletResponse.SC_PRECONDITION_FAILED);
return;
}
try {
// Adapt the JCR session backing this request to a sparse Session.
final Session session = StorageClientUtils.adaptToSession(request.getResourceResolver().adaptTo(
javax.jcr.Session.class));
// NOTE(review): uses session.getUserId() rather than the validated
// currentUser above — confirm the two always agree.
activityService.createActivity(session, location, session.getUserId(), new ActivityServiceCallback() {
public void processRequest(Content activityNode) throws StorageClientException, ServletException, IOException {
RequestPathInfo requestPathInfo = request.getRequestPathInfo();
// Wrapper which needs to remove the .activity selector from RequestPathInfo to
// avoid
// an infinite loop.
final RequestPathInfo wrappedPathInfo = createRequestPathInfo(requestPathInfo,
activityNode.getPath());
// Next insert the new RequestPathInfo into a wrapped Request
SlingHttpServletRequest wrappedRequest = new SlingHttpServletRequestWrapper(request) {
@Override
public RequestPathInfo getRequestPathInfo() {
return wrappedPathInfo;
}
};
// Forward the original POST to the new activity node so the request's
// parameters are written onto it by the default handlers.
SparseContentResource target = new SparseContentResource(activityNode, session, request.getResourceResolver());
request.getRequestDispatcher(target).forward(wrappedRequest, response);
}
});
} catch (StorageClientException e) {
throw new ServletException(e.getMessage(), e);
} catch (AccessDeniedException e) {
throw new ServletException(e.getMessage(), e);
}
}
/**
 * Builds a {@link RequestPathInfo} that points at the newly created activity
 * item and no longer carries the "activity" selector, so forwarding the
 * request does not re-enter this servlet.
 *
 * @param requestPathInfo the original request's path info
 * @param activityItemPath the path of the created activity node
 * @return a path info with the "activity" selector removed and the resource
 *         path replaced by {@code activityItemPath}
 */
protected RequestPathInfo createRequestPathInfo(final RequestPathInfo requestPathInfo,
final String activityItemPath) {
return new RequestPathInfo() {
public String getSuffix() {
return requestPathInfo.getSuffix();
}
public String[] getSelectors() {
return StringUtils.removeString(requestPathInfo.getSelectors(), "activity");
}
public String getSelectorString() {
return requestPathInfo.getSelectorString().replaceAll("\\.activity", "");
}
public String getResourcePath() {
return activityItemPath;
}
public String getExtension() {
return requestPathInfo.getExtension();
}
};
}
}
| |
package com.thebluealliance.spectrum;
import com.thebluealliance.spectrum.internal.ColorItem;
import com.thebluealliance.spectrum.internal.ColorUtil;
import com.thebluealliance.spectrum.internal.SelectedColorChangedEvent;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import android.content.Context;
import android.content.res.TypedArray;
import android.support.annotation.ColorInt;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.LinearLayout;
import java.util.ArrayList;
import java.util.List;
/**
* General-purpose class that displays colors in a grid.
*/
/**
 * General-purpose view that displays colors in a grid and lets the user pick
 * one. The grid is implemented with nested {@link LinearLayout}s: this view
 * itself is the vertical container and each row of swatches is a horizontal
 * child layout.
 */
public class SpectrumPalette extends LinearLayout {

    private static final int DEFAULT_COLUMN_COUNT = 4;

    /** Pixel size of one color swatch. */
    private int mColorItemDimension;
    /** Pixel margin applied on every side of a swatch. */
    private int mColorItemMargin;

    private @ColorInt int[] mColors;
    private @ColorInt int mSelectedColor;
    private OnColorSelectedListener mListener;

    private boolean mAutoPadding = false;
    private boolean mHasFixedColumnCount = false;
    private int mFixedColumnCount = -1;
    private int mOutlineWidth = 0;

    // Vertical padding computed during measurement (leftover horizontal space
    // split in two). The user-supplied top/bottom padding is remembered
    // separately so it can be re-applied on every re-measure.
    private int mComputedVerticalPadding = 0;
    private int mOriginalPaddingTop = 0;
    private int mOriginalPaddingBottom = 0;
    private boolean mSetPaddingCalledInternally = false;

    private int mNumColumns = 2;
    private int mOldNumColumns = -1;
    private boolean mViewInitialized = false;

    private EventBus mEventBus;

    /** Swatch views currently in the grid; rebuilt by {@link #createPaletteView()}. */
    private List<ColorItem> mItems = new ArrayList<>();

    public SpectrumPalette(Context context) {
        super(context);
        init();
    }

    public SpectrumPalette(Context context, AttributeSet attrs) {
        super(context, attrs);
        TypedArray a = getContext().getTheme().obtainStyledAttributes(attrs, R.styleable.SpectrumPalette, 0, 0);
        int id = a.getResourceId(R.styleable.SpectrumPalette_spectrum_colors, 0);
        if (id != 0) {
            mColors = getContext().getResources().getIntArray(id);
        }
        mAutoPadding = a.getBoolean(R.styleable.SpectrumPalette_spectrum_autoPadding, false);
        mOutlineWidth = a.getDimensionPixelSize(R.styleable.SpectrumPalette_spectrum_outlineWidth, 0);
        mFixedColumnCount = a.getInt(R.styleable.SpectrumPalette_spectrum_columnCount, -1);
        if (mFixedColumnCount != -1) {
            mHasFixedColumnCount = true;
        }
        a.recycle();
        mOriginalPaddingTop = getPaddingTop();
        mOriginalPaddingBottom = getPaddingBottom();
        init();
    }

    /** Shared constructor logic: event-bus registration and dimension lookup. */
    private void init() {
        mEventBus = new EventBus();
        mEventBus.register(this);
        mColorItemDimension = getResources().getDimensionPixelSize(R.dimen.color_item_small);
        mColorItemMargin = getResources().getDimensionPixelSize(R.dimen.color_item_margins_small);
        setOrientation(LinearLayout.VERTICAL);
    }

    /**
     * Sets the colors that this palette will display.
     *
     * @param colors an array of ARGB colors
     */
    public void setColors(@ColorInt int[] colors) {
        mColors = colors;
        mViewInitialized = false;
        createPaletteView();
    }

    /**
     * Sets the currently selected color. This should be one of the colors specified via
     * {@link #setColors(int[])}; behavior is undefined if {@code color} is not among those colors.
     * Note: posting the event also invokes any registered listener.
     *
     * @param color the color to be marked as selected
     */
    public void setSelectedColor(@ColorInt int color) {
        mSelectedColor = color;
        mEventBus.post(new SelectedColorChangedEvent(mSelectedColor));
    }

    /**
     * Registers a callback to be invoked when a new color is selected.
     */
    public void setOnColorSelectedListener(OnColorSelectedListener listener) {
        mListener = listener;
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int widthMode = MeasureSpec.getMode(widthMeasureSpec);
        int widthSize = MeasureSpec.getSize(widthMeasureSpec);
        int heightMode = MeasureSpec.getMode(heightMeasureSpec);
        int heightSize = MeasureSpec.getSize(heightMeasureSpec);
        int width, height;
        // First settle the width and column count, then derive the height.
        if (!mHasFixedColumnCount) {
            if (widthMode == MeasureSpec.EXACTLY) {
                width = widthSize;
                mNumColumns = computeColumnCount(widthSize - (getPaddingLeft() + getPaddingRight()));
            } else if (widthMode == MeasureSpec.AT_MOST) {
                width = widthSize;
                mNumColumns = computeColumnCount(widthSize - (getPaddingLeft() + getPaddingRight()));
            } else {
                width = computeWidthForNumColumns(DEFAULT_COLUMN_COUNT) + getPaddingLeft() + getPaddingRight();
                mNumColumns = DEFAULT_COLUMN_COUNT;
            }
        } else {
            width = computeWidthForNumColumns(mFixedColumnCount) + getPaddingLeft() + getPaddingRight();
            mNumColumns = mFixedColumnCount;
        }
        // Half the horizontal space left over after fitting the grid; used to
        // vertically pad the content when auto-padding is on.
        mComputedVerticalPadding = (width - (computeWidthForNumColumns(mNumColumns) + getPaddingLeft() + getPaddingRight())) / 2;
        if (heightMode == MeasureSpec.EXACTLY) {
            height = heightSize;
        } else if (heightMode == MeasureSpec.AT_MOST) {
            int desiredHeight = computeHeight(mNumColumns) + mOriginalPaddingTop + mOriginalPaddingBottom;
            if (mAutoPadding) {
                desiredHeight += (2 * mComputedVerticalPadding);
            }
            height = Math.min(desiredHeight, heightSize);
        } else {
            height = computeHeight(mNumColumns) + mOriginalPaddingTop + mOriginalPaddingBottom;
            if (mAutoPadding) {
                height += (2 * mComputedVerticalPadding);
            }
        }
        if (mAutoPadding) {
            setPaddingInternal(getPaddingLeft(), mOriginalPaddingTop + mComputedVerticalPadding, getPaddingRight(), mOriginalPaddingBottom + mComputedVerticalPadding);
        }
        createPaletteView();
        super.onMeasure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY));
    }

    /** Returns how many swatches (plus margins) fit in {@code maxWidth}, at least 1. */
    private int computeColumnCount(int maxWidth) {
        int numColumns = 0;
        while (((numColumns + 1) * mColorItemDimension) + ((numColumns + 1) * 2 * mColorItemMargin) <= maxWidth) {
            numColumns++;
        }
        // BUGFIX: never report zero columns — a zero column count caused a
        // divide-by-zero in computeHeight() when the available width was
        // smaller than a single swatch.
        return Math.max(numColumns, 1);
    }

    /** Width in pixels required to lay out {@code columnCount} swatches in a row. */
    private int computeWidthForNumColumns(int columnCount) {
        return columnCount * (mColorItemDimension + 2 * mColorItemMargin);
    }

    /** Height in pixels required to show all colors at {@code columnCount} per row. */
    private int computeHeight(int columnCount) {
        if (mColors == null) {
            // View does not have any colors to display, so we won't take up any room
            return 0;
        }
        int rowCount = mColors.length / columnCount;
        if (mColors.length % columnCount != 0) {
            rowCount++;
        }
        return rowCount * (mColorItemDimension + 2 * mColorItemMargin);
    }

    // Applies padding without overwriting the remembered user-set padding.
    private void setPaddingInternal(int left, int top, int right, int bottom) {
        mSetPaddingCalledInternally = true;
        setPadding(left, top, right, bottom);
    }

    @Override
    public void setPadding(int left, int top, int right, int bottom) {
        super.setPadding(left, top, right, bottom);
        // Only external callers update the remembered "original" padding.
        if (!mSetPaddingCalledInternally) {
            mOriginalPaddingTop = top;
            mOriginalPaddingBottom = bottom;
        }
    }

    private int getOriginalPaddingTop() {
        return mOriginalPaddingTop;
    }

    private int getOriginalPaddingBottom() {
        return mOriginalPaddingBottom;
    }

    /**
     * Generates the views to represent this palette's colors. The grid is implemented with
     * {@link LinearLayout}s. This class itself subclasses {@link LinearLayout} and is set up in
     * the vertical orientation. Rows consist of horizontal {@link LinearLayout}s which themselves
     * hold views that display the individual colors.
     */
    protected void createPaletteView() {
        // Only create the view if it hasn't been created yet or if the number of columns has changed
        if (mViewInitialized && mNumColumns == mOldNumColumns) {
            return;
        }
        mViewInitialized = true;
        mOldNumColumns = mNumColumns;
        removeAllViews();
        // BUGFIX: also drop references to the discarded swatches. Previously
        // mItems grew on every rebuild, leaking detached views and making
        // setOutlineWidth() iterate over stale items.
        mItems.clear();
        if (mColors == null) {
            return;
        }
        // Add rows
        int numItemsInRow = 0;
        LinearLayout row = createRow();
        for (int i = 0; i < mColors.length; i++) {
            View colorItem = createColorItem(mColors[i], mSelectedColor);
            row.addView(colorItem);
            numItemsInRow++;
            if (numItemsInRow == mNumColumns) {
                addView(row);
                row = createRow();
                numItemsInRow = 0;
            }
        }
        // Pad the final partial row with invisible spacers so alignment holds.
        if (numItemsInRow > 0) {
            while (numItemsInRow < mNumColumns) {
                row.addView(createSpacer());
                numItemsInRow++;
            }
            addView(row);
        }
    }

    /** Creates one empty horizontal row, centered within this vertical container. */
    private LinearLayout createRow() {
        LinearLayout row = new LinearLayout(getContext());
        row.setOrientation(LinearLayout.HORIZONTAL);
        ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(LayoutParams.MATCH_PARENT,
                LayoutParams.WRAP_CONTENT);
        row.setLayoutParams(params);
        row.setGravity(Gravity.CENTER_HORIZONTAL);
        return row;
    }

    /** Creates one swatch view and registers it in {@link #mItems}. */
    private ColorItem createColorItem(int color, int selectedColor) {
        ColorItem view = new ColorItem(getContext(), color, color == selectedColor, mEventBus);
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(mColorItemDimension, mColorItemDimension);
        params.setMargins(mColorItemMargin, mColorItemMargin, mColorItemMargin, mColorItemMargin);
        view.setLayoutParams(params);
        if (mOutlineWidth != 0) {
            view.setOutlineWidth(mOutlineWidth);
        }
        mItems.add(view);
        return view;
    }

    /** Creates an invisible swatch-sized placeholder used to fill a partial row. */
    private ImageView createSpacer() {
        ImageView view = new ImageView(getContext());
        LinearLayout.LayoutParams params = new LinearLayout.LayoutParams(mColorItemDimension, mColorItemDimension);
        params.setMargins(mColorItemMargin, mColorItemMargin, mColorItemMargin, mColorItemMargin);
        view.setLayoutParams(params);
        return view;
    }

    /** Event-bus callback: records the new selection and notifies the listener. */
    @Subscribe
    public void onSelectedColorChanged(SelectedColorChangedEvent event) {
        mSelectedColor = event.getSelectedColor();
        if (mListener != null) {
            mListener.onColorSelected(mSelectedColor);
        }
    }

    public interface OnColorSelectedListener {
        void onColorSelected(@ColorInt int color);
    }

    /**
     * Returns true if for the given color a dark checkmark is used.
     *
     * @return true if color is "dark"
     */
    public boolean usesDarkCheckmark(@ColorInt int color) {
        return ColorUtil.isColorDark(color);
    }

    /**
     * Change the size of the outlining.
     *
     * @param width in px
     */
    public void setOutlineWidth(int width) {
        mOutlineWidth = width;
        for (ColorItem item : mItems) {
            item.setOutlineWidth(width);
        }
    }

    /**
     * Tells the palette to use a fixed number of columns during layout.
     *
     * @param columnCount how many columns to use; values &lt;= 0 revert to
     *                    automatic column computation
     */
    public void setFixedColumnCount(int columnCount) {
        if (columnCount > 0) {
            Log.d("spectrum", "set column count to " + columnCount);
            mHasFixedColumnCount = true;
            mFixedColumnCount = columnCount;
            requestLayout();
            invalidate();
        } else {
            mHasFixedColumnCount = false;
            mFixedColumnCount = -1;
            requestLayout();
            invalidate();
        }
    }
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.map.impl.recordstore;
import com.hazelcast.concurrent.lock.LockService;
import com.hazelcast.config.InMemoryFormat;
import com.hazelcast.config.NativeMemoryConfig;
import com.hazelcast.core.EntryView;
import com.hazelcast.logging.ILogger;
import com.hazelcast.map.impl.EntryViews;
import com.hazelcast.map.impl.MapContainer;
import com.hazelcast.map.impl.MapEntries;
import com.hazelcast.map.impl.MapKeyLoader;
import com.hazelcast.map.impl.MapService;
import com.hazelcast.map.impl.MapServiceContext;
import com.hazelcast.map.impl.event.EntryEventData;
import com.hazelcast.map.impl.iterator.MapEntriesWithCursor;
import com.hazelcast.map.impl.iterator.MapKeysWithCursor;
import com.hazelcast.map.impl.mapstore.MapDataStore;
import com.hazelcast.map.impl.mapstore.writebehind.WriteBehindQueue;
import com.hazelcast.map.impl.mapstore.writebehind.WriteBehindStore;
import com.hazelcast.map.impl.mapstore.writebehind.entry.DelayedEntry;
import com.hazelcast.map.impl.querycache.QueryCacheContext;
import com.hazelcast.map.impl.querycache.publisher.MapPublisherRegistry;
import com.hazelcast.map.impl.querycache.publisher.PublisherContext;
import com.hazelcast.map.impl.querycache.publisher.PublisherRegistry;
import com.hazelcast.map.impl.record.Record;
import com.hazelcast.map.impl.record.Records;
import com.hazelcast.map.merge.MapMergePolicy;
import com.hazelcast.nio.serialization.Data;
import com.hazelcast.query.impl.Indexes;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.ObjectNamespace;
import com.hazelcast.spi.exception.RetryableHazelcastException;
import com.hazelcast.util.Clock;
import com.hazelcast.util.CollectionUtil;
import com.hazelcast.util.ExceptionUtil;
import com.hazelcast.util.FutureUtil;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.Future;
import static com.hazelcast.config.NativeMemoryConfig.MemoryAllocatorType.POOLED;
import static com.hazelcast.core.EntryEventType.ADDED;
import static com.hazelcast.map.impl.ExpirationTimeSetter.updateExpiryTime;
import static com.hazelcast.map.impl.mapstore.MapDataStores.EMPTY_MAP_DATA_STORE;
import static com.hazelcast.util.MapUtil.createHashMap;
import static java.util.Collections.emptyList;
/**
* Default implementation of record-store.
*/
public class DefaultRecordStore extends AbstractEvictableRecordStore {
// Logger for this store; supplied by the creating service (see constructor).
protected final ILogger logger;
// Loader that populates this store from the configured map store context.
protected final RecordStoreLoader recordStoreLoader;
// Used to trigger initial key loading and migration promotion; see startLoading().
protected final MapKeyLoader keyLoader;
/**
 * A collection of futures representing pending completion of the key and
 * value loading tasks.
 * The loadingFutures are modified by partition threads and can be accessed
 * by query threads.
 *
 * @see #loadAll(boolean)
 * @see #loadAllFromStore(List, boolean)
 */
protected final Collection<Future> loadingFutures = new ConcurrentLinkedQueue<Future>();
/**
 * The record store may be created with or without triggering the load.
 * This flag guards that the loading on create is invoked not more than
 * once should the record store be migrated.
 */
private boolean loadedOnCreate;
/**
 * Records if the record store on the migration source has been loaded.
 * If the record store has already been loaded, the migration target should
 * NOT trigger loading again on migration commit, otherwise it may trigger
 * key loading.
 */
private boolean loadedOnPreMigration;
public DefaultRecordStore(MapContainer mapContainer, int partitionId,
MapKeyLoader keyLoader, ILogger logger) {
super(mapContainer, partitionId);
this.logger = logger;
this.keyLoader = keyLoader;
this.recordStoreLoader = createRecordStoreLoader(mapStoreContext);
this.loadedOnCreate = false;
}
@Override
public void startLoading() {
if (logger.isFinestEnabled()) {
logger.finest("StartLoading invoked " + getStateMessage());
}
if (mapStoreContext.isMapLoader() && !loadedOnCreate) {
if (!loadedOnPreMigration) {
if (logger.isFinestEnabled()) {
logger.finest("Triggering load " + getStateMessage());
}
loadedOnCreate = true;
loadingFutures.add(keyLoader.startInitialLoad(mapStoreContext, partitionId));
} else {
if (logger.isFinestEnabled()) {
logger.finest("Promoting to loaded on migration " + getStateMessage());
}
keyLoader.promoteToLoadedOnMigration();
}
}
}
@Override
public void setPreMigrationLoadedStatus(boolean loaded) {
loadedOnPreMigration = loaded;
}
@Override
public boolean isLoaded() {
return FutureUtil.allDone(loadingFutures);
}
    /**
     * Starts loading all keys from the map store for this partition.
     * The returned future is tracked in {@link #loadingFutures} so operations
     * can wait for completion via {@link #checkIfLoaded()}.
     *
     * @param replaceExistingValues whether loaded values overwrite in-memory entries
     */
    @Override
    public void loadAll(boolean replaceExistingValues) {
        if (logger.isFinestEnabled()) {
            logger.finest("loadAll invoked " + getStateMessage());
        }
        logger.info("Starting to load all keys for map " + name + " on partitionId=" + partitionId);
        Future<?> loadingKeysFuture = keyLoader.startLoading(mapStoreContext, replaceExistingValues);
        loadingFutures.add(loadingKeysFuture);
    }

    /**
     * Asynchronously loads the values for the given keys from the map store.
     *
     * @param keys                  serialized keys to load; no-op when empty
     * @param replaceExistingValues whether loaded values overwrite in-memory entries
     */
    @Override
    public void loadAllFromStore(List<Data> keys, boolean replaceExistingValues) {
        if (!keys.isEmpty()) {
            Future f = recordStoreLoader.loadValues(keys, replaceExistingValues);
            loadingFutures.add(f);
        }
        // We should not track key loading here. IT's not key loading but values loading.
        // Apart from that it's irrelevant for RECEIVER nodes. SENDER and SENDER_BACKUP will track the key-loading anyway.
        // Fixes https://github.com/hazelcast/hazelcast/issues/9255
    }
@Override
public void updateLoadStatus(boolean lastBatch, Throwable exception) {
keyLoader.trackLoading(lastBatch, exception);
if (lastBatch) {
logger.finest("Completed loading map " + name + " on partitionId=" + partitionId);
}
}
    // Triggers a full key load only when this member owns the key-loading role for the map.
    @Override
    public void maybeDoInitialLoad() {
        if (keyLoader.shouldDoInitialLoad()) {
            loadAll(false);
        }
    }

    /**
     * Destroys this record store: clears the partition's data, releases the
     * backing storage and drops the partition's event journal.
     */
    @Override
    public void destroy() {
        clearPartition(false);
        storage.destroy(false);
        eventJournal.destroy(mapContainer.getObjectNamespace(), partitionId);
    }

    // True once the MapLoader key-loading phase has finished for this partition.
    @Override
    public boolean isKeyLoadFinished() {
        return keyLoader.isKeyLoadFinished();
    }
@Override
public void checkIfLoaded() {
if (loadingFutures.isEmpty()) {
return;
}
if (isLoaded()) {
List<Future> doneFutures = null;
try {
doneFutures = FutureUtil.getAllDone(loadingFutures);
// check all finished loading futures for exceptions
FutureUtil.checkAllDone(doneFutures);
} catch (Exception e) {
logger.severe("Exception while loading map " + name, e);
ExceptionUtil.rethrow(e);
} finally {
loadingFutures.removeAll(doneFutures);
}
} else {
keyLoader.triggerLoadingWithDelay();
throw new RetryableHazelcastException("Map " + getName()
+ " is still loading data from external store");
}
}
    /**
     * Flushes the map data store (e.g. the write-behind queue) after refreshing
     * per-record store statistics.
     *
     * @return the value reported by the underlying map-data-store flush
     */
    @Override
    public long softFlush() {
        updateStoreStats();
        return mapDataStore.softFlush();
    }
/**
* Flushes evicted records to map store.
*
* @param recordsToBeFlushed records to be flushed to map-store.
* @param backup <code>true</code> if backup, false otherwise.
*/
protected void flush(Collection<Record> recordsToBeFlushed, boolean backup) {
Iterator<Record> iterator = recordsToBeFlushed.iterator();
while (iterator.hasNext()) {
Record record = iterator.next();
mapDataStore.flush(record.getKey(), record.getValue(), backup);
}
}
    // Plain storage lookup; does not check expiry or update access statistics.
    @Override
    public Record getRecord(Data key) {
        return storage.get(key);
    }

    /**
     * Puts an already-created record into storage (e.g. during migration),
     * publishing an add event to the event journal and updating put statistics.
     */
    @Override
    public void putRecord(Data key, Record record) {
        markRecordStoreExpirable(record.getTtl());
        storage.put(key, record);
        eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                key, record.getValue());
        updateStatsOnPut(record.getHits());
    }

    // Convenience overload: non-transient backup put with the default TTL.
    @Override
    public Record putBackup(Data key, Object value) {
        return putBackup(key, value, DEFAULT_TTL, false);
    }

    /**
     * Applies a put on this backup replica.
     *
     * @param putTransient when {@code true} the entry is marked transient in the
     *                     map data store instead of being scheduled as a backup write
     * @return the created or updated record
     */
    @Override
    public Record putBackup(Data key, Object value, long ttl, boolean putTransient) {
        long now = getNow();
        markRecordStoreExpirable(ttl);
        Record record = getRecordOrNull(key, now, true);
        if (record == null) {
            record = createRecord(value, ttl, now);
            storage.put(key, record);
            eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    key, record.getValue());
        } else {
            updateRecord(key, record, value, now);
        }
        if (putTransient) {
            mapDataStore.addTransient(key, now);
        } else {
            mapDataStore.addBackup(key, value, now);
        }
        return record;
    }
    // Read-only iteration over all records; expiry is NOT checked here.
    public Iterator<Record> iterator() {
        return new ReadOnlyRecordIterator(storage.values());
    }

    // Read-only iteration that skips records expired at the given timestamp.
    @Override
    public Iterator<Record> iterator(long now, boolean backup) {
        return new ReadOnlyRecordIterator(storage.values(), now, backup);
    }

    // Cursor-based key scan used by partition-safe map iterators.
    @Override
    public MapKeysWithCursor fetchKeys(int tableIndex, int size) {
        return storage.fetchKeys(tableIndex, size);
    }

    // Cursor-based entry scan used by partition-safe map iterators.
    @Override
    public MapEntriesWithCursor fetchEntries(int tableIndex, int size) {
        return storage.fetchEntries(tableIndex, size, serializationService);
    }

    // Like iterator(now, backup), but first ensures map-store loading has finished.
    @Override
    public Iterator<Record> loadAwareIterator(long now, boolean backup) {
        checkIfLoaded();
        return iterator(now, backup);
    }
    /**
     * Clears this partition's data: the lock store, the indexes, the
     * map-data-store state and the record storage itself.
     *
     * @param onShutdown {@code true} when invoked as part of node shutdown, in
     *                   which case native storage may be destroyed directly
     */
    @Override
    public void clearPartition(boolean onShutdown) {
        NodeEngine nodeEngine = mapServiceContext.getNodeEngine();
        LockService lockService = nodeEngine.getSharedService(LockService.SERVICE_NAME);
        if (lockService != null) {
            ObjectNamespace namespace = MapService.getObjectNamespace(name);
            lockService.clearLockStore(partitionId, namespace);
        }
        Indexes indexes = mapContainer.getIndexes(partitionId);
        if (indexes.isGlobal()) {
            // A global index is shared across partitions, so only this
            // partition's entries may be removed from it, one by one.
            if (indexes.hasIndex()) {
                for (Record record : storage.values()) {
                    Data key = record.getKey();
                    Object value = Records.getValueOrCachedValue(record, serializationService);
                    indexes.removeEntryIndex(key, value);
                }
            }
        } else {
            // A partitioned index belongs to this partition alone and can be dropped wholesale.
            indexes.clearIndexes();
        }
        mapDataStore.reset();
        if (onShutdown) {
            // With a POOLED native allocator the whole pool is torn down at
            // shutdown, so clearing individual entries first is skipped.
            NativeMemoryConfig nativeMemoryConfig = nodeEngine.getConfig().getNativeMemoryConfig();
            boolean shouldClear = (nativeMemoryConfig != null && nativeMemoryConfig.getAllocatorType() != POOLED);
            if (shouldClear) {
                storage.clear(true);
            }
            storage.destroy(true);
        } else {
            storage.clear(false);
        }
    }
    /**
     * Size may not give precise size at a specific moment
     * due to the expiration logic. But eventually, it should be correct.
     *
     * @return record store size.
     */
    @Override
    public int size() {
        // do not add checkIfLoaded(), size() is also used internally
        return storage.size();
    }

    // Unlike size(), this waits for map-store loading to finish first.
    @Override
    public boolean isEmpty() {
        checkIfLoaded();
        return storage.isEmpty();
    }

    /**
     * Returns whether any non-expired record currently holds the given value.
     * The probe value is converted once up front to this store's in-memory
     * format so the per-record comparison avoids repeated conversions.
     */
    @Override
    public boolean containsValue(Object value) {
        checkIfLoaded();
        long now = getNow();
        Collection<Record> records = storage.values();
        if (!records.isEmpty()) {
            // optimisation to skip serialisation/deserialisation
            // in each call to RecordComparator.isEqual()
            value = inMemoryFormat == InMemoryFormat.OBJECT
                    ? serializationService.toObject(value)
                    : serializationService.toData(value);
        }
        for (Record record : records) {
            if (getOrNullIfExpired(record, now, false) == null) {
                continue;
            }
            if (recordComparator.isEqual(value, record.getValue())) {
                return true;
            }
        }
        return false;
    }
    // ------------------------------------------------------------------
    // Locking: the operations below delegate to the partition's lock store.
    // A null lockStore means locking is not available, in which case lock
    // mutations report failure (false) and lock queries report "not locked"
    // (canAcquireLock reports true).
    // ------------------------------------------------------------------

    @Override
    public boolean txnLock(Data key, String caller, long threadId, long referenceId, long ttl, boolean blockReads) {
        checkIfLoaded();
        return lockStore != null && lockStore.txnLock(key, caller, threadId, referenceId, ttl, blockReads);
    }

    @Override
    public boolean extendLock(Data key, String caller, long threadId, long ttl) {
        checkIfLoaded();
        return lockStore != null && lockStore.extendLeaseTime(key, caller, threadId, ttl);
    }

    @Override
    public boolean localLock(Data key, String caller, long threadId, long referenceId, long ttl) {
        checkIfLoaded();
        return lockStore != null && lockStore.localLock(key, caller, threadId, referenceId, ttl);
    }

    @Override
    public boolean unlock(Data key, String caller, long threadId, long referenceId) {
        checkIfLoaded();
        return lockStore != null && lockStore.unlock(key, caller, threadId, referenceId);
    }

    @Override
    public boolean lock(Data key, String caller, long threadId, long referenceId, long ttl) {
        checkIfLoaded();
        return lockStore != null && lockStore.lock(key, caller, threadId, referenceId, ttl);
    }

    // The query/force operations below intentionally skip checkIfLoaded().
    @Override
    public boolean forceUnlock(Data dataKey) {
        return lockStore != null && lockStore.forceUnlock(dataKey);
    }

    @Override
    public boolean isLocked(Data dataKey) {
        return lockStore != null && lockStore.isLocked(dataKey);
    }

    @Override
    public boolean isTransactionallyLocked(Data key) {
        return lockStore != null && lockStore.shouldBlockReads(key);
    }

    @Override
    public boolean canAcquireLock(Data key, String caller, long threadId) {
        return lockStore == null || lockStore.canAcquireLock(key, caller, threadId);
    }

    @Override
    public boolean isLockedBy(Data key, String caller, long threadId) {
        return lockStore != null && lockStore.isLockedBy(key, caller, threadId);
    }

    @Override
    public String getLockOwnerInfo(Data key) {
        return lockStore != null ? lockStore.getOwnerInfo(key) : null;
    }
    /**
     * Attempts to load the value for {@code key} from the map store and, when
     * found, creates and stores a record for it (indexing it on the owner).
     *
     * @return the newly created record, or {@code null} when the store has no value
     */
    @Override
    public Record loadRecordOrNull(Data key, boolean backup) {
        Record record = null;
        Object value = mapDataStore.load(key);
        if (value != null) {
            record = createRecord(value, DEFAULT_TTL, getNow());
            storage.put(key, record);
            eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    key, record.getValue());
            if (!backup) {
                saveIndex(record, null);
            }
            // the new entry may push the store over its eviction threshold
            evictEntries(key);
        }
        // here, we are only publishing events for loaded entries. This is required for notifying query-caches
        // otherwise query-caches cannot see loaded entries
        if (!backup && record != null && hasQueryCache()) {
            addEventToQueryCache(record);
        }
        return record;
    }
    /**
     * Removes all non-locked entries from this partition, its indexes and the
     * map store.
     *
     * @return the number of removed records
     */
    @Override
    public int clear() {
        checkIfLoaded();
        // we don't remove locked keys. These are clearable records.
        Collection<Record> clearableRecords = getNotLockedRecords();
        // This conversion is required by mapDataStore#removeAll call.
        List<Data> keys = getKeysFromRecords(clearableRecords);
        mapDataStore.removeAll(keys);
        mapDataStore.reset();
        removeIndex(clearableRecords);
        return removeRecords(clearableRecords);
    }
protected List<Data> getKeysFromRecords(Collection<Record> clearableRecords) {
List<Data> keys = new ArrayList<Data>(clearableRecords.size());
for (Record clearableRecord : clearableRecords) {
keys.add(clearableRecord.getKey());
}
return keys;
}
    /**
     * Removes the given records from storage, writing a remove event for each.
     * The collection is drained via its iterator.
     * NOTE(review): when the collection is the live {@code storage.values()}
     * view, {@code iterator.remove()} presumably mutates the backing storage
     * as well — confirm against the Storage implementation.
     *
     * @return the number of records that were in the collection
     */
    protected int removeRecords(Collection<Record> recordsToRemove) {
        if (CollectionUtil.isEmpty(recordsToRemove)) {
            return 0;
        }
        int removalSize = recordsToRemove.size();
        Iterator<Record> iterator = recordsToRemove.iterator();
        while (iterator.hasNext()) {
            Record record = iterator.next();
            eventJournal.writeRemoveEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    record.getKey(), record.getValue());
            storage.removeRecord(record);
            iterator.remove();
        }
        return removalSize;
    }

    /**
     * Returns the records whose keys are currently not locked.
     * When nothing is locked the live storage view is returned directly;
     * otherwise a snapshot list of the unlocked records is built.
     */
    protected Collection<Record> getNotLockedRecords() {
        Set<Data> lockedKeySet = lockStore == null ? null : lockStore.getLockedKeys();
        if (CollectionUtil.isEmpty(lockedKeySet)) {
            return storage.values();
        }
        int notLockedKeyCount = storage.size() - lockedKeySet.size();
        if (notLockedKeyCount <= 0) {
            return emptyList();
        }
        List<Record> notLockedRecords = new ArrayList<Record>(notLockedKeyCount);
        Collection<Record> records = storage.values();
        for (Record record : records) {
            if (!lockedKeySet.contains(record.getKey())) {
                notLockedRecords.add(record);
            }
        }
        return notLockedRecords;
    }
    /**
     * Resets the record store to it's initial state.
     */
    @Override
    public void reset() {
        mapDataStore.reset();
        storage.clear(false);
        eventJournal.destroy(mapContainer.getObjectNamespace(), partitionId);
        stats.reset();
    }

    /**
     * Evicts a single entry: flushes it to the map store, removes it from the
     * index and storage, and (on the owner) runs the remove interceptors.
     *
     * @return the evicted value, or {@code null} when the key was absent
     */
    @Override
    public Object evict(Data key, boolean backup) {
        Record record = storage.get(key);
        Object value = null;
        if (record != null) {
            value = record.getValue();
            mapDataStore.flush(key, value, backup);
            removeIndex(record);
            eventJournal.writeEvictEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    key, value);
            storage.removeRecord(record);
            if (!backup) {
                mapServiceContext.interceptRemove(name, value);
            }
        }
        return value;
    }

    /**
     * Evicts all non-locked entries, flushing them to the map store first.
     *
     * @return the number of evicted records
     */
    @Override
    public int evictAll(boolean backup) {
        checkIfLoaded();
        Collection<Record> evictableRecords = getNotLockedRecords();
        flush(evictableRecords, backup);
        removeIndex(evictableRecords);
        return removeRecords(evictableRecords);
    }
    // Backup counterpart of remove(): deletes the record and informs the
    // map data store, without interceptors or index maintenance.
    @Override
    public void removeBackup(Data key) {
        long now = getNow();
        Record record = getRecordOrNull(key, now, true);
        if (record == null) {
            return;
        }
        eventJournal.writeRemoveEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                record.getKey(), record.getValue());
        storage.removeRecord(record);
        mapDataStore.removeBackup(key, now);
    }

    /**
     * Removes the entry for {@code key}.
     * When the entry lives only in the map store (not in memory), it is removed
     * from the store and the loaded old value is returned.
     *
     * @return the previous value, or {@code null} if there was none
     */
    @Override
    public Object remove(Data key) {
        checkIfLoaded();
        long now = getNow();
        Record record = getRecordOrNull(key, now, false);
        Object oldValue;
        if (record == null) {
            oldValue = mapDataStore.load(key);
            if (oldValue != null) {
                mapDataStore.remove(key, now);
            }
        } else {
            oldValue = removeRecord(key, record, now);
        }
        return oldValue;
    }
@Override
public boolean remove(Data key, Object testValue) {
checkIfLoaded();
long now = getNow();
Record record = getRecordOrNull(key, now, false);
Object oldValue;
boolean removed = false;
if (record == null) {
oldValue = mapDataStore.load(key);
if (oldValue == null) {
return false;
}
} else {
oldValue = record.getValue();
}
if (recordComparator.isEqual(testValue, oldValue)) {
mapServiceContext.interceptRemove(name, oldValue);
removeIndex(record);
mapDataStore.remove(key, now);
onStore(record);
eventJournal.writeRemoveEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
key, oldValue);
storage.removeRecord(record);
removed = true;
}
return removed;
}
    /**
     * Deletes the entry for {@code key}, always propagating the delete to the
     * map store even when the key is not held in memory.
     *
     * @return {@code true} when an in-memory record was removed
     */
    @Override
    public boolean delete(Data key) {
        checkIfLoaded();
        long now = getNow();
        Record record = getRecordOrNull(key, now, false);
        if (record == null) {
            mapDataStore.remove(key, now);
        } else {
            return removeRecord(key, record, now) != null;
        }
        return false;
    }

    /**
     * Returns the value for {@code key}, reading through to the map store when
     * the key is not in memory. Access statistics are updated and the get
     * interceptor chain is applied to the result.
     */
    @Override
    public Object get(Data key, boolean backup) {
        checkIfLoaded();
        long now = getNow();
        Record record = getRecordOrNull(key, now, backup);
        if (record == null) {
            record = loadRecordOrNull(key, backup);
        } else {
            accessRecord(record, now);
        }
        Object value = record == null ? null : record.getValue();
        value = mapServiceContext.interceptGet(name, value);
        return value;
    }

    /**
     * Reads a value from this backup replica without load-through.
     *
     * @return the serialized value, or {@code null} when absent
     */
    @Override
    public Data readBackupData(Data key) {
        Record record = getRecord(key);
        if (record == null) {
            return null;
        }
        Object value = record.getValue();
        mapServiceContext.interceptAfterGet(name, value);
        // this serialization step is needed not to expose the object, see issue 1292
        return mapServiceContext.toData(value);
    }
    /**
     * Returns the entries for the given keys, loading missing ones from the
     * map store.
     * NOTE: the passed-in set is mutated — keys found in memory are removed
     * from it so that only store-loadable keys remain for loadEntries().
     */
    @Override
    public MapEntries getAll(Set<Data> keys) {
        checkIfLoaded();
        long now = getNow();
        MapEntries mapEntries = new MapEntries(keys.size());
        Iterator<Data> iterator = keys.iterator();
        while (iterator.hasNext()) {
            Data key = iterator.next();
            Record record = getRecordOrNull(key, now, false);
            if (record != null) {
                addMapEntrySet(key, record.getValue(), mapEntries);
                accessRecord(record, now);
                iterator.remove();
            }
        }
        Map loadedEntries = loadEntries(keys);
        addMapEntrySet(loadedEntries, mapEntries);
        return mapEntries;
    }

    /**
     * Loads the given keys from the map store, puts the results into this
     * record store and publishes query-cache events for them.
     *
     * @return map of serialized key to loaded value (empty when nothing was loaded)
     */
    protected Map<Data, Object> loadEntries(Set<Data> keys) {
        Map loadedEntries = mapDataStore.loadAll(keys);
        if (loadedEntries == null || loadedEntries.isEmpty()) {
            return Collections.emptyMap();
        }
        // holds serialized keys and if values are serialized, also holds them in serialized format.
        Map<Data, Object> resultMap = createHashMap(loadedEntries.size());
        // add loaded key-value pairs to this record-store.
        Set entrySet = loadedEntries.entrySet();
        for (Object object : entrySet) {
            Map.Entry entry = (Map.Entry) object;
            Data key = toData(entry.getKey());
            Object value = entry.getValue();
            resultMap.put(key, value);
            putFromLoad(key, value);
        }
        if (hasQueryCache()) {
            for (Data key : resultMap.keySet()) {
                Record record = storage.get(key);
                // here we are only publishing events for loaded entries. This is required for notifying query-caches
                // otherwise query-caches cannot see loaded entries
                addEventToQueryCache(record);
            }
        }
        return resultMap;
    }
protected void addMapEntrySet(Object key, Object value, MapEntries mapEntries) {
if (key == null || value == null) {
return;
}
value = mapServiceContext.interceptGet(name, value);
Data dataKey = mapServiceContext.toData(key);
Data dataValue = mapServiceContext.toData(value);
mapEntries.add(dataKey, dataValue);
}
protected void addMapEntrySet(Map<Object, Object> entries, MapEntries mapEntries) {
for (Map.Entry<Object, Object> entry : entries.entrySet()) {
addMapEntrySet(entry.getKey(), entry.getValue(), mapEntries);
}
}
    // Pure in-memory check: no load-through, no expiry handling, no stats.
    @Override
    public boolean existInMemory(Data key) {
        return storage.containsKey(key);
    }

    // Read-through containment check; updates access statistics on a hit.
    @Override
    public boolean containsKey(Data key) {
        checkIfLoaded();
        long now = getNow();
        Record record = getRecordOrNull(key, now, false);
        if (record == null) {
            record = loadRecordOrNull(key, false);
        }
        boolean contains = record != null;
        if (contains) {
            accessRecord(record, now);
        }
        return contains;
    }

    /**
     * @return {@code true} if this IMap has any query-cache, otherwise return {@code false}
     */
    private boolean hasQueryCache() {
        QueryCacheContext queryCacheContext = mapServiceContext.getQueryCacheContext();
        PublisherContext publisherContext = queryCacheContext.getPublisherContext();
        MapPublisherRegistry mapPublisherRegistry = publisherContext.getMapPublisherRegistry();
        PublisherRegistry publisherRegistry = mapPublisherRegistry.getOrNull(name);
        return publisherRegistry != null;
    }

    // Publishes an ADDED event for a loaded record so query caches observe it.
    private void addEventToQueryCache(Record record) {
        EntryEventData eventData = new EntryEventData(thisAddress.toString(), name, thisAddress,
                record.getKey(), mapServiceContext.toData(record.getValue()), null, null, ADDED.getType());
        mapEventPublisher.addEventToQueryCache(eventData);
    }
    // Standard put: loads through to the map store so the old value can be returned.
    @Override
    public Object put(Data key, Object value, long ttl) {
        return putInternal(key, value, ttl, true);
    }

    /**
     * Shared implementation of put/set.
     *
     * @param loadFromStore whether to load the old value from the map store
     *                      when the key is absent in memory (put needs the old
     *                      value, set does not)
     * @return the previous value, or {@code null}
     */
    protected Object putInternal(Data key, Object value, long ttl, boolean loadFromStore) {
        checkIfLoaded();
        long now = getNow();
        markRecordStoreExpirable(ttl);
        Record record = getRecordOrNull(key, now, false);
        Object oldValue = record == null ? (loadFromStore ? mapDataStore.load(key) : null) : record.getValue();
        value = mapServiceContext.interceptPut(name, oldValue, value);
        // the store may substitute the value (e.g. write-behind wrapping)
        value = mapDataStore.add(key, value, now);
        onStore(record);
        if (record == null) {
            record = createRecord(value, ttl, now);
            storage.put(key, record);
            eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    record.getKey(), record.getValue());
        } else {
            updateRecord(key, record, value, now);
            updateExpiryTime(record, ttl, mapContainer.getMapConfig());
        }
        saveIndex(record, oldValue);
        return oldValue;
    }
    /**
     * Merges an incoming entry view (e.g. WAN replication or split-brain
     * healing) into this store using the given merge policy.
     * <p>
     * A {@code null} merge result removes any existing entry; a result equal
     * to the existing value short-circuits without touching the map store.
     *
     * @return {@code true} when the store ends up containing a value for the key
     */
    @Override
    public boolean merge(Data key, EntryView mergingEntry, MapMergePolicy mergePolicy) {
        checkIfLoaded();
        long now = getNow();
        Record record = getRecordOrNull(key, now, false);
        mergingEntry = EntryViews.convertToLazyEntryView(mergingEntry, serializationService, mergePolicy);
        Object newValue;
        Object oldValue = null;
        if (record == null) {
            Object notExistingKey = mapServiceContext.toObject(key);
            EntryView nullEntryView = EntryViews.createNullEntryView(notExistingKey);
            newValue = mergePolicy.merge(name, mergingEntry, nullEntryView);
            if (newValue == null) {
                return false;
            }
            newValue = mapDataStore.add(key, newValue, now);
            record = createRecord(newValue, DEFAULT_TTL, now);
            mergeRecordExpiration(record, mergingEntry);
            storage.put(key, record);
            eventJournal.writeUpdateEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    key, null, record.getValue());
        } else {
            oldValue = record.getValue();
            EntryView existingEntry = EntryViews.createLazyEntryView(record.getKey(), record.getValue(),
                    record, serializationService, mergePolicy);
            newValue = mergePolicy.merge(name, mergingEntry, existingEntry);
            // existing entry will be removed
            if (newValue == null) {
                removeIndex(record);
                mapDataStore.remove(key, now);
                onStore(record);
                eventJournal.writeUpdateEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(),
                        partitionId, key, oldValue, null);
                storage.removeRecord(record);
                return true;
            }
            // identity comparison: the policy chose the incoming entry wholesale,
            // so adopt its expiration metadata as well
            if (newValue == mergingEntry.getValue()) {
                mergeRecordExpiration(record, mergingEntry);
            }
            // same with the existing entry so no need to map-store etc operations.
            if (recordComparator.isEqual(newValue, oldValue)) {
                return true;
            }
            newValue = mapDataStore.add(key, newValue, now);
            onStore(record);
            eventJournal.writeUpdateEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    key, oldValue, newValue);
            storage.updateRecordValue(key, record, newValue);
        }
        saveIndex(record, oldValue);
        return newValue != null;
    }
    // TODO why does not replace method load data from map store if currently not available in memory.
    /**
     * Unconditionally replaces the value of an existing in-memory entry.
     *
     * @return the previous value, or {@code null} when the key is absent in memory
     */
    @Override
    public Object replace(Data key, Object update) {
        checkIfLoaded();
        long now = getNow();
        Record record = getRecordOrNull(key, now, false);
        if (record == null || record.getValue() == null) {
            return null;
        }
        Object oldValue = record.getValue();
        update = mapServiceContext.interceptPut(name, oldValue, update);
        update = mapDataStore.add(key, update, now);
        onStore(record);
        updateRecord(key, record, update, now);
        updateExpiryTime(record, record.getTtl(), mapContainer.getMapConfig());
        saveIndex(record, oldValue);
        return oldValue;
    }
@Override
public boolean replace(Data key, Object expect, Object update) {
checkIfLoaded();
long now = getNow();
Record record = getRecordOrNull(key, now, false);
if (record == null) {
return false;
}
MapServiceContext mapServiceContext = this.mapServiceContext;
Object current = record.getValue();
if (!recordComparator.isEqual(expect, current)) {
return false;
}
update = mapServiceContext.interceptPut(name, current, update);
update = mapDataStore.add(key, update, now);
onStore(record);
updateRecord(key, record, update, now);
updateExpiryTime(record, record.getTtl(), mapContainer.getMapConfig());
saveIndex(record, current);
return true;
}
    /**
     * Puts a transient entry: the value is stored in memory and marked
     * transient in the map data store (not written through).
     *
     * @return the previous in-memory value, or {@code null}
     */
    @Override
    public Object putTransient(Data key, Object value, long ttl) {
        checkIfLoaded();
        long now = getNow();
        markRecordStoreExpirable(ttl);
        Record record = getRecordOrNull(key, now, false);
        Object oldValue = null;
        if (record == null) {
            value = mapServiceContext.interceptPut(name, null, value);
            record = createRecord(value, ttl, now);
            storage.put(key, record);
            eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    record.getKey(), record.getValue());
        } else {
            oldValue = record.getValue();
            value = mapServiceContext.interceptPut(name, oldValue, value);
            updateRecord(key, record, value, now);
            updateExpiryTime(record, ttl, mapContainer.getMapConfig());
        }
        saveIndex(record, oldValue);
        mapDataStore.addTransient(key, now);
        return oldValue;
    }
    // Owner-side put of a value loaded from the map store, using the default TTL.
    @Override
    public Object putFromLoad(Data key, Object value) {
        return putFromLoadInternal(key, value, DEFAULT_TTL, false);
    }

    // Backup-side put of a loaded value; index maintenance is skipped.
    @Override
    public Object putFromLoadBackup(Data key, Object value) {
        return putFromLoadInternal(key, value, DEFAULT_TTL, true);
    }

    // Owner-side put of a loaded value with an explicit TTL.
    @Override
    public Object putFromLoad(Data key, Object value, long ttl) {
        return putFromLoadInternal(key, value, ttl, false);
    }

    /**
     * Inserts or updates an entry with a value that came from the map store.
     * Loading is silently skipped for null keys/values and when the store is
     * under eviction pressure ({@code shouldEvict()}).
     *
     * @return the previous in-memory value, or {@code null}
     */
    private Object putFromLoadInternal(Data key, Object value, long ttl, boolean backup) {
        if (!isKeyAndValueLoadable(key, value)) {
            return null;
        }
        long now = getNow();
        if (shouldEvict()) {
            return null;
        }
        markRecordStoreExpirable(ttl);
        Record record = getRecordOrNull(key, now, false);
        Object oldValue = null;
        if (record == null) {
            value = mapServiceContext.interceptPut(name, null, value);
            record = createRecord(value, ttl, now);
            storage.put(key, record);
            eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    record.getKey(), record.getValue());
        } else {
            oldValue = record.getValue();
            value = mapServiceContext.interceptPut(name, oldValue, value);
            updateRecord(key, record, value, now);
            updateExpiryTime(record, ttl, mapContainer.getMapConfig());
        }
        if (!backup) {
            saveIndex(record, oldValue);
        }
        return oldValue;
    }
protected boolean isKeyAndValueLoadable(Data key, Object value) {
if (key == null) {
logger.warning("Found an attempt to load a null key from map-store, ignoring it.");
return false;
}
if (value == null) {
logger.warning("Found an attempt to load a null value from map-store, ignoring it.");
return false;
}
return true;
}
    /**
     * Sets the value without loading the old value from the map store.
     *
     * @return {@code true} when no previous in-memory value existed
     */
    @Override
    public boolean set(Data dataKey, Object value, long ttl) {
        Object oldValue = putInternal(dataKey, value, ttl, false);
        return oldValue == null;
    }

    /**
     * Puts the value only when no current value exists in memory or in the
     * map store.
     *
     * @return the existing value, or {@code null} when the put was applied
     */
    @Override
    public Object putIfAbsent(Data key, Object value, long ttl) {
        checkIfLoaded();
        long now = getNow();
        markRecordStoreExpirable(ttl);
        Record record = getRecordOrNull(key, now, false);
        Object oldValue;
        if (record == null) {
            // consult the map store; a loaded value counts as "present"
            oldValue = mapDataStore.load(key);
            if (oldValue != null) {
                record = createRecord(oldValue, DEFAULT_TTL, now);
                storage.put(key, record);
                eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                        record.getKey(), record.getValue());
            }
        } else {
            accessRecord(record, now);
            oldValue = record.getValue();
        }
        if (oldValue == null) {
            value = mapServiceContext.interceptPut(name, null, value);
            value = mapDataStore.add(key, value, now);
            onStore(record);
            record = createRecord(value, ttl, now);
            storage.put(key, record);
            eventJournal.writeAddEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                    record.getKey(), record.getValue());
            updateExpiryTime(record, ttl, mapContainer.getMapConfig());
        }
        saveIndex(record, oldValue);
        return oldValue;
    }

    // Exposes the per-partition map data store (write-through/write-behind facade).
    @Override
    public MapDataStore<Data, Object> getMapDataStore() {
        return mapDataStore;
    }
    /**
     * Removes an in-memory record, running the remove interceptor first.
     * When the interceptor nulls the value, index and map-store removal are
     * skipped, but the record is still removed from storage.
     *
     * @return the (possibly intercepted) previous value
     */
    protected Object removeRecord(Data key, Record record, long now) {
        Object oldValue = record.getValue();
        oldValue = mapServiceContext.interceptRemove(name, oldValue);
        if (oldValue != null) {
            removeIndex(record);
            mapDataStore.remove(key, now);
            onStore(record);
        }
        eventJournal.writeRemoveEvent(mapContainer.getEventJournalConfig(), mapContainer.getObjectNamespace(), partitionId,
                record.getKey(), record.getValue());
        storage.removeRecord(record);
        return oldValue;
    }

    // Expiry-aware lookup on the owner replica using the current time.
    @Override
    public Record getRecordOrNull(Data key) {
        long now = getNow();
        return getRecordOrNull(key, now, false);
    }

    // Expiry-aware lookup: returns null for missing or expired records.
    protected Record getRecordOrNull(Data key, long now, boolean backup) {
        Record record = storage.get(key);
        if (record == null) {
            return null;
        }
        return getOrNullIfExpired(record, now, backup);
    }
protected void onStore(Record record) {
if (record == null || mapDataStore == EMPTY_MAP_DATA_STORE) {
return;
}
record.onStore();
}
    /**
     * Refreshes store-related record statistics for entries still sitting in
     * the write-behind queue. Only relevant for write-behind configured maps
     * with statistics enabled; otherwise a no-op.
     */
    private void updateStoreStats() {
        if (!(mapDataStore instanceof WriteBehindStore)
                || !mapContainer.getMapConfig().isStatisticsEnabled()) {
            return;
        }
        long now = Clock.currentTimeMillis();
        WriteBehindQueue<DelayedEntry> writeBehindQueue = ((WriteBehindStore) mapDataStore).getWriteBehindQueue();
        List<DelayedEntry> delayedEntries = writeBehindQueue.asList();
        for (DelayedEntry delayedEntry : delayedEntries) {
            Record record = getRecordOrNull(toData(delayedEntry.getKey()), now, false);
            onStore(record);
        }
    }
private String getStateMessage() {
return "on partitionId=" + partitionId + " on " + mapServiceContext.getNodeEngine().getThisAddress()
+ " loadedOnCreate=" + loadedOnCreate + " loadedOnPreMigration=" + loadedOnPreMigration
+ " isLoaded=" + isLoaded();
}
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.spdy.server.proxy;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.client.HttpClient;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.NegotiatingServerConnectionFactory;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.handler.DefaultHandler;
import org.eclipse.jetty.spdy.api.DataInfo;
import org.eclipse.jetty.spdy.api.ReplyInfo;
import org.eclipse.jetty.spdy.api.SPDY;
import org.eclipse.jetty.spdy.api.Session;
import org.eclipse.jetty.spdy.api.Stream;
import org.eclipse.jetty.spdy.api.StreamFrameListener;
import org.eclipse.jetty.spdy.api.StringDataInfo;
import org.eclipse.jetty.spdy.api.SynInfo;
import org.eclipse.jetty.spdy.client.SPDYClient;
import org.eclipse.jetty.spdy.server.http.SPDYTestUtils;
import org.eclipse.jetty.toolchain.test.TestTracker;
import org.eclipse.jetty.util.Callback;
import org.eclipse.jetty.util.Fields;
import org.eclipse.jetty.util.IO;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
import org.eclipse.jetty.util.ssl.SslContextFactory;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import static junit.framework.Assert.fail;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertThat;
@Ignore
@RunWith(value = Parameterized.class)
public abstract class ProxySPDYToHTTPLoadTest
{
private static final Logger LOG = Log.getLogger(ProxySPDYToHTTPLoadTest.class);
@Parameterized.Parameters
public static Collection<Short[]> parameters()
{
return Arrays.asList(new Short[]{SPDY.V2}, new Short[]{SPDY.V3});
}
    @Rule
    public final TestTracker tracker = new TestTracker();
    // SPDY protocol version for this parameterized run (V2 or V3).
    private final short version;
    // Stored from the parameterized constructor; not used in the visible setup code.
    private final NegotiatingServerConnectionFactory negotiator;
    // Marker payloads identifying which upstream server answered a request.
    private final String server1String = "server1";
    private final String server2String = "server2";
    private SPDYClient.Factory factory;
    private Server server1;
    private Server server2;
    private Server proxy;
    private ServerConnector proxyConnector;
    private SslContextFactory sslContextFactory = SPDYTestUtils.newSslContextFactory();

    public ProxySPDYToHTTPLoadTest(short version, NegotiatingServerConnectionFactory negotiator)
    {
        this.version = version;
        this.negotiator = negotiator;
    }

    // Starts the two upstream HTTP servers (ephemeral ports) and the SPDY client factory.
    @Before
    public void init() throws Exception
    {
        // change the ports if you want to trace the network traffic
        server1 = startServer(new TestServerHandler(server1String, null), 0);
        server2 = startServer(new TestServerHandler(server2String, null), 0);
        factory = new SPDYClient.Factory(sslContextFactory);
        factory.start();
    }

    // Stops the upstream servers, the proxy (if one was started) and the client factory.
    @After
    public void destroy() throws Exception
    {
        stopServer(server1);
        stopServer(server2);
        if (proxy != null)
        {
            proxy.stop();
            proxy.join();
        }
        factory.stop();
    }

    // Null-safe stop-and-join for a server.
    private void stopServer(Server server) throws Exception
    {
        if (server != null)
        {
            server.stop();
            server.join();
        }
    }
protected Server startServer(Handler handler, int port) throws Exception
{
QueuedThreadPool threadPool = new QueuedThreadPool(256);
threadPool.setName("upstreamServerQTP");
Server server = new Server(threadPool);
ServerConnector connector = new ServerConnector(server);
connector.setPort(port);
server.setHandler(handler);
server.addConnector(connector);
server.start();
return server;
}
private InetSocketAddress getServerAddress(Server server)
{
return new InetSocketAddress("localhost", ((ServerConnector)server.getConnectors()[0]).getLocalPort());
}
protected InetSocketAddress startProxy(InetSocketAddress server1, InetSocketAddress server2,
long proxyConnectorTimeout, long proxyEngineTimeout) throws Exception
{
QueuedThreadPool threadPool = new QueuedThreadPool(256);
threadPool.setName("proxyQTP");
proxy = new Server(threadPool);
ProxyEngineSelector proxyEngineSelector = new ProxyEngineSelector();
HttpClient httpClient = new HttpClient();
httpClient.start();
httpClient.setIdleTimeout(proxyEngineTimeout);
HTTPProxyEngine httpProxyEngine = new HTTPProxyEngine(httpClient);
proxyEngineSelector.putProxyEngine("http/1.1", httpProxyEngine);
proxyEngineSelector.putProxyServerInfo("localhost", new ProxyEngineSelector.ProxyServerInfo("http/1.1",
server1.getHostName(), server1.getPort()));
// server2 will be available at two different ProxyServerInfos with different hosts
proxyEngineSelector.putProxyServerInfo("127.0.0.1", new ProxyEngineSelector.ProxyServerInfo("http/1.1",
server2.getHostName(), server2.getPort()));
proxyEngineSelector.putProxyServerInfo("127.0.0.2", new ProxyEngineSelector.ProxyServerInfo("http/1.1",
server2.getHostName(), server2.getPort()));
proxyConnector = new HTTPSPDYProxyServerConnector(proxy, sslContextFactory, new HttpConfiguration(), proxyEngineSelector, negotiator);
proxyConnector.setPort(0);
proxyConnector.setIdleTimeout(proxyConnectorTimeout);
proxy.addConnector(proxyConnector);
proxy.start();
return new InetSocketAddress("localhost", proxyConnector.getLocalPort());
}
@Test
public void testSimpleLoadTest() throws Exception
{
final InetSocketAddress proxyAddress = startProxy(getServerAddress(server1), getServerAddress(server2), 30000,
30000);
final int requestsPerClient = 50;
ExecutorService executorService = Executors.newFixedThreadPool(3);
Runnable client1 = createClientRunnable(proxyAddress, requestsPerClient, server1String, "localhost");
Runnable client2 = createClientRunnable(proxyAddress, requestsPerClient, server2String, "127.0.0.1");
Runnable client3 = createClientRunnable(proxyAddress, requestsPerClient, server2String, "127.0.0.2");
List<Future> futures = new ArrayList<>();
futures.add(executorService.submit(client1));
futures.add(executorService.submit(client2));
futures.add(executorService.submit(client3));
for (Future future : futures)
{
future.get(60, TimeUnit.SECONDS);
}
}
private Runnable createClientRunnable(final InetSocketAddress proxyAddress, final int requestsPerClient,
final String serverIdentificationString, final String serverHost)
{
Runnable client = new Runnable()
{
@Override
public void run()
{
try
{
Session client = factory.newSPDYClient(version).connect(proxyAddress, null);
for (int i = 0; i < requestsPerClient; i++)
{
sendSingleClientRequest(proxyAddress, client, serverIdentificationString, serverHost);
}
}
catch (InterruptedException | ExecutionException | TimeoutException e)
{
e.printStackTrace();
fail();
}
}
};
return client;
}
private void sendSingleClientRequest(InetSocketAddress proxyAddress, Session client, final String serverIdentificationString, String serverHost) throws ExecutionException, InterruptedException, TimeoutException
{
final String data = UUID.randomUUID().toString();
final CountDownLatch replyLatch = new CountDownLatch(1);
final CountDownLatch dataLatch = new CountDownLatch(1);
Fields headers = SPDYTestUtils.createHeaders(serverHost, proxyAddress.getPort(), version, "POST", "/");
Stream stream = client.syn(new SynInfo(headers, false), new StreamFrameListener.Adapter()
{
private final ByteArrayOutputStream result = new ByteArrayOutputStream();
@Override
public void onReply(Stream stream, ReplyInfo replyInfo)
{
LOG.debug("Got reply: {}", replyInfo);
Fields headers = replyInfo.getHeaders();
assertThat("response comes from the given server", headers.get(serverIdentificationString),
is(notNullValue()));
replyLatch.countDown();
}
@Override
public void onData(Stream stream, DataInfo dataInfo)
{
result.write(dataInfo.asBytes(true), 0, dataInfo.length());
if (dataInfo.isClose())
{
LOG.debug("Got last dataFrame: {}", dataInfo);
assertThat("received data matches send data", result.toString(), is(data));
dataLatch.countDown();
}
}
});
stream.data(new StringDataInfo(data, true), new Callback.Adapter());
assertThat("reply has been received", replyLatch.await(15, TimeUnit.SECONDS), is(true));
assertThat("data has been received", dataLatch.await(15, TimeUnit.SECONDS), is(true));
LOG.debug("Successfully received response");
}
private class TestServerHandler extends DefaultHandler
{
private final String responseHeader;
private final byte[] responseData;
private TestServerHandler(String responseHeader, byte[] responseData)
{
this.responseHeader = responseHeader;
this.responseData = responseData;
}
@Override
public void handle(String target, Request baseRequest, HttpServletRequest request,
HttpServletResponse response) throws IOException, ServletException
{
assertThat("Via Header is set", baseRequest.getHeader("X-Forwarded-For"), is(notNullValue()));
assertThat("X-Forwarded-For Header is set", baseRequest.getHeader("X-Forwarded-For"),
is(notNullValue()));
assertThat("X-Forwarded-Host Header is set", baseRequest.getHeader("X-Forwarded-Host"),
is(notNullValue()));
assertThat("X-Forwarded-Proto Header is set", baseRequest.getHeader("X-Forwarded-Proto"),
is(notNullValue()));
assertThat("X-Forwarded-Server Header is set", baseRequest.getHeader("X-Forwarded-Server"),
is(notNullValue()));
baseRequest.setHandled(true);
IO.copy(request.getInputStream(), response.getOutputStream());
if (responseHeader != null)
response.addHeader(responseHeader, "bar");
if (responseData != null)
response.getOutputStream().write(responseData);
}
}
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.json;
//import dalvik.annotation.compat.UnsupportedAppUsage;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
// Note: this class was written without inspecting the non-free org.json sourcecode.
/**
* A dense indexed sequence of values. Values may be any mix of
* {@link JSONObject JSONObjects}, other {@link JSONArray JSONArrays}, Strings,
* Booleans, Integers, Longs, Doubles, {@code null} or {@link JSONObject#NULL}.
* Values may not be {@link Double#isNaN() NaNs}, {@link Double#isInfinite()
* infinities}, or of any type not listed here.
*
* <p>{@code JSONArray} has the same type coercion behavior and
* optional/mandatory accessors as {@link JSONObject}. See that class'
* documentation for details.
*
* <p><strong>Warning:</strong> this class represents null in two incompatible
* ways: the standard Java {@code null} reference, and the sentinel value {@link
* JSONObject#NULL}. In particular, {@code get} fails if the requested index
* holds the null reference, but succeeds if it holds {@code JSONObject.NULL}.
*
* <p>Instances of this class are not thread safe. Although this class is
* nonfinal, it was not designed for inheritance and should not be subclassed.
* In particular, self-use by overridable methods is not specified. See
 * <i>Effective Java</i> Item 17, "Design and Document for Inheritance or Else
 * Prohibit It" for further information.
*/
public class JSONArray {

    // NOTE(review): the @UnsupportedAppUsage annotations that used to mark
    // this field and writeTo() were removed because their import
    // (dalvik.annotation.compat.UnsupportedAppUsage) is commented out at the
    // top of this file, leaving the annotation unresolvable.
    private final List<Object> values;

    /**
     * Creates a {@code JSONArray} with no values.
     */
    public JSONArray() {
        values = new ArrayList<Object>();
    }

    /**
     * Creates a new {@code JSONArray} by copying all values from the given
     * collection.
     *
     * @param copyFrom a collection whose values are of supported types.
     *     Unsupported values are not permitted and will yield an array in an
     *     inconsistent state.
     */
    /* Accept a raw type for API compatibility */
    public JSONArray(Collection copyFrom) {
        this();
        if (copyFrom != null) {
            for (Iterator it = copyFrom.iterator(); it.hasNext();) {
                put(JSONObject.wrap(it.next()));
            }
        }
    }

    /**
     * Creates a new {@code JSONArray} with values from the next array in the
     * tokener.
     *
     * @param readFrom a tokener whose nextValue() method will yield a
     *     {@code JSONArray}.
     * @throws JSONException if the parse fails or doesn't yield a
     *     {@code JSONArray}.
     */
    public JSONArray(JSONTokener readFrom) throws JSONException {
        /*
         * Getting the parser to populate this could get tricky. Instead, just
         * parse to temporary JSONArray and then steal the data from that.
         */
        Object object = readFrom.nextValue();
        if (object instanceof JSONArray) {
            values = ((JSONArray) object).values;
        } else {
            throw JSON.typeMismatch(object, "JSONArray");
        }
    }

    /**
     * Creates a new {@code JSONArray} with values from the JSON string.
     *
     * @param json a JSON-encoded string containing an array.
     * @throws JSONException if the parse fails or doesn't yield a {@code
     *     JSONArray}.
     */
    public JSONArray(String json) throws JSONException {
        this(new JSONTokener(json));
    }

    /**
     * Creates a new {@code JSONArray} with values from the given primitive array.
     */
    public JSONArray(Object array) throws JSONException {
        if (!array.getClass().isArray()) {
            throw new JSONException("Not a primitive array: " + array.getClass());
        }
        final int length = Array.getLength(array);
        values = new ArrayList<Object>(length);
        for (int i = 0; i < length; ++i) {
            put(JSONObject.wrap(Array.get(array, i)));
        }
    }

    /**
     * Returns the number of values in this array.
     */
    public int length() {
        return values.size();
    }

    /**
     * Appends {@code value} to the end of this array.
     *
     * @return this array.
     */
    public JSONArray put(boolean value) {
        values.add(value);
        return this;
    }

    /**
     * Appends {@code value} to the end of this array.
     *
     * @param value a finite value. May not be {@link Double#isNaN() NaNs} or
     *     {@link Double#isInfinite() infinities}.
     * @return this array.
     */
    public JSONArray put(double value) throws JSONException {
        values.add(JSON.checkDouble(value));
        return this;
    }

    /**
     * Appends {@code value} to the end of this array.
     *
     * @return this array.
     */
    public JSONArray put(int value) {
        values.add(value);
        return this;
    }

    /**
     * Appends {@code value} to the end of this array.
     *
     * @return this array.
     */
    public JSONArray put(long value) {
        values.add(value);
        return this;
    }

    /**
     * Appends {@code value} to the end of this array.
     *
     * @param value a {@link JSONObject}, {@link JSONArray}, String, Boolean,
     *     Integer, Long, Double, {@link JSONObject#NULL}, or {@code null}. May
     *     not be {@link Double#isNaN() NaNs} or {@link Double#isInfinite()
     *     infinities}. Unsupported values are not permitted and will cause the
     *     array to be in an inconsistent state.
     * @return this array.
     */
    public JSONArray put(Object value) {
        values.add(value);
        return this;
    }

    /**
     * Same as {@link #put}, with added validity checks.
     */
    void checkedPut(Object value) throws JSONException {
        if (value instanceof Number) {
            JSON.checkDouble(((Number) value).doubleValue());
        }
        put(value);
    }

    /**
     * Sets the value at {@code index} to {@code value}, null padding this array
     * to the required length if necessary. If a value already exists at {@code
     * index}, it will be replaced.
     *
     * @return this array.
     */
    public JSONArray put(int index, boolean value) throws JSONException {
        return put(index, (Boolean) value);
    }

    /**
     * Sets the value at {@code index} to {@code value}, null padding this array
     * to the required length if necessary. If a value already exists at {@code
     * index}, it will be replaced.
     *
     * @param value a finite value. May not be {@link Double#isNaN() NaNs} or
     *     {@link Double#isInfinite() infinities}.
     * @return this array.
     */
    public JSONArray put(int index, double value) throws JSONException {
        return put(index, (Double) value);
    }

    /**
     * Sets the value at {@code index} to {@code value}, null padding this array
     * to the required length if necessary. If a value already exists at {@code
     * index}, it will be replaced.
     *
     * @return this array.
     */
    public JSONArray put(int index, int value) throws JSONException {
        return put(index, (Integer) value);
    }

    /**
     * Sets the value at {@code index} to {@code value}, null padding this array
     * to the required length if necessary. If a value already exists at {@code
     * index}, it will be replaced.
     *
     * @return this array.
     */
    public JSONArray put(int index, long value) throws JSONException {
        return put(index, (Long) value);
    }

    /**
     * Sets the value at {@code index} to {@code value}, null padding this array
     * to the required length if necessary. If a value already exists at {@code
     * index}, it will be replaced.
     *
     * @param value a {@link JSONObject}, {@link JSONArray}, String, Boolean,
     *     Integer, Long, Double, {@link JSONObject#NULL}, or {@code null}. May
     *     not be {@link Double#isNaN() NaNs} or {@link Double#isInfinite()
     *     infinities}.
     * @return this array.
     */
    public JSONArray put(int index, Object value) throws JSONException {
        if (value instanceof Number) {
            // deviate from the original by checking all Numbers, not just floats & doubles
            JSON.checkDouble(((Number) value).doubleValue());
        }
        while (values.size() <= index) {
            values.add(null);
        }
        values.set(index, value);
        return this;
    }

    /**
     * Returns true if this array has no value at {@code index}, or if its value
     * is the {@code null} reference or {@link JSONObject#NULL}.
     */
    public boolean isNull(int index) {
        Object value = opt(index);
        return value == null || value == JSONObject.NULL;
    }

    /**
     * Returns the value at {@code index}.
     *
     * @throws JSONException if this array has no value at {@code index}, or if
     *     that value is the {@code null} reference. This method returns
     *     normally if the value is {@code JSONObject#NULL}.
     */
    public Object get(int index) throws JSONException {
        try {
            Object value = values.get(index);
            if (value == null) {
                throw new JSONException("Value at " + index + " is null.");
            }
            return value;
        } catch (IndexOutOfBoundsException e) {
            throw new JSONException("Index " + index + " out of range [0.." + values.size() + ")", e);
        }
    }

    /**
     * Returns the value at {@code index}, or null if the array has no value
     * at {@code index}.
     */
    public Object opt(int index) {
        if (index < 0 || index >= values.size()) {
            return null;
        }
        return values.get(index);
    }

    /**
     * Removes and returns the value at {@code index}, or null if the array has no value
     * at {@code index}.
     */
    public Object remove(int index) {
        if (index < 0 || index >= values.size()) {
            return null;
        }
        return values.remove(index);
    }

    /**
     * Returns the value at {@code index} if it exists and is a boolean or can
     * be coerced to a boolean.
     *
     * @throws JSONException if the value at {@code index} doesn't exist or
     *     cannot be coerced to a boolean.
     */
    public boolean getBoolean(int index) throws JSONException {
        Object object = get(index);
        Boolean result = JSON.toBoolean(object);
        if (result == null) {
            throw JSON.typeMismatch(index, object, "boolean");
        }
        return result;
    }

    /**
     * Returns the value at {@code index} if it exists and is a boolean or can
     * be coerced to a boolean. Returns false otherwise.
     */
    public boolean optBoolean(int index) {
        return optBoolean(index, false);
    }

    /**
     * Returns the value at {@code index} if it exists and is a boolean or can
     * be coerced to a boolean. Returns {@code fallback} otherwise.
     */
    public boolean optBoolean(int index, boolean fallback) {
        Object object = opt(index);
        Boolean result = JSON.toBoolean(object);
        return result != null ? result : fallback;
    }

    /**
     * Returns the value at {@code index} if it exists and is a double or can
     * be coerced to a double.
     *
     * @throws JSONException if the value at {@code index} doesn't exist or
     *     cannot be coerced to a double.
     */
    public double getDouble(int index) throws JSONException {
        Object object = get(index);
        Double result = JSON.toDouble(object);
        if (result == null) {
            throw JSON.typeMismatch(index, object, "double");
        }
        return result;
    }

    /**
     * Returns the value at {@code index} if it exists and is a double or can
     * be coerced to a double. Returns {@code NaN} otherwise.
     */
    public double optDouble(int index) {
        return optDouble(index, Double.NaN);
    }

    /**
     * Returns the value at {@code index} if it exists and is a double or can
     * be coerced to a double. Returns {@code fallback} otherwise.
     */
    public double optDouble(int index, double fallback) {
        Object object = opt(index);
        Double result = JSON.toDouble(object);
        return result != null ? result : fallback;
    }

    /**
     * Returns the value at {@code index} if it exists and is an int or
     * can be coerced to an int.
     *
     * @throws JSONException if the value at {@code index} doesn't exist or
     *     cannot be coerced to a int.
     */
    public int getInt(int index) throws JSONException {
        Object object = get(index);
        Integer result = JSON.toInteger(object);
        if (result == null) {
            throw JSON.typeMismatch(index, object, "int");
        }
        return result;
    }

    /**
     * Returns the value at {@code index} if it exists and is an int or
     * can be coerced to an int. Returns 0 otherwise.
     */
    public int optInt(int index) {
        return optInt(index, 0);
    }

    /**
     * Returns the value at {@code index} if it exists and is an int or
     * can be coerced to an int. Returns {@code fallback} otherwise.
     */
    public int optInt(int index, int fallback) {
        Object object = opt(index);
        Integer result = JSON.toInteger(object);
        return result != null ? result : fallback;
    }

    /**
     * Returns the value at {@code index} if it exists and is a long or
     * can be coerced to a long.
     *
     * @throws JSONException if the value at {@code index} doesn't exist or
     *     cannot be coerced to a long.
     */
    public long getLong(int index) throws JSONException {
        Object object = get(index);
        Long result = JSON.toLong(object);
        if (result == null) {
            throw JSON.typeMismatch(index, object, "long");
        }
        return result;
    }

    /**
     * Returns the value at {@code index} if it exists and is a long or
     * can be coerced to a long. Returns 0 otherwise.
     */
    public long optLong(int index) {
        return optLong(index, 0L);
    }

    /**
     * Returns the value at {@code index} if it exists and is a long or
     * can be coerced to a long. Returns {@code fallback} otherwise.
     */
    public long optLong(int index, long fallback) {
        Object object = opt(index);
        Long result = JSON.toLong(object);
        return result != null ? result : fallback;
    }

    /**
     * Returns the value at {@code index} if it exists, coercing it if
     * necessary.
     *
     * @throws JSONException if no such value exists.
     */
    public String getString(int index) throws JSONException {
        Object object = get(index);
        String result = JSON.toString(object);
        if (result == null) {
            throw JSON.typeMismatch(index, object, "String");
        }
        return result;
    }

    /**
     * Returns the value at {@code index} if it exists, coercing it if
     * necessary. Returns the empty string if no such value exists.
     */
    public String optString(int index) {
        return optString(index, "");
    }

    /**
     * Returns the value at {@code index} if it exists, coercing it if
     * necessary. Returns {@code fallback} if no such value exists.
     */
    public String optString(int index, String fallback) {
        Object object = opt(index);
        String result = JSON.toString(object);
        return result != null ? result : fallback;
    }

    /**
     * Returns the value at {@code index} if it exists and is a {@code
     * JSONArray}.
     *
     * @throws JSONException if the value doesn't exist or is not a {@code
     *     JSONArray}.
     */
    public JSONArray getJSONArray(int index) throws JSONException {
        Object object = get(index);
        if (object instanceof JSONArray) {
            return (JSONArray) object;
        } else {
            throw JSON.typeMismatch(index, object, "JSONArray");
        }
    }

    /**
     * Returns the value at {@code index} if it exists and is a {@code
     * JSONArray}. Returns null otherwise.
     */
    public JSONArray optJSONArray(int index) {
        Object object = opt(index);
        return object instanceof JSONArray ? (JSONArray) object : null;
    }

    /**
     * Returns the value at {@code index} if it exists and is a {@code
     * JSONObject}.
     *
     * @throws JSONException if the value doesn't exist or is not a {@code
     *     JSONObject}.
     */
    public JSONObject getJSONObject(int index) throws JSONException {
        Object object = get(index);
        if (object instanceof JSONObject) {
            return (JSONObject) object;
        } else {
            throw JSON.typeMismatch(index, object, "JSONObject");
        }
    }

    /**
     * Returns the value at {@code index} if it exists and is a {@code
     * JSONObject}. Returns null otherwise.
     */
    public JSONObject optJSONObject(int index) {
        Object object = opt(index);
        return object instanceof JSONObject ? (JSONObject) object : null;
    }

    /**
     * Returns a new object whose values are the values in this array, and whose
     * names are the values in {@code names}. Names and values are paired up by
     * index from 0 through to the shorter array's length. Names that are not
     * strings will be coerced to strings. This method returns null if either
     * array is empty.
     */
    public JSONObject toJSONObject(JSONArray names) throws JSONException {
        JSONObject result = new JSONObject();
        int length = Math.min(names.length(), values.size());
        if (length == 0) {
            return null;
        }
        for (int i = 0; i < length; i++) {
            String name = JSON.toString(names.opt(i));
            result.put(name, opt(i));
        }
        return result;
    }

    /**
     * Returns a new string by alternating this array's values with {@code
     * separator}. This array's string values are quoted and have their special
     * characters escaped. For example, the array containing the strings '12"
     * pizza', 'taco' and 'soda' joined on '+' returns this:
     * <pre>"12\" pizza"+"taco"+"soda"</pre>
     */
    public String join(String separator) throws JSONException {
        JSONStringer stringer = new JSONStringer();
        stringer.open(JSONStringer.Scope.NULL, "");
        for (int i = 0, size = values.size(); i < size; i++) {
            if (i > 0) {
                stringer.out.append(separator);
            }
            stringer.value(values.get(i));
        }
        stringer.close(JSONStringer.Scope.NULL, JSONStringer.Scope.NULL, "");
        return stringer.out.toString();
    }

    /**
     * Encodes this array as a compact JSON string, such as:
     * <pre>[94043,90210]</pre>
     *
     * <p>Returns null if encoding fails, preserving the historical contract of
     * this API.
     */
    @Override public String toString() {
        try {
            JSONStringer stringer = new JSONStringer();
            writeTo(stringer);
            return stringer.toString();
        } catch (JSONException e) {
            return null;
        }
    }

    /**
     * Encodes this array as a human readable JSON string for debugging, such
     * as:
     * <pre>
     * [
     *     94043,
     *     90210
     * ]</pre>
     *
     * @param indentSpaces the number of spaces to indent for each level of
     *     nesting.
     */
    public String toString(int indentSpaces) throws JSONException {
        JSONStringer stringer = new JSONStringer(indentSpaces);
        writeTo(stringer);
        return stringer.toString();
    }

    void writeTo(JSONStringer stringer) throws JSONException {
        stringer.array();
        for (Object value : values) {
            stringer.value(value);
        }
        stringer.endArray();
    }

    @Override public boolean equals(Object o) {
        return o instanceof JSONArray && ((JSONArray) o).values.equals(values);
    }

    @Override public int hashCode() {
        // diverge from the original, which doesn't implement hashCode
        return values.hashCode();
    }
}
| |
package edu.harvard.econcs.turkserver.server;
import static org.junit.Assert.*;
import java.util.Collection;
import java.util.HashSet;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import net.andrewmao.math.RandomSelection;
import net.andrewmao.misc.ConcurrentBooleanCounter;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang.RandomStringUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Multimap;
import edu.harvard.econcs.turkserver.client.SessionClient;
import edu.harvard.econcs.turkserver.client.TestClient;
import edu.harvard.econcs.turkserver.config.DataModule;
import edu.harvard.econcs.turkserver.config.ConfigModules;
import edu.harvard.econcs.turkserver.config.TSConfig;
import edu.harvard.econcs.turkserver.config.TestConfigModules;
import edu.harvard.econcs.turkserver.config.TestServerModule;
/**
 * Integration test that simulates many workers randomly accepting, submitting,
 * and returning HITs against a TurkServer running single-person experiments,
 * until every HIT has been completed.
 */
public class SingleExperimentsTest {
	// Time allowed for a cometd event to propagate before asserting on it.
	static final int EVENT_PUSH_MILLIS = 150;
	static final int numWorkers = 10;
	static final int numHITs = 40;
	String url = "http://localhost:9876/cometd/";
	Random rnd = new Random();
	// Tracks which HITs exist and whether each has been completed (true = done).
	ConcurrentBooleanCounter<String> usedHITs;
	ConcurrentHashMap<String, String> hitToAssignment;
	Set<String> workers;
	TurkServer ts;
	ClientGenerator cg;
	@Before
	public void setUp() throws Exception {
		usedHITs = new ConcurrentBooleanCounter<String>();
		hitToAssignment = new ConcurrentHashMap<String, String>();
		workers = new HashSet<String>();
		// Set up test fixtures: random HIT ids, assignment ids, and worker ids
		for( int i = 0; i < numHITs; i++ )
			usedHITs.put(RandomStringUtils.randomAlphanumeric(30), false);
		for( String hitId : usedHITs.keySet() )
			hitToAssignment.put(hitId, RandomStringUtils.randomAlphanumeric(30));
		for( int i = 0; i < numWorkers; i++ )
			workers.add(RandomStringUtils.randomAlphanumeric(14));
		TestUtils.waitForPort(9876);
		TestUtils.waitForPort(9877);
		// Make sure the ports are clear
		Thread.sleep(500);
	}
	// Server module configuring a one-person test experiment.
	class SingleTestModule extends TestServerModule {
		@Override
		public void configure() {
			super.configure();
			bindExperimentClass(TestExperiment.class);
			bindConfigurator(new TestConfigurator(1, 0));
			bindString(TSConfig.EXP_SETID, "test");
		}
	}
	@After
	public void tearDown() throws Exception {
		if( cg != null ) {
			cg.disposeAllClients();
		}
		if( ts != null ) {
			ts.orderlyShutdown();
		}
	}
	@Test
	public void test() throws Exception {
		DataModule dm = new DataModule();
		Configuration conf = dm.getConfiguration();
		conf.setProperty(TSConfig.SERVER_HITGOAL, numHITs);
		// Allow each worker enough repeats to finish all HITs with some slack.
		conf.setProperty(TSConfig.EXP_REPEAT_LIMIT, numHITs / numWorkers + 2);
		ts = new TurkServer(dm);
		ts.runExperiment(
				new SingleTestModule(),
				ConfigModules.SINGLE_EXPERIMENTS,
				TestConfigModules.TEMP_DATABASE,
				TestConfigModules.NO_HITS,
				TestConfigModules.SCREEN_LOGGING
				);
		Thread.sleep(500);
		// Drive the random worker simulation until all HITs are completed
		Set<String> untakenHITs = new HashSet<String>(usedHITs.keySet());
		Set<String> takenHITs = new HashSet<String>();
		Multimap<String, SessionClient<TestClient>> workerToPC = HashMultimap.create();
		cg = new ClientGenerator(url);
		do {
			double r = rnd.nextDouble();
			/*
			 * Probabilities:
			 * 0.4 someone takes (accepts) a HIT
			 * 0.4 someone submits a HIT
			 * 0.2 someone stops working (returns) a HIT
			 */
			if( r < 0.4 ) {
				// Pick one of the unassigned HITs and assign it to someone.
				if( untakenHITs.size() == 0 ) continue;
				String randomWorker = RandomSelection.selectRandom(workers);
				// Don't take multiple HITs per worker for now.
				if( workerToPC.get(randomWorker).size() > 0 ) continue;
				String randomHIT = RandomSelection.selectRandom(untakenHITs);
				String randomAsst = hitToAssignment.get(randomHIT);
				SessionClient<TestClient> pc =
						cg.getSessionClient(TestClient.class, randomHIT, randomAsst, randomWorker);
				untakenHITs.remove(randomHIT);
				takenHITs.add(randomHIT);
				workerToPC.put(randomWorker, pc);
				Thread.sleep(EVENT_PUSH_MILLIS);
				assertTrue(pc.isConnected());
				if( pc.isError() ) {
					System.out.println("Server rejected this accept, returning");
					// We need to return this HIT and close it
					cg.disposeClient(pc);
					workerToPC.remove(randomWorker, pc);
					String hitId = pc.getHitId();
					takenHITs.remove(hitId);
					untakenHITs.add(hitId);
				}
				else {
					Thread.sleep(EVENT_PUSH_MILLIS);
					/*
					 * check that the client got start message
					 * TODO this is giving startExp instead of broadcast
					 */
					assertNotNull(pc.getClientBean().lastCall);
					// assertEquals("broadcast", pc.getClientBean().lastCall);
				}
			}
			else if( r < 0.9 ) {
				// Pick a worker and complete its HIT
				if( workerToPC.size() == 0) continue;
				String randomWorker = RandomSelection.selectRandom(workerToPC.keySet());
				SessionClient<TestClient> pc = RandomSelection.selectRandom(
						workerToPC.get(randomWorker));
				pc.getClientBean().getController().sendExperimentBroadcast(
						ImmutableMap.<String, Object>of("msg", pc.getClientBean().toString())
						);
				Thread.sleep(EVENT_PUSH_MILLIS);
				// Should get return at this point
				assertEquals("finishExp", pc.getClientBean().lastCall);
				pc.submit("some comments");
				Thread.sleep(EVENT_PUSH_MILLIS);
				// Make sure it got the complete message and disconnected
				assertTrue(!pc.isConnected());
				cg.disposeClient(pc);
				workerToPC.remove(randomWorker, pc);
				String hitId = pc.getHitId();
				usedHITs.put(hitId, true);
				takenHITs.remove(hitId);
				assertEquals(ts.getSessionServer().getNumCompleted(), usedHITs.getTrueCount());
			}
			else {
				// Pick a worker and return its HIT
				if( workerToPC.size() == 0) continue;
				String randomWorker = RandomSelection.selectRandom(workerToPC.keySet());
				Collection<SessionClient<TestClient>> pcs = workerToPC.get(randomWorker);
				// Invariant: each worker holds at most one HIT at a time.
				assertTrue(pcs.size() <= 1);
				if( pcs.size() > 0) {
					SessionClient<TestClient> pc = RandomSelection.selectRandom(pcs);
					cg.disposeClient(pc);
					workerToPC.remove(randomWorker, pc);
					String hitId = pc.getHitId();
					takenHITs.remove(hitId);
					untakenHITs.add(hitId);
				}
			}
		} while (usedHITs.getTrueCount() < numHITs);
		assertEquals(ts.getSessionServer().getNumCompleted(), numHITs);
		System.out.println("finished");
	}
}
| |
package com.craftmend.openaudiomc.velocity.modules.configuration;
import com.craftmend.openaudiomc.generic.logging.OpenAudioLogger;
import com.craftmend.openaudiomc.generic.storage.enums.StorageKey;
import com.craftmend.openaudiomc.generic.storage.enums.StorageLocation;
import com.craftmend.openaudiomc.generic.storage.interfaces.ConfigurationImplementation;
import com.craftmend.openaudiomc.velocity.OpenAudioMcVelocity;
import com.google.common.reflect.TypeToken;
import lombok.SneakyThrows;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.yaml.YAMLConfigurationLoader;
import org.apache.commons.lang.Validate;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
public class VelocityConfigurationImplementation implements ConfigurationImplementation {
    // Backing node for data.yml; assigned once in the constructor.
    private final ConfigurationNode dataConfig;
    // Cache of resolved config strings keyed by storage key.
    // NOTE(review): not read or written in the visible code — presumably used
    // by methods outside this view; confirm before removing.
    private final Map<StorageKey, String> cachedConfigStrings = new HashMap<>();
    // Backing node for config.yml.
    private ConfigurationNode mainConfig;
    /**
     * Copies the bundled default data.yml and config.yml out of the plugin jar
     * (without overwriting existing files) and loads both into configuration
     * nodes.
     */
    public VelocityConfigurationImplementation() {
        //save default
        saveDefaultFile("data.yml", false);
        saveDefaultFile("config.yml", false);
        dataConfig = getFile("data.yml");
        mainConfig = getFile("config.yml");
        OpenAudioLogger.toConsole("Starting configuration module");
        this.loadSettings();
    }
    /**
     * Historical hook for (re)loading settings; loading now happens in the
     * constructor, so this method is a no-op kept for interface compatibility.
     *
     * @return this implementation, for chaining
     */
    public ConfigurationImplementation loadSettings() {
        // deprecated: kept only so existing callers can still chain through it
        return this;
    }
/**
* get a specific string from a config
*
* @param storageKey The storage key
* @return the string
*/
@Override
public String getString(StorageKey storageKey) {
Object[] path = storageKey.getPath().split("\\.");
switch (storageKey.getStorageLocation()) {
case DATA_FILE:
return dataConfig.getNode(path).getString();
case CONFIG_FILE:
return ((mainConfig.getNode(path).isVirtual() ? "<unknown openaudiomc value " + storageKey.getPath() + ">"
: mainConfig.getNode(path).getString()));
default:
return "<unknown openaudiomc value " + storageKey.getPath() + ">";
}
}
/**
* get a specific int from a config
*
* @param storageKey The storage key
* @return the int, -1 if absent
*/
@Override
public int getInt(StorageKey storageKey) {
Object[] path = storageKey.getPath().split("\\.");
switch (storageKey.getStorageLocation()) {
case DATA_FILE:
return dataConfig.getNode(path).getInt();
case CONFIG_FILE:
return mainConfig.getNode(path).getInt();
default:
return -1;
}
}
/**
* A safe string getter
*
* @param path The path
* @param storageLocation specified file
* @return the string, or a empty string instead of null
*/
@Override
public String getStringFromPath(String path, StorageLocation storageLocation) {
Validate.isTrue(storageLocation == StorageLocation.DATA_FILE, "Getting strings from a config file with hardcoded paths is not allowed");
ConfigurationNode node = dataConfig.getNode((Object[]) path.split("\\."));
if(node.isVirtual()) {
return "";
}
return node.getString();
}
@Override
public boolean isPathValid(String path, StorageLocation storageLocation) {
Object[] pathArr = path.split("\\.");
switch (storageLocation) {
case DATA_FILE:
return !dataConfig.getNode(pathArr).isVirtual();
case CONFIG_FILE:
return !mainConfig.getNode(pathArr).isVirtual();
default:
return false;
}
}
/**
* A safe int getter
*
* @param path The path
* @param storageLocation specified file
* @return the int value
*/
@Override
public Integer getIntFromPath(String path, StorageLocation storageLocation) {
Validate.isTrue(storageLocation == StorageLocation.DATA_FILE, "Getting strings from a config file with hardcoded paths is not allowed");
return dataConfig.getNode((Object[]) path.split("\\.")).getInt();
}
/**
* get a set of keys under a config section, will never be null so its safe
*
* @param path Path
* @param storageLocation file target
* @return a set of keys, can be empty
*/
@Override
public Set<String> getStringSet(String path, StorageLocation storageLocation) {
throw new UnsupportedOperationException("Not supported in velocity (proxy) mode");
}
/**
* Write/update a string value for a file
*
* @param location The storage key
* @param string the new value
*/
@SneakyThrows
@Override
public void setString(StorageKey location, String string) {
Object[] path = location.getPath().split("\\.");
TypeToken token = TypeToken.of(String.class);
switch (location.getStorageLocation()) {
case DATA_FILE:
dataConfig.getNode(path).setValue(token, string);
case CONFIG_FILE:
mainConfig.getNode(path).setValue(token, string);
cachedConfigStrings.put(location, string);
}
}
/**
* write a soft value to a file
*
* @param storageLocation The file to save to
* @param path the path
* @param string the value
*/
@SneakyThrows
@Override
public void setString(StorageLocation storageLocation, String path, String string) {
Object[] pathArr = path.split("\\.");
TypeToken token = TypeToken.of(String.class);
switch (storageLocation) {
case DATA_FILE:
dataConfig.getNode(pathArr).setValue(token, string);
case CONFIG_FILE:
mainConfig.getNode(pathArr).setValue(token, string);
}
}
/**
* write a soft value to a file
*
* @param storageLocation The file to save to
* @param path the path
* @param value the value
*/
@Override
public void setInt(StorageLocation storageLocation, String path, int value) {
Object[] pathArr = path.split("\\.");
switch (storageLocation) {
case DATA_FILE:
dataConfig.getNode(pathArr).setValue(value);
case CONFIG_FILE:
mainConfig.getNode(pathArr).setValue(value);
}
}
/**
* Get a boolean value, from any file
*
* @param location The storage key
* @return boolean value
*/
@Override
public boolean getBoolean(StorageKey location) {
Object[] path = location.getPath().split("\\.");
switch (location.getStorageLocation()) {
case DATA_FILE:
return dataConfig.getNode(path).getBoolean();
case CONFIG_FILE:
return mainConfig.getNode(path).getBoolean();
default:
return false;
}
}
@Override
public Object get(StorageKey location) {
Object[] path = location.getPath().split("\\.");
switch (location.getStorageLocation()) {
case DATA_FILE:
return dataConfig.getNode(path).getValue();
case CONFIG_FILE:
return mainConfig.getNode(path).getValue();
default:
return false;
}
}
@SneakyThrows
@Override
public void set(StorageKey location, Object value) {
Object[] path = location.getPath().split("\\.");
TypeToken token = TypeToken.of(value.getClass());
switch (location.getStorageLocation()) {
case DATA_FILE:
dataConfig.getNode(path).setValue(token, value);
case CONFIG_FILE:
mainConfig.getNode(path).setValue(token, value);
}
}
/**
* Reload the config file
*/
@Override
public void reloadConfig() {
this.cachedConfigStrings.clear();
mainConfig = getFile("config.yml");
this.loadSettings();
}
/**
* saves the data to the file, like new regions and speakers.
*/
@Override
public void saveAll() {
try {
File config = new File(OpenAudioMcVelocity.getInstance().getDataDir(), "config.yml");
File data = new File(OpenAudioMcVelocity.getInstance().getDataDir(), "data.yml");
YAMLConfigurationLoader.builder()
.setFile(config)
.build().save(mainConfig);
YAMLConfigurationLoader.builder()
.setFile(data)
.build().save(dataConfig);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void overwriteConfigFile() {
saveDefaultFile("config.yml", true);
}
@Override
public boolean hasDataFile() {
return true;
}
@Override
public boolean hasStorageKey(StorageKey storageKey) {
Object[] path = storageKey.getPath().split("\\.");
if (storageKey.getStorageLocation() == StorageLocation.DATA_FILE) {
return !dataConfig.getNode(path).isVirtual();
}
return !mainConfig.getNode(path).isVirtual();
}
private ConfigurationNode getFile(String filename) {
File file = new File(OpenAudioMcVelocity.getInstance().getDataDir(), filename);
YAMLConfigurationLoader yamlLoader = YAMLConfigurationLoader.builder()
.setFile(file)
.build();
try {
return yamlLoader.load();
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private void saveDefaultFile(String filename, boolean hard) {
if (!OpenAudioMcVelocity.getInstance().getDataDir().exists())
OpenAudioMcVelocity.getInstance().getDataDir().mkdir();
File file = new File(OpenAudioMcVelocity.getInstance().getDataDir(), filename);
if (hard && file.exists()) {
try {
Files.delete(file.toPath());
} catch (IOException e) {
e.printStackTrace();
}
}
if (!file.exists() || hard) {
try (InputStream in = OpenAudioMcVelocity.getInstance().getClass().getClassLoader().getResourceAsStream(filename)) {
Files.copy(in, file.toPath());
} catch (IOException e) {
e.printStackTrace();
}
}
}
@Override
public void setBoolean(StorageKey location, boolean value) {
Object[] path = location.getPath().split("\\.");
switch (location.getStorageLocation()) {
case DATA_FILE:
dataConfig.getNode(path).setValue(value);
case CONFIG_FILE:
mainConfig.getNode(path).setValue(value);
}
}
}
| |
/**
* The MIT License (MIT)
* <p>
* Copyright (c) 2016-2021 the the original author or authors.
* <p>
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* <p>
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* <p>
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.bernardomg.example.jpa.test.integration.temporal.time;
import java.sql.Time;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.Locale;
import javax.persistence.EntityManager;
import javax.persistence.Query;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.junit.jupiter.EnabledIf;
import org.springframework.test.context.junit4.AbstractJUnit4SpringContextTests;
import com.bernardomg.example.jpa.test.config.annotation.PersistenceIntegrationTest;
import com.bernardomg.example.jpa.test.config.criteria.temporal.TimeEntityCriteriaFactory;
/**
 * Integration tests for a {@code TimeEntity} testing it loads values correctly
 * by using the criteria API.
 *
 * @author Bernardo Martínez Garrido
 */
@PersistenceIntegrationTest
@EnabledIf(expression = "#{!'${jpa.adapter.class}'.contains('Hibernate')}",
        reason = "Supports persisted time", loadContext = true)
public class ITTimeEntityQueryCriteriaApi
        extends AbstractJUnit4SpringContextTests {

    /**
     * Calendar for the test ranges.
     */
    private Calendar calendar;

    /**
     * Java date for the test ranges.
     */
    private Date date;

    /**
     * The persistence entity manager.
     */
    @Autowired
    private EntityManager entityManager;

    /**
     * Time for the test ranges.
     */
    private Time time;

    /**
     * String to generate the time for the test ranges, in 24-hour
     * {@code HH:mm:ss} form.
     */
    private final String timeString = "11:11:11";

    /**
     * Default constructor.
     */
    public ITTimeEntityQueryCriteriaApi() {
        super();
    }

    /**
     * Initializes the time to be used in the tests.
     *
     * @throws ParseException
     *             if the time string can't be parsed
     */
    @BeforeEach
    public final void setUpTime() throws ParseException {
        final DateFormat format; // Format for parsing the time string

        // Bug fix: 'HH' (hour-of-day, 0-23) instead of 'hh' (clock-hour,
        // 1-12), which would silently misparse any test time >= 13:00:00.
        format = new SimpleDateFormat("HH:mm:ss", Locale.ENGLISH);

        date = format.parse(timeString);
        time = new Time(date.getTime());
        calendar = Calendar.getInstance();
        calendar.setTime(date);
    }

    /**
     * Tests that retrieving all the entities after a time gives the correct
     * number of them when using a {@code Calendar} for the time.
     */
    @Test
    public final void testGetAfterTime_Calendar() {
        final Integer readCount;
        readCount = getAfterTimeCalendarQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(3, readCount);
    }

    /**
     * Tests that retrieving all the entities after a time gives the correct
     * number of them when using a Java {@code Time} for the time.
     */
    @Test
    public final void testGetAfterTime_Java() {
        final Integer readCount;
        readCount = getAfterTimeJavaQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(3, readCount);
    }

    /**
     * Tests that retrieving all the entities after a time gives the correct
     * number of them when using a {@code Time} for the time.
     */
    @Test
    public final void testGetAfterTime_Sql() {
        final Integer readCount;
        readCount = getAfterTimeSqlQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(3, readCount);
    }

    /**
     * Tests that retrieving all the entities before a time gives the correct
     * number of them when using a {@code Calendar} for the time.
     */
    @Test
    public final void testGetBeforeTime_Calendar() {
        final Integer readCount;
        readCount = getBeforeTimeCalendarQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(2, readCount);
    }

    /**
     * Tests that retrieving all the entities before a time gives the correct
     * number of them when using a Java {@code Time} for the time.
     */
    @Test
    public final void testGetBeforeTime_Java() {
        final Integer readCount;
        readCount = getBeforeTimeJavaQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(2, readCount);
    }

    /**
     * Tests that retrieving all the entities before a time gives the correct
     * number of them when using a {@code Time} for the time.
     */
    @Test
    public final void testGetBeforeTime_Sql() {
        final Integer readCount;
        readCount = getBeforeTimeSqlQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(2, readCount);
    }

    /**
     * Tests that retrieving all the entities in a time gives the correct number
     * of them when using a {@code Calendar} for the time.
     */
    @Test
    public final void testGetInTime_Calendar() {
        final Integer readCount;
        readCount = getInTimeCalendarQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(1, readCount);
    }

    /**
     * Tests that retrieving all the entities in a time gives the correct number
     * of them when using a Java {@code Time} for the time.
     */
    @Test
    public final void testGetInTime_Java() {
        final Integer readCount;
        readCount = getInTimeJavaQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(1, readCount);
    }

    /**
     * Tests that retrieving all the entities in a time gives the correct number
     * of them when using a {@code Time} for the time.
     */
    @Test
    public final void testGetInTime_Sql() {
        final Integer readCount;
        readCount = getInTimeSqlQuery().getResultList().size();
        // Reads the expected number of entities
        Assertions.assertEquals(1, readCount);
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getAfterTimeCalendarQuery() {
        return entityManager.createQuery(TimeEntityCriteriaFactory
                .findAfterTime(entityManager, calendar));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getAfterTimeJavaQuery() {
        return entityManager.createQuery(
                TimeEntityCriteriaFactory.findAfterTime(entityManager, date));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getAfterTimeSqlQuery() {
        return entityManager.createQuery(TimeEntityCriteriaFactory
                .findAfterSqlTime(entityManager, time));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getBeforeTimeCalendarQuery() {
        return entityManager.createQuery(TimeEntityCriteriaFactory
                .findBeforeTime(entityManager, calendar));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getBeforeTimeJavaQuery() {
        return entityManager.createQuery(
                TimeEntityCriteriaFactory.findBeforeTime(entityManager, date));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getBeforeTimeSqlQuery() {
        return entityManager.createQuery(TimeEntityCriteriaFactory
                .findBeforeSqlTime(entityManager, time));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getInTimeCalendarQuery() {
        return entityManager.createQuery(
                TimeEntityCriteriaFactory.findInTime(entityManager, calendar));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getInTimeJavaQuery() {
        return entityManager.createQuery(
                TimeEntityCriteriaFactory.findInTime(entityManager, date));
    }

    /**
     * Returns the query for the test.
     *
     * @return the query for the test
     */
    private final Query getInTimeSqlQuery() {
        return entityManager.createQuery(
                TimeEntityCriteriaFactory.findInSqlTime(entityManager, time));
    }
}
| |
package natlab.toolkits.analysis.varorfun;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import natlab.LookupFile;
import natlab.toolkits.analysis.Mergable;
import natlab.toolkits.analysis.MergeUtil;
import natlab.toolkits.filehandling.FunctionOrScriptQuery;
import analysis.AbstractStructuralForwardAnalysis;
import ast.ASTNode;
import ast.AssignStmt;
import ast.CellIndexExpr;
import ast.EndExpr;
import ast.Expr;
import ast.Function;
import ast.FunctionHandleExpr;
import ast.GlobalStmt;
import ast.LValueExpr;
import ast.LambdaExpr;
import ast.Name;
import ast.NameExpr;
import ast.ParameterizedExpr;
import ast.PersistentStmt;
import ast.Script;
import ast.StringLiteralExpr;
/**
 * Flow-sensitive variable-or-function analysis: for each {@code Name} it
 * decides whether the identifier denotes a variable or a function, tracked as
 * a {@code Map<String, VFDatum>} flow set.
 */
public class VFFlowSensitiveAnalysis extends AbstractStructuralForwardAnalysis<Map<String, VFDatum>> implements VFAnalysis{

    private Function currentFunction = null;
    private Script currentScript = null;
    private FunctionOrScriptQuery lookupQuery = null;
    // Outermost indexing expression currently being traversed; used to decide
    // which name an 'end' expression binds to.
    private LValueExpr outerParameterizedExpr = null;
    private boolean inFunction = false;
    // Names that an 'end' inside the current indexing expression may refer to.
    private java.util.List<NameExpr> endCandidates = null;

    /**
     * Dispatches a condition expression to the matching case handler.
     *
     * Bug fix: the original used independent {@code if} statements with a
     * dangling {@code else} on the last one, so a NameExpr (or
     * ParameterizedExpr) condition was handled by its own case AND then fed
     * through {@code caseASTNode} again. The chain is now mutually exclusive.
     */
    @Override
    public void caseCondition(Expr condExpr) {
        if (condExpr instanceof NameExpr)
            caseNameExpr((NameExpr) condExpr);
        else if (condExpr instanceof ParameterizedExpr)
            caseParameterizedExpr((ParameterizedExpr) condExpr);
        else if (condExpr instanceof CellIndexExpr)
            caseCellIndexExpr((CellIndexExpr) condExpr);
        else
            caseASTNode( condExpr );
    }

    /** @return a fresh, empty flow set. */
    @Override
    public Map<String, VFDatum> newInitialFlow() {
        return new HashMap<>();
    }

    /**
     * @param tree the AST to analyze
     * @param lookup query used to resolve names against files/packages on the path
     */
    public VFFlowSensitiveAnalysis( ASTNode tree , FunctionOrScriptQuery lookup )
    {
        super( tree );
        lookupQuery = lookup;
    }

    @Deprecated
    public VFFlowSensitiveAnalysis( ASTNode tree )
    {
        this(tree,LookupFile.getFunctionOrScriptQueryObject());
    }

    // Mark a name as VAR in the in-set (merged with any existing datum).
    private void putInVar(String name) {
        currentInSet.merge(name, VFDatum.VAR, Mergable::merge);
    }

    // Mark a name as VAR in the out-set (merged with any existing datum).
    private void putOutVar(String name) {
        currentOutSet.merge(name, VFDatum.VAR, Mergable::merge);
    }

    /** Analyzes a script body with a fresh flow set. */
    public void caseScript( Script node )
    {
        inFunction=false;
        currentScript=node;
        currentInSet = newInitialFlow();
        currentOutSet = currentInSet;
        node.getStmts().analyze(this);
        currentScript=null;
        outFlowSets.put( node, new HashMap<>(currentOutSet) );
    }

    /** A cell-index target is always a variable. */
    @Override
    public void caseCellIndexExpr(CellIndexExpr node){
        NameExpr n = new ArrayList<NameExpr>(((CellIndexExpr) node).getTarget().getNameExpressions()).get(0);
        putOutVar(n.getName().getID());
        annotateNode(n.getName());
        caseMyLValue(node);
    }

    /**
     * Analyzes a function: seeds the in-set with VAR/BOT names inherited from
     * the enclosing function (nested-function scoping) plus the input and
     * output parameters, then recurses into nested functions.
     */
    @Override
    public void caseFunction( Function node )
    {
        currentFunction=node;
        inFunction=true;
        currentInSet = newInitialFlow();
        currentOutSet = currentInSet;
        // Nested function: inherit variables (and undecided names) from the parent.
        if (node.getParent().getParent() instanceof Function){
            for( Map.Entry<String, VFDatum> pair : outFlowSets.get(node.getParent().getParent()).entrySet()) {
                if( pair.getValue()==VFDatum.VAR || pair.getValue()==VFDatum.BOT)
                    currentInSet.merge(pair.getKey(), pair.getValue(), Mergable::merge);
            }
        }
        for( Name n : node.getOutputParams() ){
            putInVar(n.getID());
        }
        for( Name n : node.getInputParams() ){
            putInVar(n.getID());
        }
        node.getStmts().analyze(this);
        //HashSet<ASTNode> toPatch= new HashSet<ASTNode>(outFlowSets.keySet());
        //backup currentSet
        outFlowSets.put( node, currentOutSet );
        for( Function f : node.getNestedFunctions() ){
            f.analyze( this );
        }
        inFunction=false;
        // for (ASTNode n: toPatch){
        //     outFlowSets.put(n, myFlowSet);
        // }
        currentFunction = null;
    }

    // True when the name resolves to a sibling function, the current script,
    // or a function/script visible through the lookup query.
    private boolean scriptOrFunctionExists(String name){
        return
            (currentFunction != null && currentFunction.lookupFunction(name) != null)
            || (currentScript != null && name.equals(currentScript.getName()))
            || lookupQuery.isFunctionOrScript(name);
    }

    private boolean packageExists(String name){
        return lookupQuery.isPackage(name);
    }

    /** RHS first, then every LHS name becomes a variable. */
    @Override
    public void caseAssignStmt( AssignStmt node )
    {
        node.getRHS().analyze( this );
        for( NameExpr n : node.getLHS().getNameExpressions() ){
            putOutVar(n.getName().getID());
            annotateNode(n.getName());
        }
        outFlowSets.put(node, currentOutSet);
        node.getLHS().analyze( this );
    }

    /** Names declared global are variables. */
    @Override
    public void caseGlobalStmt( GlobalStmt node )
    {
        for( Name n : node.getNames() ){
            putOutVar(n.getID());
            annotateNode(n);
        }
    }

    /** Names declared persistent are variables. */
    @Override
    public void casePersistentStmt( PersistentStmt node )
    {
        for( Name n : node.getNames() ){
            putOutVar(n.getID());
            annotateNode(n);
        }
    }

    /** The target of @name is a function. */
    @Override
    public void caseFunctionHandleExpr( FunctionHandleExpr node )
    {
        currentOutSet.merge(node.getName().getID(), VFDatum.FUN, Mergable::merge);
        annotateNode(node.getName());
    }

    /**
     * Classifies a bare name: in a function an undecided name is resolved via
     * the lookup query (FUN / PREFIX / BOT); in a script it becomes LDVAR
     * (load-defined variable, resolved at run time).
     */
    @Override
    public void caseNameExpr( NameExpr node )
    {
        currentOutSet = currentInSet;
        if ( inFunction )
        {
            String s = node.getName().getID();
            VFDatum d = currentOutSet.get( s );
            // NOTE(review): parses as (s != null && d == null) || BOT.equals(d);
            // s comes from getID() and is presumably never null — confirm grouping.
            if ( s!=null && d==null || VFDatum.BOT.equals( d ) )
            {
                VFDatum val = scriptOrFunctionExists(s) ? VFDatum.FUN
                        : packageExists(s) ? VFDatum.PREFIX : VFDatum.BOT;
                currentOutSet.merge(s, val, Mergable::merge);
            }
        }
        else{
            String s = node.getName().getID();
            VFDatum d = currentOutSet.get( s );
            if ( d==null || VFDatum.BOT.equals(d) )
            {
                currentOutSet.merge(s, VFDatum.LDVAR, Mergable::merge);
            }
        }
        annotateNode(node.getName());
    }

    public void caseLambdaExpr(LambdaExpr node){/*
        VFFlowset backup = currentOutSet.copy();
        Set<String> inputSet=new TreeSet<String>();
        for (Name inputParam: node.getInputParams()){
            currentOutSet.add(new ValueDatumPair<String,VFDatum>(inputParam.getID(),VFDatum.VAR));
            inputSet.add(inputParam.getID());
        }
        caseASTNode(node);
        for (Map.Entry<String, VFDatum> p:currentOutSet.getMap().entrySet()){
            if (! inputSet.contains(p.getKey()))
                backup.add(new ValueDatumPair<String, VFDatum>(p.getKey(),p.getValue()));
        }
        currentOutSet=backup;*/
    }

    // Extracts the indexed target of an l-value (cell or parameterized).
    private ASTNode getTarget(LValueExpr node){
        ASTNode target = null;
        if (node instanceof CellIndexExpr)
            target = ((CellIndexExpr) node).getTarget();
        else if (node instanceof ParameterizedExpr)
            target = ((ParameterizedExpr) node).getTarget();
        else
            log("in LValue without any target");
        return target;
    }

    // Extracts the argument list of an l-value (cell or parameterized).
    private ASTNode getArgs(LValueExpr node){
        ASTNode args = null;
        if (node instanceof CellIndexExpr)
            args = ((CellIndexExpr) node).getArgs();
        if (node instanceof ParameterizedExpr)
            args = ((ParameterizedExpr) node).getArgs();
        return args;
    }

    /**
     * Handles an indexing expression, maintaining the stack of candidate names
     * that an 'end' inside the argument list may bind to. A target already
     * known to be VAR replaces all previous candidates.
     */
    public void caseMyLValue(LValueExpr node) {
        ASTNode target = getTarget(node);
        ASTNode args = getArgs(node);
        NameExpr res = new ArrayList<NameExpr>(target.getNameExpressions()).get(0);
        String targetName=res.getName().getID();
        List<NameExpr> candidates_backup;
        if (endCandidates!=null)
            candidates_backup = new ArrayList<NameExpr>(endCandidates);
        else
            candidates_backup = new ArrayList<NameExpr>();
        if (outerParameterizedExpr==null){
            // entering the outermost indexing expression
            endCandidates = new ArrayList<NameExpr>();
            outerParameterizedExpr=node;
        }
        target.analyze( this );
        VFDatum d = currentOutSet.get( targetName );
        if ( d == null || d == VFDatum.BOT || VFDatum.LDVAR == d || d == VFDatum.WAR || d == VFDatum.TOP ) {
            endCandidates.add(res);
        }
        if (d == VFDatum.VAR){
            // a definite variable wins over every previous candidate
            endCandidates = new ArrayList<NameExpr>();
            endCandidates.add(res);
        }
        args.analyze( this );
        endCandidates = candidates_backup;
        if (outerParameterizedExpr==node){ //Reached the outermost expression
            endCandidates=null;
            outerParameterizedExpr=null;
        }
    }

    // load('file', 'x', ...) implicitly defines variables for its non-flag
    // string arguments.
    private void handleLoad(ParameterizedExpr node){
        if (! (node.getTarget() instanceof NameExpr)) return ;
        NameExpr target = (NameExpr)node.getTarget();
        if (target.getName().getID().equals( "load" ) ){
            ASTNode args = node.getChild(1);
            for (int i=1;i< args.getNumChild();i++)
                if( args.getChild( i ) instanceof StringLiteralExpr ) {
                    String param = ( (StringLiteralExpr) args.getChild( i ) ).getValue();
                    if (param.charAt(0)!='-'){ // skip option flags like '-mat'
                        putOutVar(param);
                        annotateNode(target.getName());
                    }
                }
        }
    }

    @Override public void caseParameterizedExpr( ParameterizedExpr node ) {
        handleLoad(node);
        caseMyLValue(node);
    }

    /**
     * Binds an 'end' expression to one of the collected candidate names:
     * exactly one candidate is a warning-level binding (WAR); ambiguity inside
     * a function, or everywhere when all candidates are library names, forces
     * the name to TOP.
     */
    @Override
    public void caseEndExpr(EndExpr e) {
        if (endCandidates == null || endCandidates.size() == 0 ){
            if (outerParameterizedExpr==null)
                log("Cannot bind end to anything");
            else {
                NameExpr res = new ArrayList<NameExpr>(getTarget(outerParameterizedExpr).getNameExpressions()).get(0);
                log("No candidates, making " +res.getName().getID() + " a TOP" );
                bindError(res, e);
            }
            return;
        }
        if ( endCandidates.size() == 1 ){
            bindWarn(endCandidates.get(0), e);
        }
        if (inFunction && endCandidates.size() > 1 ){
            bindError(endCandidates.get(0), e);
            log("More than one candidate, making " +endCandidates.get(0).getName().getID() + " a TOP" );
            return;
        }
        if ( (!inFunction) && endCandidates.size() > 1){
            NameExpr toBind = null;
            for (NameExpr n: endCandidates){
                if (!scriptOrFunctionExists(n.getName().getID())){
                    if ( toBind==null )
                        toBind=n;
                    else{
                        log("More than one candidate, making " + n.getName().getID() + " a TOP" );
                        bindError(n, e);
                    }
                }
            }
            if (toBind == null){ // all cands exist in the lib{
                log("More than one candidate, making " +endCandidates.get(0).getName().getID() + " a TOP" );
                bindError(endCandidates.get(0), e);
            }
            else
                bindWarn(toBind, e);
        }
    }

    // Force a name to TOP (irrecoverably ambiguous).
    private void bindError(NameExpr n, EndExpr e){
        currentOutSet.merge(n.getName().getID(), VFDatum.TOP, Mergable::merge);
        annotateNode(n.getName());
    }

    // Mark a name WAR unless it is already a definite VAR.
    private void bindWarn(NameExpr n, EndExpr e){
        VFDatum d = currentOutSet.get(n.getName().getID());
        if (d != VFDatum.VAR) {
            currentOutSet.merge(n.getName().getID(), VFDatum.WAR, Mergable::merge);
        }
        annotateNode(n.getName());
    }

    // Record the flow set that was current when this name was analyzed.
    private void annotateNode(Name n){
        outFlowSets.put(n, currentOutSet);
    }

    @Override
    public VFDatum getResult(Name n) {
        // NOTE(review): throws NPE for names never annotated — confirm callers
        // only query analyzed names.
        return outFlowSets.get(n).get(n.getID());
    }

    @Override
    public Map<String, VFDatum> processBreaks() {
        return currentOutSet;
    }

    @Override
    public Map<String, VFDatum> processContinues() {
        return currentOutSet;
    }

    @Override
    public void caseLoopVar(AssignStmt loopVar) {
        loopVar.analyze(this);
    }

    @Override
    public void caseLoopVarAsCondition(AssignStmt loopVar) {
        caseLoopVar( loopVar );
    }

    @Override
    public void caseLoopVarAsInit(AssignStmt loopVar) {
        caseLoopVar( loopVar );
    }

    @Override
    public void caseLoopVarAsUpdate(AssignStmt loopVar) {
        caseLoopVar( loopVar );
    }

    @Override
    public Map<String, VFDatum> copy(Map<String, VFDatum> source) {
        return new HashMap<>(source);
    }

    @Override
    public Map<String, VFDatum> merge(Map<String, VFDatum> in1, Map<String, VFDatum> in2) {
        // TODO(isbadawi): Originally this was doing AbstractFlowSet.union, and the
        // element type was Map.Entry<String, VFDatum>. Make sure behavior is the same
        return MergeUtil.unionMerge(in1, in2);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.jcr2spi.hierarchy;
import java.lang.ref.Reference;
import java.lang.ref.SoftReference;
import java.util.ArrayList;
import java.util.Collections;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import javax.jcr.ItemNotFoundException;
import javax.jcr.RepositoryException;
import org.apache.commons.collections.list.AbstractLinkedList;
import org.apache.jackrabbit.jcr2spi.state.Status;
import org.apache.jackrabbit.spi.ChildInfo;
import org.apache.jackrabbit.spi.Name;
import org.apache.jackrabbit.spi.NodeId;
import org.apache.jackrabbit.spi.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <code>ChildNodeEntriesImpl</code> implements a memory sensitive implementation
* of the <code>ChildNodeEntries</code> interface.
*/
final class ChildNodeEntriesImpl implements ChildNodeEntries {
private static Logger log = LoggerFactory.getLogger(ChildNodeEntriesImpl.class);
private boolean complete = false;
/**
* Linked list of {@link NodeEntry} instances.
*/
private final LinkedEntries entries = new LinkedEntries();
/**
* Map used for lookup by name.
*/
private final NameMap entriesByName = new NameMap();
private final NodeEntry parent;
private final EntryFactory factory;
/**
 * Creates the child-entry collection for the given parent. When
 * <code>childNodeInfos</code> is non-<code>null</code> the complete set of
 * children is materialized eagerly and the collection is marked complete;
 * otherwise an empty, incomplete collection is created whose entries are
 * added on demand and fetched in full only when needed (see
 * {@link #iterator()}).
 *
 * @param parent the parent node entry
 * @param factory factory used to create the individual child entries
 * @param childNodeInfos complete list of child infos, or <code>null</code>
 */
ChildNodeEntriesImpl(NodeEntry parent, EntryFactory factory, Iterator<ChildInfo> childNodeInfos) {
    this.parent = parent;
    this.factory = factory;
    complete = (childNodeInfos != null);
    if (childNodeInfos != null) {
        while (childNodeInfos.hasNext()) {
            ChildInfo info = childNodeInfos.next();
            add(factory.createNodeEntry(parent, info.getName(), info.getUniqueID()), info.getIndex());
        }
    }
}
/**
 * Returns the entry that directly follows <code>childEntry</code>.
 *
 * @param childEntry
 * @return the successor, or <code>null</code> when <code>childEntry</code>
 * is the last entry or is not contained in this collection.
 */
NodeEntry getNext(NodeEntry childEntry) {
    LinkedEntries.LinkNode node = entries.getLinkNode(childEntry);
    if (node == null) {
        return null;
    }
    LinkedEntries.LinkNode next = node.getNextLinkNode();
    return (next == null) ? null : next.getNodeEntry();
}
/**
 * Returns the entry that directly precedes <code>childEntry</code>.
 *
 * @param childEntry
 * @return the predecessor, or <code>null</code> when <code>childEntry</code>
 * is the first entry or is not contained in this collection.
 */
NodeEntry getPrevious(NodeEntry childEntry) {
    LinkedEntries.LinkNode node = entries.getLinkNode(childEntry);
    if (node == null) {
        return null;
    }
    LinkedEntries.LinkNode previous = node.getPreviousLinkNode();
    return (previous == null) ? null : previous.getNodeEntry();
}
/**
 * A new or terminal parent is always considered complete; otherwise the
 * collection is complete when it was fully loaded and the parent has not
 * been invalidated since.
 *
 * @see ChildNodeEntries#isComplete()
 */
public boolean isComplete() {
    int status = parent.getStatus();
    if (status == Status.NEW || Status.isTerminal(status)) {
        return true;
    }
    return complete && status != Status.INVALIDATED;
}
/**
 * Re-fetches the complete child-info list from the persistence layer,
 * unless this collection is already complete.
 *
 * @see ChildNodeEntries#reload()
 */
public synchronized void reload() throws ItemNotFoundException, RepositoryException {
    if (!isComplete()) {
        NodeId id = parent.getWorkspaceId();
        update(factory.getItemStateFactory().getChildNodeInfos(id));
    }
}
/**
 * Update the child node entries according to the child-infos obtained
 * from the persistence layer: entries missing from this collection are
 * inserted at their correct position, and existing entries are reordered
 * to match the persisted order.
 * NOTE: the status of the entries already present is not respected. Thus
 * new or removed entries are not touched in order not to modify the
 * transient status of the parent. Operations that affect the set or order
 * of child entries (AddNode, Move, Reorder) currently assert the
 * completeness of the ChildNodeEntries, therefore avoiding an update
 * resulting in inconsistent entries.
 *
 * @param childNodeInfos the persisted child infos, in persisted order
 * @see HierarchyEntry#reload(boolean) that ignores items with
 * pending changes.
 * @see org.apache.jackrabbit.jcr2spi.operation.AddNode
 * @see org.apache.jackrabbit.jcr2spi.operation.Move
 * @see org.apache.jackrabbit.jcr2spi.operation.ReorderNodes
 */
synchronized void update(Iterator<ChildInfo> childNodeInfos) {
    // insert missing entries and reorder all if necessary.
    // prevLN tracks the link node of the previously processed child-info,
    // i.e. the position after which the current one must appear.
    LinkedEntries.LinkNode prevLN = null;
    while (childNodeInfos.hasNext()) {
        ChildInfo ci = childNodeInfos.next();
        LinkedEntries.LinkNode ln = entriesByName.getLinkNode(ci.getName(), ci.getIndex(), ci.getUniqueID());
        if (ln == null) {
            // not present yet -> add missing at the correct position.
            NodeEntry entry = factory.createNodeEntry(parent, ci.getName(), ci.getUniqueID());
            ln = internalAddAfter(entry, ci.getIndex(), prevLN);
        } else if (prevLN != null) {
            // assert correct order of existing
            if (prevLN != ln) {
                reorderAfter(ln, prevLN);
            } else {
                // there was an existing entry but it's the same as the one
                // created/retrieved before. getting here indicates that
                // the SPI implementation provided invalid childNodeInfos.
                log.error("ChildInfo iterator contains multiple entries with the same name|index or uniqueID -> ignore ChildNodeInfo.");
            }
        }
        prevLN = ln;
    }
    // finally reset the status: the collection now mirrors the persisted list
    complete = true;
}
/**
 * Returns an iterator over a read-only snapshot of the entries, shielding
 * the caller from concurrent modifications of the underlying list.
 *
 * @see ChildNodeEntries#iterator()
 */
public Iterator<NodeEntry> iterator() {
    List<NodeEntry> snapshot = new ArrayList<NodeEntry>(entries.size());
    Iterator<LinkedEntries.LinkNode> it = entries.linkNodeIterator();
    while (it.hasNext()) {
        snapshot.add(it.next().getNodeEntry());
    }
    return Collections.unmodifiableList(snapshot).iterator();
}
/**
 * Returns all entries that match the given name (same-name siblings
 * included), delegating to the by-name lookup map.
 *
 * @see ChildNodeEntries#get(Name)
 */
public List<NodeEntry> get(Name nodeName) {
    return entriesByName.getList(nodeName);
}
/**
 * Returns the entry with the given name and 1-based index.
 *
 * @see ChildNodeEntries#get(Name, int)
 */
public NodeEntry get(Name nodeName, int index) {
    if (index >= Path.INDEX_DEFAULT) {
        return entriesByName.getNodeEntry(nodeName, index);
    }
    throw new IllegalArgumentException("index is 1-based");
}
/**
 * Returns the entry with the given name whose uniqueID matches, or
 * <code>null</code> if no such entry exists.
 *
 * @see ChildNodeEntries#get(Name, String)
 */
public NodeEntry get(Name nodeName, String uniqueID) {
    if (nodeName == null || uniqueID == null) {
        throw new IllegalArgumentException();
    }
    Iterator<NodeEntry> it = get(nodeName).iterator();
    while (it.hasNext()) {
        NodeEntry cne = it.next();
        if (uniqueID.equals(cne.getUniqueID())) {
            return cne;
        }
    }
    return null;
}
/**
 * Appends a <code>NodeEntry</code> at the end of the list. Equivalent to
 * calling {@link #add(NodeEntry, int)} with {@link Path#INDEX_UNDEFINED}.
 *
 * @param cne the <code>NodeEntry</code> to add.
 * @see ChildNodeEntries#add(NodeEntry)
 */
public synchronized void add(NodeEntry cne) {
    internalAdd(cne, Path.INDEX_UNDEFINED);
}
/**
 * Adds the given entry with the specified same-name-sibling index.
 *
 * @param cne the <code>NodeEntry</code> to add.
 * @param index the 1-based index or {@link Path#INDEX_UNDEFINED} to
 * append the entry at the end.
 * @throws IllegalArgumentException if the index is smaller than
 * {@link Path#INDEX_UNDEFINED}.
 * @see ChildNodeEntries#add(NodeEntry, int)
 */
public synchronized void add(NodeEntry cne, int index) {
    if (index < Path.INDEX_UNDEFINED) {
        // fix: the message previously read "Invalid index<n>" without a
        // separator between the text and the offending value.
        throw new IllegalArgumentException("Invalid index: " + index);
    }
    internalAdd(cne, index);
}
/**
 * @see ChildNodeEntries#add(NodeEntry, int, NodeEntry)
 */
public synchronized void add(NodeEntry entry, int index, NodeEntry beforeEntry) {
    if (beforeEntry == null) {
        // no 'before' entry given -> simply append at the end.
        add(entry);
        return;
    }
    // resolve the link node the new entry must be ordered before.
    LinkedEntries.LinkNode beforeLN = entries.getLinkNode(beforeEntry);
    if (beforeLN == null) {
        throw new NoSuchElementException();
    }
    LinkedEntries.LinkNode insertLN = internalAdd(entry, index);
    reorder(entry.getName(), insertLN, beforeLN);
}
/**
 * Internal add: registers the given entry with the given same-name-sibling
 * index, creating placeholder sibling entries for any lower indexes that
 * are not present yet, and reordering if an entry with the same index
 * already exists.
 *
 * @param entry the entry to add.
 * @param index the 1-based SNS index or {@link Path#INDEX_UNDEFINED} to
 * append at the end without any reordering.
 * @return the <code>LinkNode</code> belonging to the added entry.
 */
private LinkedEntries.LinkNode internalAdd(NodeEntry entry, int index) {
    Name nodeName = entry.getName();
    // retrieve ev. sibling node with same index. if index is 'undefined'
    // the existing entry is always null and no reordering occurs.
    LinkedEntries.LinkNode existing = null;
    if (index >= Path.INDEX_DEFAULT) {
        existing = entriesByName.getLinkNode(nodeName, index);
    }
    // in case index greater than default -> create intermediate entries.
    // TODO: TOBEFIXED in case of orderable node the order in the 'linked-entries' must be respected.
    for (int i = Path.INDEX_DEFAULT; i < index; i++) {
        LinkedEntries.LinkNode previous = entriesByName.getLinkNode(nodeName, i);
        if (previous == null) {
            // sibling with lower index missing -> create a placeholder
            // entry (recursion terminates because i < index).
            NodeEntry sibling = factory.createNodeEntry(parent, nodeName, null);
            internalAdd(sibling, i);
        }
    }
    // add new entry
    LinkedEntries.LinkNode ln = entries.add(entry, index);
    entriesByName.put(nodeName, index, ln);
    // reorder the child entries if, the new entry must be inserted rather
    // than appended at the end of the list.
    if (existing != null) {
        reorder(nodeName, ln, existing);
    }
    return ln;
}
/**
 * Add the specified new entry after the specified <code>insertAfter</code>.
 *
 * @param newEntry the entry to insert.
 * @param index the 1-based same-name-sibling index of the new entry.
 * @param insertAfter node after which to insert, or <code>null</code>.
 * @return the <code>LinkNode</code> associated with the <code>newEntry</code>.
 */
private LinkedEntries.LinkNode internalAddAfter(NodeEntry newEntry, int index,
                                                LinkedEntries.LinkNode insertAfter) {
    LinkedEntries.LinkNode linkNode = entries.addAfter(newEntry, index, insertAfter);
    Name name = newEntry.getName();
    entriesByName.put(name, index, linkNode);
    return linkNode;
}
/**
 * Removes the child node entry referring to the node state.
 *
 * @param childEntry the entry to be removed.
 * @return the removed entry or <code>null</code> if there is no such entry.
 * @see ChildNodeEntries#remove(NodeEntry)
 */
public synchronized NodeEntry remove(NodeEntry childEntry) {
    LinkedEntries.LinkNode ln = entries.removeNodeEntry(childEntry);
    if (ln == null) {
        // entry is not present -> nothing was removed.
        return null;
    }
    entriesByName.remove(childEntry.getName(), ln);
    return childEntry;
}
/**
 * Reorders an existing <code>NodeState</code> before another
 * <code>NodeState</code>. If <code>beforeNode</code> is
 * <code>null</code> <code>insertNode</code> is moved to the end of the
 * child node entries.
 *
 * @param insertEntry the NodeEntry to move.
 * @param beforeEntry the NodeEntry where <code>insertNode</code> is
 * reordered to.
 * @return the NodeEntry that followed the 'insertNode' before the reordering.
 * @throws NoSuchElementException if <code>insertNode</code> or
 * <code>beforeNode</code> does not have a <code>NodeEntry</code>
 * in this <code>ChildNodeEntries</code>.
 * @see ChildNodeEntries#reorder(NodeEntry, NodeEntry)
 */
public synchronized NodeEntry reorder(NodeEntry insertEntry, NodeEntry beforeEntry) {
    // the link node to move
    LinkedEntries.LinkNode insertLN = entries.getLinkNode(insertEntry);
    if (insertLN == null) {
        throw new NoSuchElementException();
    }
    // the link node where insertLN is ordered before
    LinkedEntries.LinkNode beforeLN = (beforeEntry != null) ? entries.getLinkNode(beforeEntry) : null;
    if (beforeEntry != null && beforeLN == null) {
        throw new NoSuchElementException();
    }
    // remember the entry that currently follows 'insertEntry'. If it was
    // the last entry, the next link node is the list header whose
    // NodeEntry resolves to null (see LinkNode#getValue()).
    NodeEntry previousBefore = insertLN.getNextLinkNode().getNodeEntry();
    if (previousBefore != beforeEntry) {
        reorder(insertEntry.getName(), insertLN, beforeLN);
    }
    // else: already at the requested position -> skip the reorder.
    return previousBefore;
}
/**
 * @see ChildNodeEntries#reorderAfter(NodeEntry, NodeEntry)
 */
public void reorderAfter(NodeEntry insertEntry, NodeEntry afterEntry) {
    // resolve the link node to move.
    LinkedEntries.LinkNode insertLN = entries.getLinkNode(insertEntry);
    if (insertLN == null) {
        throw new NoSuchElementException();
    }
    // resolve the link node the entry must be ordered after (if any).
    LinkedEntries.LinkNode afterLN = null;
    if (afterEntry != null) {
        afterLN = entries.getLinkNode(afterEntry);
        if (afterLN == null) {
            throw new NoSuchElementException();
        }
    }
    if (insertLN.getPreviousLinkNode() != afterLN) {
        reorderAfter(insertLN, afterLN);
    }
    // else: already in correct position. nothing to do
}
/**
 * Reorders <code>insertLN</code> before <code>beforeLN</code>, keeping the
 * by-name map (same-name-sibling positions) and the linked list in sync.
 *
 * @param insertName the name of the entry to move (used for SNS lookup).
 * @param insertLN the link node to move.
 * @param beforeLN the link node before which <code>insertLN</code> is
 * moved, or <code>null</code> to move it to the end of the list.
 */
private void reorder(Name insertName, LinkedEntries.LinkNode insertLN,
                     LinkedEntries.LinkNode beforeLN) {
    // reorder named map
    if (entriesByName.containsSiblings(insertName)) {
        int position;
        if (beforeLN == null) {
            // reorder to the end -> use illegal position as marker
            position = - 1;
        } else {
            // count all SNS-entries that are before 'beforeLN' in order to
            // determine the new position of the reordered node regarding
            // his siblings.
            position = 0;
            for (Iterator<LinkedEntries.LinkNode> it = entries.linkNodeIterator(); it.hasNext(); ) {
                LinkedEntries.LinkNode ln = it.next();
                if (ln == beforeLN) {
                    break;
                } else if (ln != insertLN && insertName.equals(ln.qName)) {
                    position++;
                } // else: ln == insertLN OR no SNS -> not relevant for position count
            }
        }
        entriesByName.reorder(insertName, insertLN, position);
    }
    // reorder in linked list
    entries.reorderNode(insertLN, beforeLN);
}
/**
 * Moves <code>insertLN</code> directly behind <code>afterLN</code>, keeping
 * the by-name map (same-name-sibling positions) and the linked list in sync.
 *
 * @param insertLN the link node to move; must not be <code>null</code>.
 * @param afterLN the link node after which <code>insertLN</code> is moved,
 * or <code>null</code> to move it to the first position.
 * @throws NoSuchElementException if <code>insertLN</code> is <code>null</code>.
 */
private void reorderAfter(LinkedEntries.LinkNode insertLN, LinkedEntries.LinkNode afterLN) {
    // the link node to move
    if (insertLN == null) {
        throw new NoSuchElementException();
    }
    // the link node where insertLN is ordered after
    if (afterLN == null) {
        // move to first position: ordering after the header places the
        // node at the head of the cyclic list.
        afterLN = entries.getHeader();
    }
    LinkedEntries.LinkNode currentAfter = afterLN.getNextLinkNode();
    if (currentAfter == insertLN) {
        log.debug("Already ordered behind 'afterEntry'.");
        // nothing to do
        return;
    } else {
        // reorder named map
        Name insertName = insertLN.qName;
        if (entriesByName.containsSiblings(insertName)) {
            int position = -1; // default: reorder to the end.
            if (afterLN == entries.getHeader()) {
                // move to the beginning
                position = 0;
            } else {
                // count all SNS-entries that are before 'afterLN' in order to
                // determine the new position of the reordered node regarding
                // his siblings.
                position = 0;
                for (Iterator<LinkedEntries.LinkNode> it = entries.linkNodeIterator(); it.hasNext(); ) {
                    LinkedEntries.LinkNode ln = it.next();
                    if (insertName.equals(ln.qName) && (ln != insertLN)) {
                        position++;
                    }
                    if (ln == afterLN) {
                        break;
                    }
                }
            }
            entriesByName.reorder(insertName, insertLN, position);
        }
        // reorder in linked list
        entries.reorderNode(insertLN, currentAfter);
    }
}
//-------------------------------------------------< AbstractLinkedList >---
/**
 * An implementation of a linked list which provides access to the internal
 * LinkNode which links the entries of the list.
 * <p>
 * NOTE: a LinkNode may hold its NodeEntry through a SoftReference (see
 * {@link LinkNode#getValue()}), so an entry that has been garbage
 * collected is transparently re-created on access.
 */
private final class LinkedEntries extends AbstractLinkedList {
    LinkedEntries() {
        super();
        // set up the internal header node of the cyclic list.
        init();
    }
    /**
     * Returns the matching <code>LinkNode</code> from a list or a single
     * <code>LinkNode</code>. This method will return <code>null</code>
     * if none of the entries matches either due to missing entry for given
     * state name or due to missing availability of the <code>NodeEntry</code>.
     *
     * @param nodeEntry the <code>NodeEntry</code> that is compared to the
     * resolution of any <code>NodeEntry</code> that matches by name.
     * @return the matching <code>LinkNode</code> or <code>null</code>
     */
    private LinkedEntries.LinkNode getLinkNode(NodeEntry nodeEntry) {
        // linear scan comparing by object identity.
        for (Iterator<LinkedEntries.LinkNode> it = linkNodeIterator(); it.hasNext();) {
            LinkedEntries.LinkNode ln = it.next();
            if (ln.getNodeEntry() == nodeEntry) {
                return ln;
            }
        }
        // not found
        return null;
    }
    /**
     * @return the header node of the underlying cyclic list; the header is
     * not a real entry and carries no <code>NodeEntry</code>.
     */
    private LinkedEntries.LinkNode getHeader() {
        return (LinkedEntries.LinkNode) header;
    }
    /**
     * Adds a child node entry at the end of this list.
     *
     * @param cne the child node entry to add.
     * @param index the 1-based same-name-sibling index of the entry.
     * @return the LinkNode which refers to the added <code>NodeEntry</code>.
     */
    LinkedEntries.LinkNode add(NodeEntry cne, int index) {
        LinkedEntries.LinkNode ln = new LinkedEntries.LinkNode(cne, index);
        // inserting before the header appends to the end of the cyclic list.
        addNode(ln, header);
        return ln;
    }
    /**
     * Adds the given child node entry to this list after the specified
     * <code>entry</code> or at the beginning if <code>entry</code> is
     * <code>null</code>.
     * <p>
     * NOTE(review): for <code>insertAfter == null</code> this delegates to
     * {@code addNode(newNode, header)}, which is the same call
     * {@link #add(NodeEntry, int)} uses to append at the END of the cyclic
     * list — the "at the beginning" claim above looks inaccurate; verify
     * against the AbstractLinkedList contract.
     *
     * @param cne the child node entry to add.
     * @param index the 1-based same-name-sibling index of the entry.
     * @param insertAfter after which to insert the new entry
     * @return the LinkNode which refers to the added <code>NodeEntry</code>.
     */
    LinkedEntries.LinkNode addAfter(NodeEntry cne, int index, LinkedEntries.LinkNode insertAfter) {
        LinkedEntries.LinkNode newNode;
        if (insertAfter == null) {
            // insert at the beginning
            newNode = new LinkedEntries.LinkNode(cne, index);
            addNode(newNode, header);
        } else if (insertAfter.getNextLinkNode() == null) {
            newNode = add(cne, index);
        } else {
            // insert before the successor of 'insertAfter', i.e. directly
            // behind 'insertAfter'.
            newNode = new LinkedEntries.LinkNode(cne, index);
            addNode(newNode, insertAfter.getNextLinkNode());
        }
        return newNode;
    }
    /**
     * Remove the LinkEntry the contains the given NodeEntry as value.
     *
     * @param cne NodeEntry to be removed.
     * @return LinkedEntries.LinkNode that has been removed or
     * <code>null</code> if no link node wraps the given entry.
     */
    LinkedEntries.LinkNode removeNodeEntry(NodeEntry cne) {
        LinkedEntries.LinkNode ln = getLinkNode(cne);
        if (ln != null) {
            ln.remove();
        }
        return ln;
    }
    /**
     * Reorders an existing <code>LinkNode</code> before another existing
     * <code>LinkNode</code>. If <code>before</code> is <code>null</code>
     * the <code>insert</code> node is moved to the end of the list.
     *
     * @param insert the node to reorder.
     * @param before the node where to reorder node <code>insert</code>.
     */
    void reorderNode(LinkedEntries.LinkNode insert, LinkedEntries.LinkNode before) {
        // remove and re-insert; note that this mutates modCount and thus
        // invalidates any open LinkNodeIterator.
        removeNode(insert);
        if (before == null) {
            addNode(insert, header);
        } else {
            addNode(insert, before);
        }
    }
    /**
     * Create a new <code>LinkNode</code> for a given {@link NodeEntry}
     * <code>value</code>.
     *
     * @param value a child node entry.
     * @return a wrapping {@link LinkedEntries.LinkNode}.
     * @see AbstractLinkedList#createNode(Object)
     */
    @Override
    protected Node createNode(Object value) {
        return new LinkedEntries.LinkNode(value, Path.INDEX_DEFAULT);
    }
    /**
     * @return a new <code>LinkNode</code>.
     * @see AbstractLinkedList#createHeaderNode()
     */
    @Override
    protected Node createHeaderNode() {
        return new LinkedEntries.LinkNode();
    }
    /**
     * @return iterator over all LinkNode entries in this list.
     */
    private Iterator<LinkedEntries.LinkNode> linkNodeIterator() {
        return new LinkNodeIterator();
    }
    //----------------------------------------------------------------------
    /**
     * Extends the <code>AbstractLinkedList.Node</code>.
     */
    private final class LinkNode extends Node {
        // name of the wrapped NodeEntry; null for the header node only.
        private final Name qName;
        protected LinkNode() {
            super();
            qName = null;
        }
        protected LinkNode(Object value, int index) {
            // add soft reference from linkNode to the NodeEntry (value)
            // unless the entry is a SNSibling. TODO: review again.
            super(index > Path.INDEX_DEFAULT ? value : new SoftReference<Object>(value));
            qName = ((NodeEntry) value).getName();
        }
        @Override
        protected void setValue(Object value) {
            // values are only assigned via the constructor or internally
            // by getValue() when re-creating a collected entry.
            throw new UnsupportedOperationException("Not implemented");
        }
        @Override
        protected Object getValue() {
            Object val = super.getValue();
            NodeEntry ne;
            if (val == null) {
                ne = null;
            } else if (val instanceof Reference) {
                // stored as SoftReference -> may resolve to null after GC.
                ne = (NodeEntry) ((Reference) val).get();
            } else {
                ne = (NodeEntry) val;
            }
            // if the nodeEntry has been g-collected in the mean time
            // create a new NodeEntry in order to avoid returning null.
            if (ne == null && this != header) {
                ne = factory.createNodeEntry(parent, qName, null);
                super.setValue(new SoftReference<NodeEntry>(ne));
            }
            return ne;
        }
        /**
         * @return the wrapped <code>NodeEntry</code>.
         */
        public NodeEntry getNodeEntry() {
            return (NodeEntry) getValue();
        }
        /**
         * Removes this <code>LinkNode</code> from the linked list.
         */
        public void remove() {
            removeNode(this);
        }
        /**
         * @return the next LinkNode.
         */
        public LinkedEntries.LinkNode getNextLinkNode() {
            return (LinkedEntries.LinkNode) super.getNextNode();
        }
        /**
         * @return the previous LinkNode.
         */
        public LinkedEntries.LinkNode getPreviousLinkNode() {
            return (LinkedEntries.LinkNode) super.getPreviousNode();
        }
    }
    //----------------------------------------------------------------------
    /**
     * Fail-fast iterator over the link nodes of the enclosing list.
     */
    private class LinkNodeIterator implements Iterator<LinkedEntries.LinkNode> {
        private LinkedEntries.LinkNode next = ((LinkedEntries.LinkNode) header).getNextLinkNode();
        // snapshot of modCount taken at construction for fail-fast checks.
        private final int expectedModCount = modCount;
        public boolean hasNext() {
            checkModCount();
            // the list is cyclic: reaching the header means we are done.
            return next != header;
        }
        public LinkedEntries.LinkNode next() {
            checkModCount();
            if (!hasNext()) {
                throw new NoSuchElementException();
            }
            LinkedEntries.LinkNode n = next;
            next = next.getNextLinkNode();
            return n;
        }
        public void remove() {
            throw new UnsupportedOperationException("remove");
        }
        private void checkModCount() {
            if (expectedModCount != modCount) {
                throw new ConcurrentModificationException();
            }
        }
    }
}
//--------------------------------------------------------------------------
/**
 * Mapping of Name to LinkNode OR List of LinkNode(s) in case of SNSiblings.
 * Entries without same-name siblings live in 'nameMap'; as soon as a second
 * entry with the same name is registered, all entries for that name are
 * moved into 'snsMap' (see {@link #put(Name, int, LinkedEntries.LinkNode)}).
 */
private static class NameMap {
    private final Map<Name, List<LinkedEntries.LinkNode>> snsMap = new HashMap<Name, List<LinkedEntries.LinkNode>>();
    private final Map<Name, LinkedEntries.LinkNode> nameMap = new HashMap<Name, LinkedEntries.LinkNode>();
    /**
     * Return true if more than one NodeEntry with the given name exists.
     *
     * @param qName the name to test.
     * @return true if more than one NodeEntry with the given name exists.
     */
    public boolean containsSiblings(Name qName) {
        return snsMap.containsKey(qName);
    }
    /**
     * Returns a single <code>NodeEntry</code> or a (new, mutable)
     * <code>List</code> of NodeEntry objects in case of same-name siblings;
     * <code>null</code> if no entry exists for the given name.
     *
     * @param qName the name to look up.
     * @return a single <code>NodeEntry</code> or a <code>List</code> of
     * NodeEntry objects.
     */
    private Object get(Name qName) {
        LinkedEntries.LinkNode val = nameMap.get(qName);
        if (val != null) {
            return val.getNodeEntry();
        } else {
            List<LinkedEntries.LinkNode> l = snsMap.get(qName);
            if (l != null) {
                // resolve every link node to its NodeEntry.
                List<NodeEntry> nodeEntries = new ArrayList<NodeEntry>(l.size());
                for (Iterator<LinkedEntries.LinkNode> it = l.iterator(); it.hasNext();) {
                    LinkedEntries.LinkNode ln = it.next();
                    nodeEntries.add(ln.getNodeEntry());
                }
                return nodeEntries;
            }
        }
        return null;
    }
    /**
     * Returns a unmodifiable List of NodeEntry objects even if the name map
     * only contains a single entry for the given name. If no matching entry
     * exists for the given <code>Name</code> an empty list is returned.
     *
     * @param name the name to look up.
     * @return list of entries or an empty list.
     */
    @SuppressWarnings("unchecked")
    public List<NodeEntry> getList(Name name) {
        Object obj = get(name);
        if (obj == null) {
            return Collections.emptyList();
        } else if (obj instanceof List) {
            // defensive copy -> the returned list is detached from the map.
            List<NodeEntry> l = new ArrayList<NodeEntry>((List<NodeEntry>) obj);
            return Collections.unmodifiableList(l);
        } else {
            // NodeEntry
            return Collections.singletonList((NodeEntry)obj);
        }
    }
    /**
     * Returns the entry with the given name and 1-based index, or
     * <code>null</code> if no such entry exists.
     *
     * @param name the name to look up.
     * @param index 1-based same-name-sibling index.
     * @return matching entry or <code>null</code>.
     */
    @SuppressWarnings("unchecked")
    public NodeEntry getNodeEntry(Name name, int index) {
        Object obj = get(name);
        if (obj == null) {
            return null;
        }
        if (obj instanceof List) {
            // map entry is a list of siblings
            return findMatchingEntry((List<NodeEntry>) obj, index);
        } else {
            // map entry is a single child node entry
            if (index == Path.INDEX_DEFAULT) {
                return (NodeEntry) obj;
            }
        }
        return null;
    }
    /**
     * Returns the link node with the given name and 1-based index, or
     * <code>null</code> if no such link node exists.
     *
     * @param name the name to look up.
     * @param index 1-based same-name-sibling index.
     * @return matching link node or <code>null</code>.
     * @throws IllegalArgumentException if index is smaller than
     * {@link Path#INDEX_DEFAULT}.
     */
    public LinkedEntries.LinkNode getLinkNode(Name name, int index) {
        if (index < Path.INDEX_DEFAULT) {
            throw new IllegalArgumentException("Illegal index " + index);
        }
        LinkedEntries.LinkNode val = nameMap.get(name);
        if (val != null) {
            return (index == Path.INDEX_DEFAULT) ? val : null;
        } else {
            // look in snsMap
            List<LinkedEntries.LinkNode> l = snsMap.get(name);
            int pos = index - 1; // Index of NodeEntry is 1-based
            return (l != null && pos < l.size()) ? l.get(pos) : null;
        }
    }
    /**
     * Returns the link node with the given name, index and uniqueID. If a
     * uniqueID is given, the lookup is tried by uniqueID first; if nothing
     * matches (the entry may not have its uniqueID set yet) the lookup
     * falls back to name and index.
     *
     * @param name the name to look up.
     * @param index 1-based same-name-sibling index.
     * @param uniqueID uniqueID of the entry or <code>null</code>.
     * @return matching link node or <code>null</code>.
     */
    public LinkedEntries.LinkNode getLinkNode(Name name, int index, String uniqueID) {
        if (uniqueID != null) {
            // -> try if any entry matches.
            // if none matches it be might that entry doesn't have uniqueID
            // set yet -> search without uniqueID
            LinkedEntries.LinkNode val = nameMap.get(name);
            if (val != null) {
                if (uniqueID.equals(val.getNodeEntry().getUniqueID())) {
                    return val;
                }
            } else {
                // look in snsMap
                List<LinkedEntries.LinkNode> l = snsMap.get(name);
                if (l != null) {
                    for (Iterator<LinkedEntries.LinkNode> it = l.iterator(); it.hasNext();) {
                        LinkedEntries.LinkNode ln = it.next();
                        if (uniqueID.equals(ln.getNodeEntry().getUniqueID())) {
                            return ln;
                        }
                    }
                }
            }
        }
        // no uniqueID passed or not match.
        // try to load the child entry by name and index.
        return getLinkNode(name, index);
    }
    /**
     * Registers the given link node under the given name. If a single entry
     * with the same name is already present it is migrated to the SNS map;
     * the new node is then inserted at the 0-based list position derived
     * from the 1-based index (or appended when the position is out of range).
     *
     * @param name the name of the entry.
     * @param index 1-based same-name-sibling index of the entry.
     * @param value the link node to register.
     */
    public void put(Name name, int index, LinkedEntries.LinkNode value) {
        // if 'nameMap' already contains a single entry -> move it to snsMap
        LinkedEntries.LinkNode single = nameMap.remove(name);
        List<LinkedEntries.LinkNode> l;
        if (single != null) {
            l = new ArrayList<LinkedEntries.LinkNode>();
            l.add(single);
            snsMap.put(name, l);
        } else {
            // if 'snsMap' already contains list
            l = snsMap.get(name);
        }
        if (l == null) {
            // no same name siblings -> simply put to the name map.
            nameMap.put(name, value);
        } else {
            // sibling(s) already present -> insert into the list
            int position = index - 1;
            if (position < 0 || position > l.size()) {
                l.add(value); // invalid position -> append at the end.
            } else {
                l.add(position, value); // insert with the correct index.
            }
        }
    }
    /**
     * Removes the link node registered under the given name.
     * <p>
     * NOTE(review): if 'nameMap' holds a different link node for this name
     * it is removed and returned regardless of <code>value</code> — callers
     * are expected to pass the node actually registered for the name; verify.
     *
     * @param name the name of the entry.
     * @param value the link node to remove.
     * @return the removed link node or <code>null</code>.
     */
    public LinkedEntries.LinkNode remove(Name name, LinkedEntries.LinkNode value) {
        LinkedEntries.LinkNode rm = nameMap.remove(name);
        if (rm == null) {
            List<LinkedEntries.LinkNode> l = snsMap.get(name);
            if (l != null && l.remove(value)) {
                rm = value;
            }
        }
        return rm;
    }
    /**
     * Moves the given link node to the given position within its list of
     * same-name siblings. No effect if no SNS entries exist for the name.
     *
     * @param name the name of the entry.
     * @param insertValue the link node to move.
     * @param position 0-based target position within the sibling list, or
     * -1 (any out-of-range value) to move it to the end.
     */
    public void reorder(Name name, LinkedEntries.LinkNode insertValue, int position) {
        List<LinkedEntries.LinkNode> sns = snsMap.get(name);
        if (sns == null) {
            // no same name siblings -> no special handling required
            return;
        }
        // reorder sns in the name-list
        sns.remove(insertValue);
        if (position < 0 || position > sns.size()) {
            // simply move to end of list
            sns.add(insertValue);
        } else {
            sns.add(position, insertValue);
        }
    }
    /**
     * Returns the sibling with the given 1-based index from the list.
     * Callers are expected to pass index >= 1 (guarded by the public
     * lookup methods).
     *
     * @param siblings list of same-name-sibling entries.
     * @param index 1-based index.
     * @return matching entry or <code>null</code>.
     */
    private static NodeEntry findMatchingEntry(List<NodeEntry> siblings, int index) {
        // shortcut if index can never match
        if (index > siblings.size()) {
            return null;
        } else {
            return siblings.get(index - 1);
        }
    }
}
}
| |
package com.messagemedia.service;
import java.util.HashMap;
import java.util.Map;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAnyAttribute;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.datatype.XMLGregorianCalendar;
import javax.xml.namespace.QName;
/**
* <p>Java class for MessageType complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="MessageType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <all>
* <element name="origin" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="recipients" type="{http://xml.m4u.com.au/2009}RecipientsType"/>
* <element name="content" type="{http://www.w3.org/2001/XMLSchema}string"/>
* <element name="scheduled" type="{http://www.w3.org/2001/XMLSchema}dateTime" minOccurs="0"/>
* <element name="deliveryReport" type="{http://www.w3.org/2001/XMLSchema}boolean" minOccurs="0"/>
* <element name="validityPeriod" type="{http://www.w3.org/2001/XMLSchema}unsignedByte" minOccurs="0"/>
* <element name="tags" type="{http://xml.m4u.com.au/2009}MessageTagListType" minOccurs="0"/>
* </all>
* <attribute name="format" type="{http://xml.m4u.com.au/2009}MessageFormatType" default="SMS" />
* <attribute name="sequenceNumber" type="{http://www.w3.org/2001/XMLSchema}unsignedInt" default="0" />
* <anyAttribute processContents='lax'/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "MessageType", propOrder = {

})
public class MessageType {

    protected String origin;
    @XmlElement(required = true)
    protected RecipientsType recipients;
    @XmlElement(required = true)
    protected String content;
    @XmlSchemaType(name = "dateTime")
    protected XMLGregorianCalendar scheduled;
    @XmlElement(defaultValue = "false")
    protected Boolean deliveryReport;
    @XmlElement(defaultValue = "169")
    @XmlSchemaType(name = "unsignedByte")
    protected Short validityPeriod;
    protected MessageTagListType tags;
    @XmlAttribute(name = "format")
    protected MessageFormatType format;
    @XmlAttribute(name = "sequenceNumber")
    @XmlSchemaType(name = "unsignedInt")
    protected Long sequenceNumber;
    @XmlAnyAttribute
    private Map<QName, String> otherAttributes = new HashMap<QName, String>();

    /**
     * Returns the origin (sender) of the message.
     *
     * @return a {@link String} or {@code null} if not set
     */
    public String getOrigin() {
        return origin;
    }

    /**
     * Sets the origin (sender) of the message.
     *
     * @param value a {@link String}, may be {@code null}
     */
    public void setOrigin(String value) {
        this.origin = value;
    }

    /**
     * Returns the recipients of the message.
     *
     * @return a {@link RecipientsType} or {@code null} if not set
     */
    public RecipientsType getRecipients() {
        return recipients;
    }

    /**
     * Sets the recipients of the message.
     *
     * @param value a {@link RecipientsType}
     */
    public void setRecipients(RecipientsType value) {
        this.recipients = value;
    }

    /**
     * Returns the message content.
     *
     * @return a {@link String} or {@code null} if not set
     */
    public String getContent() {
        return content;
    }

    /**
     * Sets the message content.
     *
     * @param value a {@link String}
     */
    public void setContent(String value) {
        this.content = value;
    }

    /**
     * Returns the scheduled delivery time.
     *
     * @return an {@link XMLGregorianCalendar} or {@code null} if not set
     */
    public XMLGregorianCalendar getScheduled() {
        return scheduled;
    }

    /**
     * Sets the scheduled delivery time.
     *
     * @param value an {@link XMLGregorianCalendar}, may be {@code null}
     */
    public void setScheduled(XMLGregorianCalendar value) {
        this.scheduled = value;
    }

    /**
     * Returns whether a delivery report was requested.
     *
     * @return a {@link Boolean} or {@code null} if not set
     */
    public Boolean isDeliveryReport() {
        return deliveryReport;
    }

    /**
     * Sets whether a delivery report is requested.
     *
     * @param value a {@link Boolean}, may be {@code null}
     */
    public void setDeliveryReport(Boolean value) {
        this.deliveryReport = value;
    }

    /**
     * Returns the validity period of the message.
     *
     * @return a {@link Short} or {@code null} if not set
     */
    public Short getValidityPeriod() {
        return validityPeriod;
    }

    /**
     * Sets the validity period of the message.
     *
     * @param value a {@link Short}, may be {@code null}
     */
    public void setValidityPeriod(Short value) {
        this.validityPeriod = value;
    }

    /**
     * Returns the tags attached to the message.
     *
     * @return a {@link MessageTagListType} or {@code null} if not set
     */
    public MessageTagListType getTags() {
        return tags;
    }

    /**
     * Sets the tags attached to the message.
     *
     * @param value a {@link MessageTagListType}, may be {@code null}
     */
    public void setTags(MessageTagListType value) {
        this.tags = value;
    }

    /**
     * Returns the message format, falling back to the schema default
     * ({@code SMS}) when the attribute was not set.
     *
     * @return a {@link MessageFormatType}, never {@code null}
     */
    public MessageFormatType getFormat() {
        return (format == null) ? MessageFormatType.SMS : format;
    }

    /**
     * Sets the message format.
     *
     * @param value a {@link MessageFormatType}, may be {@code null}
     */
    public void setFormat(MessageFormatType value) {
        this.format = value;
    }

    /**
     * Returns the sequence number, falling back to the schema default
     * ({@code 0}) when the attribute was not set.
     *
     * @return the sequence number
     */
    public long getSequenceNumber() {
        return (sequenceNumber == null) ? 0L : sequenceNumber;
    }

    /**
     * Sets the sequence number.
     *
     * @param value a {@link Long}, may be {@code null}
     */
    public void setSequenceNumber(Long value) {
        this.sequenceNumber = value;
    }

    /**
     * Returns the live map of attributes that aren't bound to any typed
     * property on this class, keyed by attribute name. Callers may add
     * attributes by mutating the returned map directly, hence no setter.
     *
     * @return the attribute map, never {@code null}
     */
    public Map<QName, String> getOtherAttributes() {
        return otherAttributes;
    }

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.jcr.resource;
import java.io.InputStream;
import java.lang.reflect.Array;
import java.math.BigDecimal;
import java.util.Calendar;
import java.util.StringTokenizer;
import javax.jcr.Node;
import javax.jcr.Property;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.Value;
import javax.jcr.ValueFactory;
import javax.jcr.query.Query;
import javax.jcr.query.QueryManager;
import javax.jcr.query.QueryResult;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.apache.sling.api.resource.ResourceUtil;
import org.apache.sling.jcr.resource.internal.helper.LazyInputStream;
/**
* The <code>JcrResourceUtil</code> class provides helper methods used
* throughout this bundle.
*/
public class JcrResourceUtil {
/**
 * Helper method to execute a JCR query.
 *
 * @param session the session whose workspace is queried
 * @param query the query statement
 * @param language the query language (e.g. XPath, SQL)
 * @return the query result
 * @throws RepositoryException if the query cannot be created or executed
 */
public static QueryResult query(Session session, String query,
        String language) throws RepositoryException {
    return session.getWorkspace()
            .getQueryManager()
            .createQuery(query, language)
            .execute();
}
/**
 * Converts a JCR Value to a corresponding Java Object: BOOLEAN, DATE,
 * DECIMAL, DOUBLE and LONG map to their boxed Java types, BINARY maps to
 * a lazily-opened InputStream, and everything else (NAME, PATH, REFERENCE,
 * STRING, UNDEFINED, ...) is exposed as a String.
 */
public static Object toJavaObject(Value value) throws RepositoryException {
    switch (value.getType()) {
        case PropertyType.BOOLEAN:
            return value.getBoolean();
        case PropertyType.DATE:
            return value.getDate();
        case PropertyType.DECIMAL:
            return value.getDecimal();
        case PropertyType.DOUBLE:
            return value.getDouble();
        case PropertyType.LONG:
            return value.getLong();
        case PropertyType.BINARY:
            // defer opening the stream until it is actually read
            return new LazyInputStream(value);
        default:
            // NAME, PATH, REFERENCE, STRING, UNDEFINED and anything else
            return value.getString();
    }
}
/**
 * Converts the value(s) of a JCR Property to a corresponding Java Object.
 * If the property has multiple values the result is an array of Java
 * Objects representing the converted values of the property; the array's
 * component type is derived from the first converted value.
 */
public static Object toJavaObject(Property property)
        throws RepositoryException {
    if (!property.isMultiple()) {
        // single value property
        return toJavaObject(property.getValue());
    }
    // multi-value property: pick a typed result array based on the
    // converted type of the first value (String[] as the fallback).
    final Value[] values = property.getValues();
    final Object first = (values.length > 0) ? toJavaObject(values[0]) : null;
    final Object[] result;
    if (first instanceof Boolean) {
        result = new Boolean[values.length];
    } else if (first instanceof Calendar) {
        result = new Calendar[values.length];
    } else if (first instanceof Double) {
        result = new Double[values.length];
    } else if (first instanceof Long) {
        result = new Long[values.length];
    } else if (first instanceof BigDecimal) {
        result = new BigDecimal[values.length];
    } else if (first instanceof InputStream) {
        // streams have no useful common component type
        result = new Object[values.length];
    } else {
        result = new String[values.length];
    }
    for (int i = 0; i < values.length; i++) {
        if (values[i] != null) {
            result[i] = toJavaObject(values[i]);
        }
    }
    return result;
}
/**
 * Creates a {@link javax.jcr.Value JCR Value} for the given object with
 * the given Session.
 * Selects the {@link javax.jcr.PropertyType PropertyType} according to
 * the instance of the object's Class. Note that the order of the checks
 * matters: Long/Short/Integer must be tested before the generic Number
 * fallback (which converts to double).
 *
 * @param value object to convert
 * @param session session used to obtain the ValueFactory
 * @return the value or null if not convertible to a valid PropertyType
 * @throws RepositoryException in case of error, accessing the Repository
 */
public static Value createValue(Object value, Session session)
        throws RepositoryException {
    ValueFactory fac = session.getValueFactory();
    if (value instanceof Calendar) {
        return fac.createValue((Calendar) value);
    }
    if (value instanceof InputStream) {
        return fac.createValue(fac.createBinary((InputStream) value));
    }
    if (value instanceof Node) {
        return fac.createValue((Node) value);
    }
    if (value instanceof BigDecimal) {
        return fac.createValue((BigDecimal) value);
    }
    if (value instanceof Long) {
        return fac.createValue((Long) value);
    }
    if (value instanceof Short) {
        return fac.createValue((Short) value);
    }
    if (value instanceof Integer) {
        return fac.createValue((Integer) value);
    }
    if (value instanceof Number) {
        return fac.createValue(((Number) value).doubleValue());
    }
    if (value instanceof Boolean) {
        return fac.createValue((Boolean) value);
    }
    if (value instanceof String) {
        return fac.createValue((String) value);
    }
    // unsupported type -> not convertible
    return null;
}
/**
 * Sets the value of the property.
 * Selects the {@link javax.jcr.PropertyType PropertyType} according
 * to the instance of the object's class. A {@code null} value removes
 * the property; an array value is converted element by element.
 *
 * @param node The node where the property will be set on.
 * @param propertyName The name of the property.
 * @param propertyValue The value for the property.
 */
public static void setProperty(final Node node,
                               final String propertyName,
                               final Object propertyValue)
        throws RepositoryException {
    if (propertyValue == null) {
        // setting a null String removes the property
        node.setProperty(propertyName, (String) null);
        return;
    }
    final Session session = node.getSession();
    if (propertyValue.getClass().isArray()) {
        // convert each array element to a JCR Value
        final int length = Array.getLength(propertyValue);
        final Value[] jcrValues = new Value[length];
        for (int i = 0; i < length; i++) {
            jcrValues[i] = createValue(Array.get(propertyValue, i), session);
        }
        node.setProperty(propertyName, jcrValues);
    } else {
        node.setProperty(propertyName, createValue(propertyValue, session));
    }
}
/**
 * Helper method, which returns the given resource type as returned from the
 * {@link org.apache.sling.api.resource.Resource#getResourceType()} as a
 * relative path, by turning every colon into a slash.
 *
 * @param type The resource type to be converted into a path
 * @return The resource type as a path.
 * @deprecated Use {@link ResourceUtil#resourceTypeToPath(String)}
 */
@Deprecated
public static String resourceTypeToPath(String type) {
    // plain character replacement — no regex machinery required
    return type.replace(':', '/');
}
/**
 * Returns the super type of the given resource type. This is the result of
 * adapting the child resource
 * {@link JcrResourceConstants#SLING_RESOURCE_SUPER_TYPE_PROPERTY} of the
 * <code>Resource</code> addressed by the <code>resourceType</code> to a
 * string. If no such child resource exists or if the resource does not
 * adapt to a string, this method returns <code>null</code>.
 *
 * @param resourceResolver The <code>ResourceResolver</code> used to
 *        access the resource whose path (relative or absolute) is given
 *        by the <code>resourceType</code> parameter.
 * @param resourceType The resource type whose super type is to be returned.
 * @return the super type of the <code>resourceType</code> or
 *         <code>null</code> if it cannot be determined.
 * @deprecated Use {@link ResourceUtil#getResourceSuperType(ResourceResolver, String)}
 */
@Deprecated
public static String getResourceSuperType(
        ResourceResolver resourceResolver, String resourceType) {
    // pure delegation to the non-deprecated replacement
    return ResourceUtil.getResourceSuperType(resourceResolver, resourceType);
}
/**
 * Returns the resource super type of the given resource. This is either the
 * super type explicitly set on the resource itself or, if none is set, the
 * result of calling
 * {@link ResourceUtil#getResourceSuperType(ResourceResolver, String)} on the
 * resource type of the <code>resource</code>.
 * <p>
 * This mechanism allows to specifically set the resource super type on a
 * per-resource level overwriting any resource super type hierarchy
 * pre-defined by the actual resource type of the resource.
 *
 * @param resource The <code>Resource</code> whose resource super type is
 *        requested.
 * @return The resource super type or <code>null</code> if none can be
 *         determined.
 * @deprecated Call {@link ResourceUtil#findResourceSuperType(Resource)}
 */
@Deprecated
public static String getResourceSuperType(Resource resource) {
    // a super type set directly on the resource wins
    final String explicitSuperType = resource.getResourceSuperType();
    if (explicitSuperType != null) {
        return explicitSuperType;
    }
    // otherwise fall back to the super type of the resource's type
    final ResourceResolver resolver = resource.getResourceResolver();
    return ResourceUtil.getResourceSuperType(resolver, resource.getResourceType());
}
/**
 * Creates or gets the {@link javax.jcr.Node Node} at the given path.
 * If the node has to be created, all non-existent intermediate path
 * elements are created with the given intermediate node type and the
 * returned node is created with the given node type.
 *
 * @param path to create
 * @param intermediateNodeType to use for creation of intermediate nodes (or null)
 * @param nodeType to use for creation of the final node (or null)
 * @param session to use
 * @param autoSave Should save be called when a new node is created?
 * @return the Node at path
 * @throws RepositoryException in case of exception accessing the Repository
 */
public static Node createPath(String path,
                              String intermediateNodeType,
                              String nodeType,
                              Session session,
                              boolean autoSave)
throws RepositoryException {
    // null, empty or root path: nothing to create
    if (path == null || path.length() == 0 || "/".equals(path)) {
        return session.getRootNode();
    }
    // drop a single trailing slash
    if (path.endsWith("/")) {
        path = path.substring(0, path.length() - 1);
    }
    // fast path: the node already exists
    if (session.itemExists(path)) {
        return session.getNode(path);
    }
    // find the deepest existing ancestor and create the rest below it
    final String existingPath = findExistingPath(path, session);
    final Node baseNode;
    final String remainder;
    if (existingPath == null) {
        baseNode = session.getRootNode();
        remainder = path.substring(1);
    } else {
        baseNode = session.getNode(existingPath);
        remainder = path.substring(existingPath.length() + 1);
    }
    return createPath(baseNode, remainder, intermediateNodeType, nodeType, autoSave);
}
/**
 * Creates or gets the {@link javax.jcr.Node Node} at the given path
 * relative to the given parent node.
 * In case it has to create the Node all non-existent intermediate path-elements
 * will be create with the given intermediate node type and the returned node
 * will be created with the given nodeType.
 * Intermediate nodes that fail to be created are tolerated: another session
 * may have created them concurrently (see the catch block below).
 *
 * @param parentNode starting node
 * @param relativePath to create
 * @param intermediateNodeType to use for creation of intermediate nodes (or null)
 * @param nodeType to use for creation of the final node (or null)
 * @param autoSave Should save be called when a new node is created?
 * @return the Node at path
 * @throws RepositoryException in case of exception accessing the Repository
 */
public static Node createPath(Node parentNode,
                              String relativePath,
                              String intermediateNodeType,
                              String nodeType,
                              boolean autoSave)
throws RepositoryException {
    // null, empty or "/" relative path: the parent itself is the target
    if (relativePath == null || relativePath.length() == 0 || "/".equals(relativePath)) {
        return parentNode;
    }
    // sanitize path if it ends with a slash
    if ( relativePath.endsWith("/") ) {
        relativePath = relativePath.substring(0, relativePath.length() - 1);
    }
    if (!parentNode.hasNode(relativePath)) {
        Session session = parentNode.getSession();
        // resolve against the absolute path so we can reuse any part of the
        // tree that already exists below a different ancestor
        String path = parentNode.getPath() + "/" + relativePath;
        String existingPath = findExistingPath(path, session);
        if (existingPath != null) {
            // restart from the deepest existing ancestor
            parentNode = session.getNode(existingPath);
            relativePath = path.substring(existingPath.length() + 1);
        }
        Node node = parentNode;
        int pos = relativePath.lastIndexOf('/');
        if ( pos != -1 ) {
            // create all intermediate segments (everything before the last '/')
            final StringTokenizer st = new StringTokenizer(relativePath.substring(0, pos), "/");
            while ( st.hasMoreTokens() ) {
                final String token = st.nextToken();
                if ( !node.hasNode(token) ) {
                    try {
                        if ( intermediateNodeType != null ) {
                            node.addNode(token, intermediateNodeType);
                        } else {
                            node.addNode(token);
                        }
                        if ( autoSave ) node.getSession().save();
                    } catch (RepositoryException re) {
                        // we ignore this as this folder might be created from a different task
                        // refresh(false) discards our pending change so the
                        // concurrently-created node becomes visible
                        node.refresh(false);
                    }
                }
                node = node.getNode(token);
            }
            // only the final segment remains to be created
            relativePath = relativePath.substring(pos + 1);
        }
        // create the leaf node with the requested node type (if any)
        if ( !node.hasNode(relativePath) ) {
            if ( nodeType != null ) {
                node.addNode(relativePath, nodeType);
            } else {
                node.addNode(relativePath);
            }
            if ( autoSave ) node.getSession().save();
        }
        return node.getNode(relativePath);
    } else {
        return parentNode.getNode(relativePath);
    }
}
/**
 * Walks up the given absolute path and returns the deepest ancestor that
 * already exists in the repository, or {@code null} if no ancestor below
 * the root exists.
 *
 * @param path absolute path whose ancestors are inspected
 * @param session session used for the existence checks
 * @return the longest existing prefix of {@code path}, or {@code null}
 * @throws RepositoryException in case of exception accessing the Repository
 */
private static String findExistingPath(String path, Session session)
throws RepositoryException {
    // strip the last segment and test each ancestor, deepest first
    String candidate = path;
    int slashIndex = candidate.lastIndexOf('/');
    while (slashIndex > 0) {
        candidate = candidate.substring(0, slashIndex);
        if (session.itemExists(candidate)) {
            // first hit is the deepest existing ancestor
            return candidate;
        }
        slashIndex = candidate.lastIndexOf('/');
    }
    return null;
}
}
| |
package com.google.api.services.samples.youtube.cmdline.analytics;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.services.samples.youtube.cmdline.Auth;
import com.google.api.services.youtube.YouTube;
import com.google.api.services.youtube.model.Channel;
import com.google.api.services.youtube.model.ChannelListResponse;
import com.google.api.services.youtubeAnalytics.YouTubeAnalytics;
import com.google.api.services.youtubeAnalytics.model.ResultTable;
import com.google.api.services.youtubeAnalytics.model.ResultTable.ColumnHeaders;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.io.PrintStream;
import java.math.BigDecimal;
import java.util.List;
/**
 * Demo displaying YouTube metrics from a user's channel using the YouTube Data and YouTube
 * Analytics APIs. It also uses OAuth2 for authorization.
 *
 * @author Christoph Schwab-Ganser and Jeremy Walker
 */
public class YouTubeAnalyticsReports {

    /**
     * Global instance of the HTTP transport.
     */
    private static final HttpTransport HTTP_TRANSPORT = new NetHttpTransport();

    /**
     * Global instance of the JSON factory.
     */
    private static final JsonFactory JSON_FACTORY = new JacksonFactory();

    /**
     * Global instance of Youtube object to make general YouTube API requests.
     */
    private static YouTube youtube;

    /**
     * Global instance of YoutubeAnalytics object to make analytic API requests.
     */
    private static YouTubeAnalytics analytics;

    /**
     * Authorizes user, gets user's default channel via YouTube Data API, and gets/prints stats on
     * user's channel using the YouTube Analytics API.
     *
     * @param args command line args (not used).
     */
    public static void main(String[] args) {
        // Scopes required to access YouTube general and analytics information.
        List<String> scopes = Lists.newArrayList(
                "https://www.googleapis.com/auth/yt-analytics.readonly",
                "https://www.googleapis.com/auth/youtube.readonly"
        );

        try {
            Credential credential = Auth.authorize(scopes, "analyticsreports");

            // YouTube object used to make all non-analytic API requests.
            youtube = new YouTube.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential)
                    .setApplicationName("youtube-analytics-api-report-example")
                    .build();

            // YouTube object used to make all analytic API requests.
            analytics = new YouTubeAnalytics.Builder(HTTP_TRANSPORT, JSON_FACTORY, credential)
                    .setApplicationName("youtube-analytics-api-report-example")
                    .build();

            // Constructs request to get channel id for current user.
            YouTube.Channels.List channelRequest = youtube.channels().list("id,snippet");
            channelRequest.setMine(true);
            channelRequest.setFields("items(id,snippet/title)");
            ChannelListResponse channels = channelRequest.execute();

            // List of channels associated with user.
            List<Channel> listOfChannels = channels.getItems();

            PrintStream writer = System.out;

            // Guard against accounts without any channel: getItems() may be
            // null or empty, in which case get(0) below would throw.
            if (listOfChannels == null || listOfChannels.isEmpty()) {
                writer.println("No channel found.");
                return;
            }

            // Grab default channel which is always the first item in the list.
            Channel defaultChannel = listOfChannels.get(0);
            String channelId = defaultChannel.getId();

            if (channelId == null) {
                writer.println("No channel found.");
            } else {
                writer.println("Default Channel: " + defaultChannel.getSnippet().getTitle() +
                        " ( " + channelId + " )\n");
                printData(writer, "Views Over Time.", executeViewsOverTimeQuery(analytics, channelId));
                printData(writer, "Top Videos", executeTopVideosQuery(analytics, channelId));
                printData(writer, "Demographics", executeDemographicsQuery(analytics, channelId));
            }
        } catch (IOException e) {
            System.err.println("IOException: " + e.getMessage());
            e.printStackTrace();
        } catch (Throwable t) {
            System.err.println("Throwable: " + t.getMessage());
            t.printStackTrace();
        }
    }

    /**
     * Returns the views and unique viewers per day.
     *
     * @param analytics the analytics service object used to access the API.
     * @param id the string id from which to retrieve data.
     * @return the response from the API.
     * @throws IOException if an API error occurred.
     */
    private static ResultTable executeViewsOverTimeQuery(YouTubeAnalytics analytics,
                                                         String id) throws IOException {
        return analytics.reports()
                .query("channel==" + id, // channel id
                        "2012-01-01", // Start date.
                        "2012-01-14", // End date.
                        "views,uniques") // Metrics.
                .setDimensions("day")
                .setSort("day")
                .execute();
    }

    /**
     * Returns the top video by views.
     *
     * @param analytics the analytics service object used to access the API.
     * @param id the string id from which to retrieve data.
     * @return the response from the API.
     * @throws IOException if an API error occurred.
     */
    private static ResultTable executeTopVideosQuery(YouTubeAnalytics analytics,
                                                     String id) throws IOException {
        return analytics.reports()
                .query("channel==" + id, // channel id
                        "2012-01-01", // Start date.
                        "2012-08-14", // End date.
                        "views,subscribersGained,subscribersLost") // Metrics.
                .setDimensions("video")
                .setSort("-views")
                .setMaxResults(10)
                .execute();
    }

    /**
     * Returns the demographics report.
     *
     * @param analytics the analytics service object used to access the API.
     * @param id the string id from which to retrieve data.
     * @return the response from the API.
     * @throws IOException if an API error occurred.
     */
    private static ResultTable executeDemographicsQuery(YouTubeAnalytics analytics,
                                                        String id) throws IOException {
        return analytics.reports()
                .query("channel==" + id, // channel id
                        "2007-01-01", // Start date.
                        "2012-08-14", // End date.
                        "viewerPercentage") // Metrics.
                .setDimensions("ageGroup,gender")
                .setSort("-viewerPercentage")
                .execute();
    }

    /**
     * Prints the output from the API. The report title is printed along with
     * each column name and all the data in the rows.
     *
     * @param writer stream to output to
     * @param title title of the report
     * @param results data returned from the API.
     */
    private static void printData(PrintStream writer, String title, ResultTable results) {
        writer.println("Report: " + title);
        if (results.getRows() == null || results.getRows().isEmpty()) {
            writer.println("No results Found.");
        } else {

            // Print column headers.
            for (ColumnHeaders header : results.getColumnHeaders()) {
                writer.printf("%30s", header.getName());
            }
            writer.println();

            // Print actual data, formatting each cell according to the
            // dataType advertised in the column header.
            for (List<Object> row : results.getRows()) {
                for (int colNum = 0; colNum < results.getColumnHeaders().size(); colNum++) {
                    ColumnHeaders header = results.getColumnHeaders().get(colNum);
                    Object column = row.get(colNum);
                    if ("INTEGER".equals(header.getUnknownKeys().get("dataType"))) {
                        long l = ((BigDecimal) column).longValue();
                        writer.printf("%30d", l);
                    } else if ("FLOAT".equals(header.getUnknownKeys().get("dataType"))) {
                        writer.printf("%30f", column);
                    } else if ("STRING".equals(header.getUnknownKeys().get("dataType"))) {
                        writer.printf("%30s", column);
                    } else {
                        // default output.
                        writer.printf("%30s", column);
                    }
                }
                writer.println();
            }
            writer.println();
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.tasks;
import org.elasticsearch.ElasticsearchTimeoutException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction;
import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.get.GetTaskResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.indices.refresh.RefreshAction;
import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeAction;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryAction;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.fieldstats.FieldStatsAction;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexAction;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.replication.ReplicationResponse;
import org.elasticsearch.action.support.replication.TransportReplicationActionTests;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.tasks.TaskResult;
import org.elasticsearch.tasks.TaskResultsService;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.tasks.MockTaskManager;
import org.elasticsearch.test.tasks.MockTaskManagerListener;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
import static java.util.Collections.singleton;
import static org.elasticsearch.common.unit.TimeValue.timeValueMillis;
import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyCollectionOf;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
/**
* Integration tests for task management API
* <p>
* We need at least 2 nodes so we have a master node a non-master node
*/
@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, minNumDataNodes = 2, transportClientRatio = 0.0)
public class TasksIT extends ESIntegTestCase {
private Map<Tuple<String, String>, RecordingTaskManagerListener> listeners = new HashMap<>();
@Override
protected boolean addMockTransportService() {
    // disabled here: nodePlugins() below registers MockTransportService.TestPlugin explicitly
    return false;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
    // mock transport service plus the plugin that provides the test task actions
    return Arrays.asList(MockTransportService.TestPlugin.class, TestTaskPlugin.class);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
    // transport clients need the same plugin set as the nodes
    return nodePlugins();
}
@Override
protected Settings nodeSettings(int nodeOrdinal) {
    // enable the mock task manager so listeners can observe task registration events
    return Settings.builder()
        .put(super.nodeSettings(nodeOrdinal))
        .put(MockTaskManager.USE_MOCK_TASK_MANAGER_SETTING.getKey(), true)
        .build();
}
/**
 * Lists the node-level list-tasks action on data nodes and expects at least
 * one such task per data node.
 */
public void testTaskCounts() {
    // Run only on data nodes, restricted to the node-level ([n]) action
    final ListTasksResponse tasks = client().admin().cluster()
        .prepareListTasks("data:true")
        .setActions(ListTasksAction.NAME + "[n]")
        .get();
    assertThat(tasks.getTasks().size(), greaterThanOrEqualTo(cluster().numDataNodes()));
}
/**
 * Verifies task creation for a master-node operation (cluster health): one
 * task when asked on the master directly, two tasks (with a parent/child
 * relation) when asked via a non-master node.
 */
public void testMasterNodeOperationTasks() {
    registerTaskManageListeners(ClusterHealthAction.NAME);

    // Health request sent directly to the master produces a single task there
    internalCluster().masterClient().admin().cluster().prepareHealth().get();
    assertEquals(1, numberOfEvents(ClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
    assertEquals(1, numberOfEvents(ClusterHealthAction.NAME, event -> event.v1() == false)); // counting only unregistration events

    resetTaskManageListeners(ClusterHealthAction.NAME);

    // Health request sent to a non-master node is forwarded to the master,
    // so one task is expected on each of the two nodes involved
    internalCluster().nonMasterClient().admin().cluster().prepareHealth().get();
    assertEquals(2, numberOfEvents(ClusterHealthAction.NAME, Tuple::v1)); // counting only registration events
    assertEquals(2, numberOfEvents(ClusterHealthAction.NAME, event -> event.v1() == false)); // counting only unregistration events

    final List<TaskInfo> registered = findEvents(ClusterHealthAction.NAME, Tuple::v1);
    // Exactly one of the two tasks must be the parent of the other
    if (registered.get(0).getParentTaskId().isSet()) {
        assertParentTask(Collections.singletonList(registered.get(0)), registered.get(1));
    } else {
        assertParentTask(Collections.singletonList(registered.get(1)), registered.get(0));
    }
}
/**
 * Verifies task creation for a broadcast-replication-style action
 * (field stats): one coordinating task plus one task per primary shard,
 * each shard task parented to the coordinating task.
 */
public void testTransportReplicationAllShardsTasks() {
    final String mainAction = FieldStatsAction.NAME;
    final String shardAction = FieldStatsAction.NAME + "[s]";
    registerTaskManageListeners(mainAction); // coordinating task
    registerTaskManageListeners(shardAction); // shard level tasks

    createIndex("test");
    ensureGreen("test"); // Make sure all shards are allocated
    client().prepareFieldStats().setFields("field").get();

    // the field stats operation should produce one main task
    NumShards numberOfShards = getNumShards("test");
    assertEquals(1, numberOfEvents(mainAction, Tuple::v1));
    // and then one operation per primary shard
    assertEquals(numberOfShards.numPrimaries, numberOfEvents(shardAction, Tuple::v1));
    // every shard level task should have the main task as its parent
    assertParentTask(findEvents(shardAction, Tuple::v1), findEvents(mainAction, Tuple::v1).get(0));
}
/**
 * Verifies task creation for a broadcast-by-node action (index upgrade):
 * one coordinating task plus one node-level task per node that holds
 * shards of the index.
 */
public void testTransportBroadcastByNodeTasks() {
    final String mainAction = UpgradeAction.NAME;
    final String nodeAction = UpgradeAction.NAME + "[n]";
    registerTaskManageListeners(mainAction); // main task
    registerTaskManageListeners(nodeAction); // node level tasks

    createIndex("test");
    ensureGreen("test"); // Make sure all shards are allocated
    client().admin().indices().prepareUpgrade("test").get();

    // the upgrade operation should produce one main task
    assertEquals(1, numberOfEvents(mainAction, Tuple::v1));
    // and then one operation per each node where shards are located
    assertEquals(internalCluster().nodesInclude("test").size(), numberOfEvents(nodeAction, Tuple::v1));
    // all node level tasks should have the main task as a parent
    assertParentTask(findEvents(nodeAction, Tuple::v1), findEvents(mainAction, Tuple::v1).get(0));
}
/**
 * Verifies task creation for a single-shard replication action
 * (validate query): one coordinating task and one shard-level task,
 * the latter parented to the former.
 */
public void testTransportReplicationSingleShardTasks() {
    final String mainAction = ValidateQueryAction.NAME;
    final String shardAction = ValidateQueryAction.NAME + "[s]";
    registerTaskManageListeners(mainAction); // main task
    registerTaskManageListeners(shardAction); // shard level tasks

    createIndex("test");
    ensureGreen("test"); // Make sure all shards are allocated
    client().admin().indices().prepareValidateQuery("test").get();

    // the validate operation should produce one main task
    assertEquals(1, numberOfEvents(mainAction, Tuple::v1));
    // and then one shard level operation
    assertEquals(1, numberOfEvents(shardAction, Tuple::v1));
    // the shard level operation should have the main task as its parent
    assertParentTask(findEvents(shardAction, Tuple::v1), findEvents(mainAction, Tuple::v1).get(0));
}
/**
 * Verifies the full task tree produced by a broadcast replication action
 * (refresh): one main task, per-shard [s] tasks whose parent depends on
 * which node they run on, and [s][p]/[s][r] tasks for each primary and
 * replica shard.
 */
public void testTransportBroadcastReplicationTasks() {
    registerTaskManageListeners(RefreshAction.NAME); // main task
    registerTaskManageListeners(RefreshAction.NAME + "[s]"); // shard level tasks
    registerTaskManageListeners(RefreshAction.NAME + "[s][*]"); // primary and replica shard tasks
    createIndex("test");
    ensureGreen("test"); // Make sure all shards are allocated
    client().admin().indices().prepareRefresh("test").get();

    // the refresh operation should produce one main task
    NumShards numberOfShards = getNumShards("test");

    // debug output to help diagnose topology-dependent failures
    logger.debug("number of shards, total: [{}], primaries: [{}] ", numberOfShards.totalNumShards, numberOfShards.numPrimaries);
    logger.debug("main events {}", numberOfEvents(RefreshAction.NAME, Tuple::v1));
    logger.debug("main event node {}", findEvents(RefreshAction.NAME, Tuple::v1).get(0).getTaskId().getNodeId());
    logger.debug("[s] events {}", numberOfEvents(RefreshAction.NAME + "[s]", Tuple::v1));
    logger.debug("[s][*] events {}", numberOfEvents(RefreshAction.NAME + "[s][*]", Tuple::v1));
    logger.debug("nodes with the index {}", internalCluster().nodesInclude("test"));

    assertEquals(1, numberOfEvents(RefreshAction.NAME, Tuple::v1));
    // Because it's broadcast replication action we will have as many [s] level requests
    // as we have primary shards on the coordinating node plus we will have one task per primary outside of the
    // coordinating node due to replication.
    // If all primaries are on the coordinating node, the number of tasks should be equal to the number of primaries
    // If all primaries are not on the coordinating node, the number of tasks should be equal to the number of primaries times 2
    assertThat(numberOfEvents(RefreshAction.NAME + "[s]", Tuple::v1), greaterThanOrEqualTo(numberOfShards.numPrimaries));
    assertThat(numberOfEvents(RefreshAction.NAME + "[s]", Tuple::v1), lessThanOrEqualTo(numberOfShards.numPrimaries * 2));

    // Verify that all [s] events have the proper parent
    // This is complicated because if the shard task runs on the same node it has main task as a parent
    // but if it runs on non-coordinating node it would have another intermediate [s] task on the coordinating node as a parent
    TaskInfo mainTask = findEvents(RefreshAction.NAME, Tuple::v1).get(0);
    List<TaskInfo> sTasks = findEvents(RefreshAction.NAME + "[s]", Tuple::v1);
    for (TaskInfo taskInfo : sTasks) {
        if (mainTask.getTaskId().getNodeId().equals(taskInfo.getTaskId().getNodeId())) {
            // This shard level task runs on the same node as a parent task - it should have the main task as a direct parent
            assertParentTask(Collections.singletonList(taskInfo), mainTask);
        } else {
            String description = taskInfo.getDescription();
            // This shard level task runs on another node - it should have a corresponding shard level task on the node where main task
            // is running
            List<TaskInfo> sTasksOnRequestingNode = findEvents(RefreshAction.NAME + "[s]",
                event -> event.v1() && mainTask.getTaskId().getNodeId().equals(event.v2().getTaskId().getNodeId())
                    && description.equals(event.v2().getDescription()));
            // There should be only one parent task
            assertEquals(1, sTasksOnRequestingNode.size());
            assertParentTask(Collections.singletonList(taskInfo), sTasksOnRequestingNode.get(0));
        }
    }

    // we will have as many [s][p] and [s][r] tasks as we have primary and replica shards
    assertEquals(numberOfShards.totalNumShards, numberOfEvents(RefreshAction.NAME + "[s][*]", Tuple::v1));

    // we the [s][p] and [s][r] tasks should have a corresponding [s] task on the same node as a parent
    List<TaskInfo> spEvents = findEvents(RefreshAction.NAME + "[s][*]", Tuple::v1);
    for (TaskInfo taskInfo : spEvents) {
        List<TaskInfo> sTask;
        if (taskInfo.getAction().endsWith("[s][p]")) {
            // A [s][p] level task should have a corresponding [s] level task on the same node
            sTask = findEvents(RefreshAction.NAME + "[s]",
                event -> event.v1() && taskInfo.getTaskId().getNodeId().equals(event.v2().getTaskId().getNodeId())
                    && taskInfo.getDescription().equals(event.v2().getDescription()));
        } else {
            // A [s][r] level task should have a corresponding [s] level task on the a different node (where primary is located)
            sTask = findEvents(RefreshAction.NAME + "[s]",
                event -> event.v1() && taskInfo.getParentTaskId().getNodeId().equals(event.v2().getTaskId().getNodeId()) && taskInfo
                    .getDescription()
                    .equals(event.v2().getDescription()));
        }
        // There should be only one parent task
        assertEquals(1, sTask.size());
        assertParentTask(Collections.singletonList(taskInfo), sTask.get(0));
    }
}
/**
 * Verifies the task tree produced by a bulk index request: one main task,
 * one or two [s] shard tasks (depending on whether the primary lives on
 * the coordinating node), one [s][p] primary task and one [s][r] task per
 * replica, all correctly parented.
 */
public void testTransportBulkTasks() {
    registerTaskManageListeners(BulkAction.NAME); // main task
    registerTaskManageListeners(BulkAction.NAME + "[s]"); // shard task
    registerTaskManageListeners(BulkAction.NAME + "[s][p]"); // shard task on primary
    registerTaskManageListeners(BulkAction.NAME + "[s][r]"); // shard task on replica
    createIndex("test");
    ensureGreen("test"); // Make sure all shards are allocated to catch replication tasks
    client().prepareBulk().add(client().prepareIndex("test", "doc", "test_id").setSource("{\"foo\": \"bar\"}")).get();

    // the bulk operation should produce one main task
    assertEquals(1, numberOfEvents(BulkAction.NAME, Tuple::v1));

    // we should also get 1 or 2 [s] operation with main operation as a parent
    // in case the primary is located on the coordinating node we will have 1 operation, otherwise - 2
    List<TaskInfo> shardTasks = findEvents(BulkAction.NAME + "[s]", Tuple::v1);
    assertThat(shardTasks.size(), allOf(lessThanOrEqualTo(2), greaterThanOrEqualTo(1)));

    // Select the effective shard task — the one that parents [s][p]/[s][r]
    TaskInfo shardTask;
    if (shardTasks.size() == 1) {
        // we have only one task - it's going to be the parent task for all [s][p] and [s][r] tasks
        shardTask = shardTasks.get(0);
        // and it should have the main task as a parent
        assertParentTask(shardTask, findEvents(BulkAction.NAME, Tuple::v1).get(0));
    } else {
        // with two [s] tasks, the child of the pair is the effective one
        if (shardTasks.get(0).getParentTaskId().equals(shardTasks.get(1).getTaskId())) {
            // task 1 is the parent of task 0, that means that task 0 will control [s][p] and [s][r] tasks
            shardTask = shardTasks.get(0);
            // in turn the parent of the task 1 should be the main task
            assertParentTask(shardTasks.get(1), findEvents(BulkAction.NAME, Tuple::v1).get(0));
        } else {
            // otherwise task 1 will control [s][p] and [s][r] tasks
            shardTask = shardTasks.get(1);
            // in turn the parent of the task 0 should be the main task
            assertParentTask(shardTasks.get(0), findEvents(BulkAction.NAME, Tuple::v1).get(0));
        }
    }

    // we should also get one [s][p] operation with shard operation as a parent
    assertEquals(1, numberOfEvents(BulkAction.NAME + "[s][p]", Tuple::v1));
    assertParentTask(findEvents(BulkAction.NAME + "[s][p]", Tuple::v1), shardTask);

    // we should get as many [s][r] operations as we have replica shards
    // they all should have the same shard task as a parent
    assertEquals(getNumShards("test").numReplicas, numberOfEvents(BulkAction.NAME + "[s][r]", Tuple::v1));
    assertParentTask(findEvents(BulkAction.NAME + "[s][r]", Tuple::v1), shardTask);
}
/**
 * Very basic "is it plugged in" style test that indexes a document and makes sure that you can fetch the status of the process. The
 * goal here is to verify that the large moving parts that make fetching task status work fit together rather than to verify any
 * particular status results from indexing. For that, look at {@link TransportReplicationActionTests}. We intentionally don't use the
 * task recording mechanism used in other places in this test so we can make sure that the status fetching works properly over the wire.
 */
public void testCanFetchIndexStatus() throws InterruptedException, ExecutionException, IOException {
    /* We make sure all indexing tasks wait to start before this lock is *unlocked* so we can fetch their status with both the get and
     * list APIs. */
    CountDownLatch taskRegistered = new CountDownLatch(1);
    CountDownLatch letTaskFinish = new CountDownLatch(1);
    ListenableActionFuture<IndexResponse> indexFuture = null;
    try {
        for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
            ((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() {
                @Override
                public void onTaskRegistered(Task task) {
                    if (task.getAction().startsWith(IndexAction.NAME)) {
                        taskRegistered.countDown();
                        logger.debug("Blocking [{}] starting", task);
                        try {
                            // Best effort: hold the indexing task long enough for the assertions below to observe it as
                            // running; give up after 10 seconds so a broken test cannot wedge the task manager forever.
                            letTaskFinish.await(10, TimeUnit.SECONDS);
                        } catch (InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    }
                }
                @Override
                public void onTaskUnregistered(Task task) {
                }
                @Override
                public void waitForTaskCompletion(Task task) {
                }
            });
        }
        indexFuture = client().prepareIndex("test", "test").setSource("test", "test").execute();
        // Fail fast if no indexing task was registered in time. The original code ignored the boolean result of
        // await(), so a timeout silently fell through and produced a confusing "no tasks" failure below.
        assertTrue("the index task should have been registered within 10s",
            taskRegistered.await(10, TimeUnit.SECONDS));
        ListTasksResponse listResponse = client().admin().cluster().prepareListTasks().setActions("indices:data/write/index*")
            .setDetailed(true).get();
        assertThat(listResponse.getTasks(), not(empty()));
        for (TaskInfo task : listResponse.getTasks()) {
            assertNotNull(task.getStatus());
            GetTaskResponse getResponse = client().admin().cluster().prepareGetTask(task.getTaskId()).get();
            assertFalse("task should still be running", getResponse.getTask().isCompleted());
            // The task fetched via the get API must agree with the listing, except that its running time keeps growing.
            TaskInfo fetchedWithGet = getResponse.getTask().getTask();
            assertEquals(task.getId(), fetchedWithGet.getId());
            assertEquals(task.getType(), fetchedWithGet.getType());
            assertEquals(task.getAction(), fetchedWithGet.getAction());
            assertEquals(task.getDescription(), fetchedWithGet.getDescription());
            assertEquals(task.getStatus(), fetchedWithGet.getStatus());
            assertEquals(task.getStartTime(), fetchedWithGet.getStartTime());
            assertThat(fetchedWithGet.getRunningTimeNanos(), greaterThanOrEqualTo(task.getRunningTimeNanos()));
            assertEquals(task.isCancellable(), fetchedWithGet.isCancellable());
            assertEquals(task.getParentTaskId(), fetchedWithGet.getParentTaskId());
        }
    } finally {
        // Release the blocked indexing task and make sure the index request itself succeeded without shard failures.
        letTaskFinish.countDown();
        if (indexFuture != null) {
            IndexResponse indexResponse = indexFuture.get();
            assertArrayEquals(ReplicationResponse.EMPTY, indexResponse.getShardInfo().getFailures());
        }
    }
}
/**
 * Starts the blocking test task across the cluster, cancels the single coordinating task, and verifies that the whole
 * task tree is torn down afterwards.
 */
public void testTasksCancellation() throws Exception {
    // Start blocking test task
    // Get real client (the plugin is not registered on transport nodes)
    ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
        .execute();
    logger.info("--> started test tasks");
    // Wait for the task to start on all nodes: one per-node "[n]" child per cluster node
    assertBusy(() -> assertEquals(internalCluster().size(),
        client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
    logger.info("--> cancelling the main test task");
    CancelTasksResponse cancelTasksResponse = client().admin().cluster().prepareCancelTasks()
        .setActions(TestTaskPlugin.TestTaskAction.NAME).get();
    // Only the coordinating task matches the bare action name (no "[n]" suffix)
    assertEquals(1, cancelTasksResponse.getTasks().size());
    future.get();
    logger.info("--> checking that test tasks are not running");
    // The wildcard matches the parent task and all "[n]" children; none should remain after cancellation
    assertEquals(0,
        client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "*").get().getTasks().size());
}
/**
 * Starts the blocking test task on every node and verifies that the dedicated unblock action lets it finish cleanly.
 */
public void testTasksUnblocking() throws Exception {
    // Start blocking test task
    ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
        .execute();
    // Wait for the task to start on all nodes
    assertBusy(() -> assertEquals(internalCluster().size(),
        client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get().getTasks().size()));
    // Unblock the tasks and wait for the overall request to complete
    TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
    future.get();
    // Once unblocked, no per-node "[n]" task should remain registered
    assertEquals(0, client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").get()
        .getTasks().size());
}
/**
 * Checks that the list tasks API with {@code wait_for_completion=true} only returns after the matching task finished,
 * and returns without node or task failures.
 */
public void testListTasksWaitForCompletion() throws Exception {
    // Whether the result is stored should not matter for the list API, so randomize it
    waitForCompletionTestCase(randomBoolean(), id -> {
        return client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME)
            .setWaitForCompletion(true).execute();
    }, response -> {
        assertThat(response.getNodeFailures(), empty());
        assertThat(response.getTaskFailures(), empty());
    });
}
/**
 * Checks that the get task API with {@code wait_for_completion=true} waits for the task to finish but carries no
 * response body when the task did not store its result.
 */
public void testGetTaskWaitForCompletionWithoutStoringResult() throws Exception {
    waitForCompletionTestCase(false, id -> {
        return client().admin().cluster().prepareGetTask(id).setWaitForCompletion(true).execute();
    }, response -> {
        assertNotNull(response.getTask().getTask());
        assertTrue(response.getTask().isCompleted());
        // We didn't store the result so it won't come back when we wait
        assertNull(response.getTask().getResponse());
    });
}
/**
 * Checks that the get task API with {@code wait_for_completion=true} waits for the task to finish and returns the
 * stored result when the task stored one.
 */
public void testGetTaskWaitForCompletionWithStoringResult() throws Exception {
    waitForCompletionTestCase(true, id -> {
        return client().admin().cluster().prepareGetTask(id).setWaitForCompletion(true).execute();
    }, response -> {
        assertNotNull(response.getTask().getTask());
        assertTrue(response.getTask().isCompleted());
        // We stored the task so we should get its results
        assertEquals(0, response.getTask().getResponseAsMap().get("failure_count"));
    });
}
/**
 * Test wait for completion.
 * @param storeResult should the task store its results
 * @param wait start waiting for a task. Accepts the id of the task to wait for and returns a future waiting for it.
 * @param validator consumer that validates the response produced by the wait
 */
private <T> void waitForCompletionTestCase(boolean storeResult, Function<TaskId, ListenableActionFuture<T>> wait, Consumer<T> validator)
        throws Exception {
    // Start blocking test task
    ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
        .setShouldStoreResult(storeResult).execute();
    ListenableActionFuture<T> waitResponseFuture;
    TaskId taskId;
    try {
        taskId = waitForTestTaskStartOnAllNodes();
        // Wait for the task to start
        assertBusy(() -> client().admin().cluster().prepareGetTask(taskId).get());
        // Register listeners so we can be sure the waiting started. onTaskUnregistered fires when the wait request's
        // own task finishes registering-and-unregistering machinery, signalling that the wait is in flight.
        CountDownLatch waitForWaitingToStart = new CountDownLatch(1);
        for (TransportService transportService : internalCluster().getInstances(TransportService.class)) {
            ((MockTaskManager) transportService.getTaskManager()).addListener(new MockTaskManagerListener() {
                @Override
                public void waitForTaskCompletion(Task task) {
                }
                @Override
                public void onTaskRegistered(Task task) {
                }
                @Override
                public void onTaskUnregistered(Task task) {
                    waitForWaitingToStart.countDown();
                }
            });
        }
        // Spin up a request to wait for the test task to finish
        waitResponseFuture = wait.apply(taskId);
        // Wait for the wait to start
        waitForWaitingToStart.await();
    } finally {
        // Unblock the request so the wait for completion request can finish
        TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
    }
    // Now that the task is unblocked the list response will come back
    T waitResponse = waitResponseFuture.get();
    validator.accept(waitResponse);
    future.get();
}
/**
 * Checks that listing tasks with {@code wait_for_completion=true} and a short timeout reports node failures while the
 * tasks remain blocked.
 */
public void testListTasksWaitForTimeout() throws Exception {
    waitForTimeoutTestCase(id -> {
        ListTasksResponse response = client().admin().cluster().prepareListTasks()
            .setActions(TestTaskPlugin.TestTaskAction.NAME).setWaitForCompletion(true).setTimeout(timeValueMillis(100))
            .get();
        // The list API reports per-node timeout failures instead of throwing
        assertThat(response.getNodeFailures(), not(empty()));
        return response.getNodeFailures();
    });
}
/**
 * Checks that getting a task with {@code wait_for_completion=true} and a short timeout throws while the task remains
 * blocked.
 */
public void testGetTaskWaitForTimeout() throws Exception {
    waitForTimeoutTestCase(id -> {
        // Unlike the list API, the get API surfaces the timeout as a thrown exception
        Exception e = expectThrows(Exception.class,
            () -> client().admin().cluster().prepareGetTask(id).setWaitForCompletion(true).setTimeout(timeValueMillis(100)).get());
        return singleton(e);
    });
}
/**
 * Test waiting for a task that times out.
 * @param wait wait for the running task and return all the failures you accumulated waiting for it
 */
private void waitForTimeoutTestCase(Function<TaskId, ? extends Iterable<? extends Throwable>> wait) throws Exception {
    // Start blocking test task
    ListenableActionFuture<TestTaskPlugin.NodesResponse> future = TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
        .execute();
    try {
        TaskId taskId = waitForTestTaskStartOnAllNodes();
        // Wait for the task to start
        assertBusy(() -> client().admin().cluster().prepareGetTask(taskId).get());
        // Spin up a request that should wait for those tasks to finish
        // It will timeout because we haven't unblocked the tasks
        Iterable<? extends Throwable> failures = wait.apply(taskId);
        for (Throwable failure : failures) {
            // Every accumulated failure must be (or wrap) one of the two expected timeout exception types
            assertNotNull(
                ExceptionsHelper.unwrap(failure, ElasticsearchTimeoutException.class, ReceiveTimeoutTransportException.class));
        }
    } finally {
        // Now we can unblock those requests
        TestTaskPlugin.UnblockTestTasksAction.INSTANCE.newRequestBuilder(client()).get();
    }
    future.get();
}
/**
 * Wait for the test task to be running on all nodes and return the TaskId of the primary task.
 */
private TaskId waitForTestTaskStartOnAllNodes() throws Exception {
    // First wait until every node has its per-node "[n]" child task registered
    assertBusy(() -> {
        List<TaskInfo> tasks = client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]")
            .get().getTasks();
        assertEquals(internalCluster().size(), tasks.size());
    });
    // There is exactly one coordinating task matching the bare action name; that is the "primary" task
    List<TaskInfo> task = client().admin().cluster().prepareListTasks().setActions(TestTaskPlugin.TestTaskAction.NAME).get().getTasks();
    assertThat(task, hasSize(1));
    return task.get(0).getTaskId();
}
/**
 * Checks that waiting for tasks that don't exist completes quickly with an empty result instead of blocking.
 */
public void testTasksListWaitForNoTask() throws Exception {
    // Spin up a request to wait for no matching tasks
    ListenableActionFuture<ListTasksResponse> waitResponseFuture = client().admin().cluster().prepareListTasks()
        .setActions(TestTaskPlugin.TestTaskAction.NAME + "[n]").setWaitForCompletion(true).setTimeout(timeValueMillis(10))
        .execute();
    // It should finish quickly and without complaint
    assertThat(waitResponseFuture.get().getTasks(), empty());
}
/**
 * Checks that getting a task that doesn't exist with {@code wait_for_completion=true} fails fast with "not found"
 * instead of blocking.
 */
public void testTasksGetWaitForNoTask() throws Exception {
    // Spin up a request to wait for no matching tasks
    ListenableActionFuture<GetTaskResponse> waitResponseFuture = client().admin().cluster().prepareGetTask("notfound:1")
        .setWaitForCompletion(true).setTimeout(timeValueMillis(10))
        .execute();
    // It should finish quickly and without complaint
    expectNotFound(() -> waitResponseFuture.get());
}
/**
 * Checks that waiting for completion of *all* tasks (no action filter) terminates — i.e. the list-tasks task itself
 * does not cause an infinite self-wait.
 */
public void testTasksWaitForAllTask() throws Exception {
    // Spin up a request to wait for all tasks in the cluster to make sure it doesn't cause an infinite loop
    ListTasksResponse response = client().admin().cluster().prepareListTasks().setWaitForCompletion(true)
        .setTimeout(timeValueSeconds(10)).get();
    // It should finish quickly and without complaint and list the list tasks themselves
    assertThat(response.getNodeFailures(), emptyCollectionOf(FailedNodeException.class));
    assertThat(response.getTaskFailures(), emptyCollectionOf(TaskOperationFailure.class));
    assertThat(response.getTasks().size(), greaterThanOrEqualTo(1));
}
/**
 * Runs a non-blocking test task that stores its result and verifies the stored document in the tasks index: the task
 * metadata, the stored response, searchability of the document, and that the get task API returns the same result.
 */
public void testTaskStoringSuccesfulResult() throws Exception {
    // Randomly create an empty index to make sure the type is created automatically
    if (randomBoolean()) {
        logger.info("creating an empty results index with custom settings");
        assertAcked(client().admin().indices().prepareCreate(TaskResultsService.TASK_INDEX));
    }
    registerTaskManageListeners(TestTaskPlugin.TestTaskAction.NAME); // we need this to get task id of the process
    // Start non-blocking test task
    TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client()).setShouldStoreResult(true).setShouldBlock(false).get();
    List<TaskInfo> events = findEvents(TestTaskPlugin.TestTaskAction.NAME, Tuple::v1);
    assertEquals(1, events.size());
    TaskInfo taskInfo = events.get(0);
    TaskId taskId = taskInfo.getTaskId();
    // The result is stored as a document keyed by the task id
    GetResponse resultDoc = client()
        .prepareGet(TaskResultsService.TASK_INDEX, TaskResultsService.TASK_TYPE, taskId.toString()).get();
    assertTrue(resultDoc.isExists());
    Map<String, Object> source = resultDoc.getSource();
    @SuppressWarnings("unchecked")
    Map<String, Object> task = (Map<String, Object>) source.get("task");
    assertEquals(taskInfo.getTaskId().getNodeId(), task.get("node"));
    assertEquals(taskInfo.getAction(), task.get("action"));
    assertEquals(Long.toString(taskInfo.getId()), task.get("id").toString());
    @SuppressWarnings("unchecked")
    Map<String, Object> result = (Map<String, Object>) source.get("response");
    assertEquals("0", result.get("failure_count").toString());
    // A successful task must not have recorded a failure
    assertNull(source.get("failure"));
    // Refresh so the stored document is visible to search, then check it can be found by action and by node
    assertNoFailures(client().admin().indices().prepareRefresh(TaskResultsService.TASK_INDEX).get());
    SearchResponse searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
        .setTypes(TaskResultsService.TASK_TYPE)
        .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.getAction())))
        .get();
    assertEquals(1L, searchResponse.getHits().totalHits());
    searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX).setTypes(TaskResultsService.TASK_TYPE)
        .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.getTaskId().getNodeId())))
        .get();
    assertEquals(1L, searchResponse.getHits().totalHits());
    // The get task API (falling back to the tasks index) must agree with the stored document
    GetTaskResponse getResponse = expectFinishedTask(taskId);
    assertEquals(result, getResponse.getTask().getResponseAsMap());
    assertNull(getResponse.getTask().getError());
}
/**
 * Runs a non-blocking test task that is configured to fail while storing its result, and verifies that the failure is
 * persisted in the tasks index and returned by the get task API.
 */
public void testTaskStoringFailureResult() throws Exception {
    registerTaskManageListeners(TestTaskPlugin.TestTaskAction.NAME); // we need this to get task id of the process
    // Start non-blocking test task that should fail
    assertThrows(
        TestTaskPlugin.TestTaskAction.INSTANCE.newRequestBuilder(client())
            .setShouldFail(true)
            .setShouldStoreResult(true)
            .setShouldBlock(false),
        IllegalStateException.class
    );
    List<TaskInfo> events = findEvents(TestTaskPlugin.TestTaskAction.NAME, Tuple::v1);
    assertEquals(1, events.size());
    TaskInfo failedTaskInfo = events.get(0);
    TaskId failedTaskId = failedTaskInfo.getTaskId();
    // Even though the task failed, its result document must have been stored
    GetResponse failedResultDoc = client()
        .prepareGet(TaskResultsService.TASK_INDEX, TaskResultsService.TASK_TYPE, failedTaskId.toString())
        .get();
    assertTrue(failedResultDoc.isExists());
    Map<String, Object> source = failedResultDoc.getSource();
    @SuppressWarnings("unchecked")
    Map<String, Object> task = (Map<String, Object>) source.get("task");
    assertEquals(failedTaskInfo.getTaskId().getNodeId(), task.get("node"));
    assertEquals(failedTaskInfo.getAction(), task.get("action"));
    assertEquals(Long.toString(failedTaskInfo.getId()), task.get("id").toString());
    @SuppressWarnings("unchecked")
    Map<String, Object> error = (Map<String, Object>) source.get("error");
    assertEquals("Simulating operation failure", error.get("reason"));
    assertEquals("illegal_state_exception", error.get("type"));
    // A failed task stores an error, not a result
    assertNull(source.get("result"));
    GetTaskResponse getResponse = expectFinishedTask(failedTaskId);
    assertNull(getResponse.getTask().getResponse());
    assertEquals(error, getResponse.getTask().getErrorAsMap());
}
/**
 * Checks the two "not found" flavours of the get task API: unknown node and known node with unknown task id.
 */
public void testGetTaskNotFound() throws Exception {
    // Node isn't found, tasks index doesn't even exist
    expectNotFound(() -> client().admin().cluster().prepareGetTask("not_a_node:1").get());
    // Node exists but the task still isn't found
    expectNotFound(() -> client().admin().cluster().prepareGetTask(new TaskId(internalCluster().getNodeNames()[0], 1)).get());
}
/**
 * Checks that a stored task result can be fetched even when its originating node is no longer part of the cluster,
 * by storing a fake result attributed to a node named "fake".
 */
public void testNodeNotFoundButTaskFound() throws Exception {
    // Save a fake task that looks like it is from a node that isn't part of the cluster
    CyclicBarrier b = new CyclicBarrier(2);
    TaskResultsService resultsService = internalCluster().getInstance(TaskResultsService.class);
    resultsService.storeResult(
        new TaskResult(new TaskInfo(new TaskId("fake", 1), "test", "test", "", null, 0, 0, false, TaskId.EMPTY_TASK_ID),
            new RuntimeException("test")),
        new ActionListener<Void>() {
            @Override
            public void onResponse(Void response) {
                // Signal the test thread that the asynchronous store finished
                try {
                    b.await();
                } catch (InterruptedException | BrokenBarrierException e) {
                    onFailure(e);
                }
            }
            @Override
            public void onFailure(Exception e) {
                throw new RuntimeException(e);
            }
        });
    // Block until the store callback above has fired
    b.await();
    // Now we can find it!
    GetTaskResponse response = expectFinishedTask(new TaskId("fake:1"));
    assertEquals("test", response.getTask().getTask().getAction());
    assertNotNull(response.getTask().getError());
    assertNull(response.getTask().getResponse());
}
/**
 * Detaches every recording listener registered by this test from its node's {@link MockTaskManager} before the usual
 * test teardown runs.
 */
@Override
public void tearDown() throws Exception {
    listeners.forEach((nodeAndAction, listener) -> {
        TransportService service = internalCluster().getInstance(TransportService.class, nodeAndAction.v1());
        ((MockTaskManager) service.getTaskManager()).removeListener(listener);
    });
    listeners.clear();
    super.tearDown();
}
/**
 * Registers a recording task event listener with the given action mask on every node, remembering each one so that
 * {@link #tearDown()} can detach it later. Registering the same (node, mask) pair twice is a test bug.
 */
private void registerTaskManageListeners(String actionMasks) {
    String[] masks = actionMasks.split(",");
    for (String nodeName : internalCluster().getNodeNames()) {
        DiscoveryNode node = internalCluster().getInstance(ClusterService.class, nodeName).localNode();
        RecordingTaskManagerListener listener = new RecordingTaskManagerListener(node, masks);
        MockTaskManager taskManager =
            (MockTaskManager) internalCluster().getInstance(TransportService.class, nodeName).getTaskManager();
        taskManager.addListener(listener);
        // put() must not displace an existing listener for this (node, mask) pair
        assertNull(listeners.put(new Tuple<>(node.getName(), actionMasks), listener));
    }
}
/**
 * Resets every recording task event listener whose action mask matches, or all of them when the mask is {@code null}.
 */
private void resetTaskManageListeners(String actionMasks) {
    listeners.entrySet().stream()
        .filter(entry -> actionMasks == null || entry.getKey().v2().equals(actionMasks))
        .forEach(entry -> entry.getValue().reset());
}
/**
 * Counts the recorded events across all nodes that match the action masks and satisfy the criteria.
 *
 * @param actionMasks action masks to match
 * @return number of events that satisfy the criteria
 */
private int numberOfEvents(String actionMasks, Function<Tuple<Boolean, TaskInfo>, Boolean> criteria) {
    List<TaskInfo> matching = findEvents(actionMasks, criteria);
    return matching.size();
}
/**
 * Collects the recorded events across all nodes that match the action masks and satisfy the criteria.
 *
 * @param actionMasks action masks to match, or {@code null} to consider every registered listener
 * @return the {@link TaskInfo} of every event that satisfies the criteria
 */
private List<TaskInfo> findEvents(String actionMasks, Function<Tuple<Boolean, TaskInfo>, Boolean> criteria) {
    List<TaskInfo> matches = new ArrayList<>();
    for (Map.Entry<Tuple<String, String>, RecordingTaskManagerListener> entry : listeners.entrySet()) {
        if (actionMasks != null && entry.getKey().v2().equals(actionMasks) == false) {
            continue; // listener registered under a different mask
        }
        for (Tuple<Boolean, TaskInfo> taskEvent : entry.getValue().getEvents()) {
            if (criteria.apply(taskEvent)) {
                matches.add(taskEvent.v2());
            }
        }
    }
    return matches;
}
/**
 * Asserts that every task in the list has the given parent task.
 */
private void assertParentTask(List<TaskInfo> tasks, TaskInfo parentTask) {
    tasks.forEach(task -> assertParentTask(task, parentTask));
}
/**
 * Asserts that the task's parent id is set and points at the given parent task (same node id, same task id).
 */
private void assertParentTask(TaskInfo task, TaskInfo parentTask) {
    TaskId parentId = task.getParentTaskId();
    assertTrue(parentId.isSet());
    assertEquals(parentTask.getTaskId().getNodeId(), parentId.getNodeId());
    assertTrue(Strings.hasLength(parentId.getNodeId()));
    assertEquals(parentTask.getId(), parentId.getId());
}
/**
 * Runs the given code expecting it to throw something that wraps a {@link ResourceNotFoundException}, and returns that
 * exception. Fails with the original exception as the cause if no such wrapper is found.
 */
private ResourceNotFoundException expectNotFound(ThrowingRunnable r) {
    Exception thrown = expectThrows(Exception.class, r);
    Throwable unwrapped = ExceptionsHelper.unwrap(thrown, ResourceNotFoundException.class);
    if (unwrapped == null) {
        throw new RuntimeException("Expected ResourceNotFoundException", thrown);
    }
    return (ResourceNotFoundException) unwrapped;
}
/**
 * Fetch the task status from the get task API using its "fallback to get from the task index" behavior. Asserts some
 * obvious stuff about the fetched task and returns the whole response.
 */
private GetTaskResponse expectFinishedTask(TaskId taskId) throws IOException {
    GetTaskResponse response = client().admin().cluster().prepareGetTask(taskId).get();
    assertTrue("the task should have been completed before fetching", response.getTask().isCompleted());
    TaskInfo info = response.getTask().getTask();
    assertEquals(taskId, info.getTaskId());
    assertNull(info.getStatus()); // The test task doesn't have any status
    return response;
}
}
| |
/*
* Copyright 2006-2017 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.numeric;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.ui.SingleCheckboxOptionsPanel;
import com.intellij.java.analysis.JavaAnalysisBundle;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.psi.*;
import com.intellij.util.ObjectUtils;
import com.intellij.util.SmartList;
import com.siyeh.InspectionGadgetsBundle;
import com.siyeh.ig.BaseInspectionVisitor;
import com.siyeh.ig.InspectionGadgetsFix;
import com.siyeh.ig.PsiReplacementUtil;
import com.siyeh.ig.psiutils.MethodCallUtils;
import com.siyeh.ig.psiutils.ParenthesesUtils;
import com.siyeh.ig.psiutils.TypeUtils;
import org.jdom.Element;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.List;
/**
 * Inspection that reports usages of the unary plus operator ({@code +expr}). Unary plus is almost always redundant and
 * can hide a typo (e.g. {@code + +x} instead of {@code ++x}). Registers a fix that removes the operator and, on the
 * fly, a fix that rewrites a doubled unary plus on a reference into a proper pre-increment.
 */
public final class UnaryPlusInspection extends LocalInspectionTool {
  // When true (the default) only report unary plus that participates in a larger expression; a lone "+x" is left alone.
  public boolean onlyReportInsideBinaryExpression = true;

  @NotNull
  @Override
  public JComponent createOptionsPanel() {
    return new SingleCheckboxOptionsPanel(JavaAnalysisBundle.message("inspection.unary.plus.unary.binary.option"), this,
                                          "onlyReportInsideBinaryExpression");
  }

  @Override
  public void writeSettings(@NotNull Element node) throws WriteExternalException {
    // Only persist the option when it differs from its default, keeping settings files minimal.
    if (!onlyReportInsideBinaryExpression) {
      node.addContent(new Element("option").setAttribute("name", "onlyReportInsideBinaryExpression").setAttribute("value", "false"));
    }
  }

  /** Quick fix that deletes the redundant unary plus by replacing the prefix expression with its operand. */
  private static class UnaryPlusFix extends InspectionGadgetsFix {
    @Nls
    @NotNull
    @Override
    public String getFamilyName() {
      return InspectionGadgetsBundle.message("unary.plus.quickfix");
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      // The descriptor points at the operation sign; its parent is the prefix expression to unwrap.
      final PsiElement element = descriptor.getPsiElement();
      final PsiElement parent = element.getParent();
      if (!(parent instanceof PsiPrefixExpression)) {
        return;
      }
      final PsiPrefixExpression prefixExpression = (PsiPrefixExpression)parent;
      final PsiExpression operand = prefixExpression.getOperand();
      if (operand == null) {
        return; // incomplete code, nothing to replace with
      }
      prefixExpression.replace(operand);
    }
  }

  /** Quick fix that rewrites a doubled unary plus on a reference (e.g. {@code + +i}) into a pre-increment ({@code ++i}). */
  private static class UnaryIncrementFix extends InspectionGadgetsFix {
    // Name of the referenced variable, used only for the fix's presentable text.
    private final String myRefName;

    private UnaryIncrementFix(@NotNull String refName) {
      myRefName = refName;
    }

    @Override
    public @NotNull String getName() {
      return InspectionGadgetsBundle.message("unary.increment.quickfix", myRefName);
    }

    @Override
    public @NotNull String getFamilyName() {
      return InspectionGadgetsBundle.message("unary.increment.quickfix.family.name");
    }

    @Override
    protected void doFix(Project project, ProblemDescriptor descriptor) {
      final PsiPrefixExpression prefixExpr = ObjectUtils.tryCast(descriptor.getPsiElement().getParent(), PsiPrefixExpression.class);
      if (prefixExpr == null) {
        return;
      }
      final PsiExpression operand = prefixExpr.getOperand();
      final PsiExpression oldExpr;
      final PsiReferenceExpression refExpr;
      if (operand instanceof PsiReferenceExpression) {
        // Reported expression is the inner "+ref"; replace the enclosing prefix expression ("+ +ref" as a whole).
        oldExpr = (PsiExpression)prefixExpr.getParent();
        refExpr = (PsiReferenceExpression)operand;
      }
      else if (operand instanceof PsiPrefixExpression) {
        // Reported expression is the outer plus; the reference sits inside the nested prefix expression.
        oldExpr = prefixExpr;
        refExpr = (PsiReferenceExpression)((PsiPrefixExpression)operand).getOperand();
      }
      else {
        return;
      }
      if (refExpr == null || oldExpr == null) {
        return;
      }
      final String refName = refExpr.getReferenceName();
      if (refName == null) {
        return;
      }
      PsiReplacementUtil.replaceExpression(oldExpr, "++" + refName);
    }
  }

  @Override
  public @NotNull PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    return new UnaryPlusVisitor(holder, isOnTheFly, onlyReportInsideBinaryExpression);
  }

  /** Visitor that locates unary plus prefix expressions and registers the problems and fixes. */
  private static class UnaryPlusVisitor extends BaseInspectionVisitor {
    private final ProblemsHolder myHolder;
    private final boolean myOnTheFly;
    private final boolean myOnlyReportInsideBinaryExpression;

    private UnaryPlusVisitor(@NotNull ProblemsHolder holder, boolean onTheFly, boolean onlyReportInsideBinaryExpression) {
      myHolder = holder;
      myOnTheFly = onTheFly;
      myOnlyReportInsideBinaryExpression = onlyReportInsideBinaryExpression;
    }

    @Override
    public void visitPrefixExpression(PsiPrefixExpression prefixExpression) {
      super.visitPrefixExpression(prefixExpression);
      if (!unaryPlusPrefixExpression(prefixExpression)) {
        return;
      }
      final PsiExpression operand = prefixExpression.getOperand();
      if (operand == null) {
        return; // incomplete code
      }
      final PsiType type = operand.getType();
      if (type == null) {
        return; // unresolvable operand, don't report
      }
      if (myOnlyReportInsideBinaryExpression) {
        // Only report when the unary plus takes part in a larger expression or initialization.
        final PsiElement parent = ParenthesesUtils.getParentSkipParentheses(prefixExpression);
        if (!(operand instanceof PsiParenthesizedExpression ||
              operand instanceof PsiPrefixExpression ||
              parent instanceof PsiPolyadicExpression ||
              parent instanceof PsiPrefixExpression ||
              parent instanceof PsiAssignmentExpression ||
              parent instanceof PsiVariable)) {
          return;
        }
      }
      else if (TypeUtils.unaryNumericPromotion(type) != type &&
               MethodCallUtils.isNecessaryForSurroundingMethodCall(prefixExpression, operand)) {
        // unary plus might have been used as cast to int
        return;
      }
      List<LocalQuickFix> fixes = new SmartList<>(new UnaryPlusFix());
      if (myOnTheFly) {
        // Offer the "++" rewrite when this plus is half of a doubled unary plus on a simple reference.
        if (operand instanceof PsiReferenceExpression) {
          final PsiPrefixExpression parentPrefixExpr = ObjectUtils.tryCast(prefixExpression.getParent(), PsiPrefixExpression.class);
          if (unaryPlusPrefixExpression(parentPrefixExpr)) {
            addUnaryIncrementFix(fixes, (PsiReferenceExpression)operand);
          }
        }
        else if (operand instanceof PsiPrefixExpression && unaryPlusPrefixExpression((PsiPrefixExpression)operand)) {
          final PsiExpression operandExpr = ((PsiPrefixExpression)operand).getOperand();
          final PsiReferenceExpression operandRefExpr = ObjectUtils.tryCast(operandExpr, PsiReferenceExpression.class);
          if (operandRefExpr != null) {
            addUnaryIncrementFix(fixes, operandRefExpr);
          }
        }
      }
      myHolder.registerProblem(prefixExpression.getOperationSign(), InspectionGadgetsBundle.message("unary.plus.problem.descriptor"),
                               ProblemHighlightType.LIKE_UNUSED_SYMBOL, fixes.toArray(LocalQuickFix[]::new));
    }

    /** Returns true when the (possibly null) prefix expression uses the unary plus token. */
    private static boolean unaryPlusPrefixExpression(@Nullable PsiPrefixExpression prefixExpr) {
      return prefixExpr != null && prefixExpr.getOperationTokenType().equals(JavaTokenType.PLUS);
    }

    /** Adds a {@link UnaryIncrementFix} for the reference if the rewrite is safe (mutable, resolvable, promotion-neutral). */
    private static void addUnaryIncrementFix(@NotNull List<LocalQuickFix> fixes, @NotNull PsiReferenceExpression refExpr) {
      final String refName = refExpr.getReferenceName();
      if (refName == null) {
        return;
      }
      final PsiPrefixExpression topPrefixExpr = ObjectUtils.tryCast(refExpr.getParent().getParent(), PsiPrefixExpression.class);
      if (topPrefixExpr == null) {
        return;
      }
      final PsiType refExprType = refExpr.getType();
      // Skip when removing the pluses would change the argument type picked for a surrounding call.
      if (TypeUtils.unaryNumericPromotion(refExprType) != refExprType &&
          MethodCallUtils.isNecessaryForSurroundingMethodCall(topPrefixExpr, refExpr)) {
        return;
      }
      // Only variables that can actually be incremented qualify (not final, must resolve to a variable).
      final PsiVariable resolved = ObjectUtils.tryCast(refExpr.resolve(), PsiVariable.class);
      if (resolved == null || resolved.hasModifierProperty(PsiModifier.FINAL)) {
        return;
      }
      fixes.add(new UnaryIncrementFix(refName));
    }
  }
}
| |
/*
* $Id: ELSubmitTag.java 479635 2006-11-27 14:27:18Z pbenedict $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.strutsel.taglib.html;
import org.apache.struts.taglib.html.SubmitTag;
import org.apache.strutsel.taglib.utils.EvalHelper;
import javax.servlet.jsp.JspException;
/**
* Tag for input fields of type "submit". <p> This class is a subclass of the
* class <code>org.apache.struts.taglib.html.SubmitTag</code> which provides
* most of the described functionality. This subclass allows all attribute
* values to be specified as expressions utilizing the JavaServer Pages
* Standard Library expression language.
*
* @version $Rev: 479635 $
*/
public class ELSubmitTag extends SubmitTag {
/** Instance variable mapped to the "accessKey" tag attribute; mapping set in the associated BeanInfo class. */
private String accessKeyExpr;
/** Instance variable mapped to the "alt" tag attribute; mapping set in the associated BeanInfo class. */
private String altExpr;
/** Instance variable mapped to the "altKey" tag attribute; mapping set in the associated BeanInfo class. */
private String altKeyExpr;
/** Instance variable mapped to the "bundle" tag attribute; mapping set in the associated BeanInfo class. */
private String bundleExpr;
/** Instance variable mapped to the "dir" tag attribute; mapping set in the associated BeanInfo class. */
private String dirExpr;
/** Instance variable mapped to the "disabled" tag attribute; mapping set in the associated BeanInfo class. */
private String disabledExpr;
/** Instance variable mapped to the "indexed" tag attribute; mapping set in the associated BeanInfo class. */
private String indexedExpr;
/** Instance variable mapped to the "lang" tag attribute; mapping set in the associated BeanInfo class. */
private String langExpr;
/** Instance variable mapped to the "onblur" tag attribute; mapping set in the associated BeanInfo class. */
private String onblurExpr;
/** Instance variable mapped to the "onchange" tag attribute; mapping set in the associated BeanInfo class. */
private String onchangeExpr;
/** Instance variable mapped to the "onclick" tag attribute; mapping set in the associated BeanInfo class. */
private String onclickExpr;
/** Instance variable mapped to the "ondblclick" tag attribute; mapping set in the associated BeanInfo class. */
private String ondblclickExpr;
/** Instance variable mapped to the "onfocus" tag attribute; mapping set in the associated BeanInfo class. */
private String onfocusExpr;
/** Instance variable mapped to the "onkeydown" tag attribute; mapping set in the associated BeanInfo class. */
private String onkeydownExpr;
/** Instance variable mapped to the "onkeypress" tag attribute; mapping set in the associated BeanInfo class. */
private String onkeypressExpr;
/** Instance variable mapped to the "onkeyup" tag attribute; mapping set in the associated BeanInfo class. */
private String onkeyupExpr;
/** Instance variable mapped to the "onmousedown" tag attribute; mapping set in the associated BeanInfo class. */
private String onmousedownExpr;
/** Instance variable mapped to the "onmousemove" tag attribute; mapping set in the associated BeanInfo class. */
private String onmousemoveExpr;
/** Instance variable mapped to the "onmouseout" tag attribute; mapping set in the associated BeanInfo class. */
private String onmouseoutExpr;
/** Instance variable mapped to the "onmouseover" tag attribute; mapping set in the associated BeanInfo class. */
private String onmouseoverExpr;
/** Instance variable mapped to the "onmouseup" tag attribute; mapping set in the associated BeanInfo class. */
private String onmouseupExpr;
/** Instance variable mapped to the "property" tag attribute; mapping set in the associated BeanInfo class. */
private String propertyExpr;
/** Instance variable mapped to the "style" tag attribute; mapping set in the associated BeanInfo class. */
private String styleExpr;
/** Instance variable mapped to the "styleClass" tag attribute; mapping set in the associated BeanInfo class. */
private String styleClassExpr;
/** Instance variable mapped to the "styleId" tag attribute; mapping set in the associated BeanInfo class. */
private String styleIdExpr;
/** Instance variable mapped to the "tabindex" tag attribute; mapping set in the associated BeanInfo class. */
private String tabindexExpr;
/** Instance variable mapped to the "title" tag attribute; mapping set in the associated BeanInfo class. */
private String titleExpr;
/** Instance variable mapped to the "titleKey" tag attribute; mapping set in the associated BeanInfo class. */
private String titleKeyExpr;
/** Instance variable mapped to the "value" tag attribute; mapping set in the associated BeanInfo class. */
private String valueExpr;
/** @return the EL expression for the "accessKey" attribute (mapping set in the associated BeanInfo class). */
public String getAccesskeyExpr() { return accessKeyExpr; }

/** @return the EL expression for the "alt" attribute (mapping set in the associated BeanInfo class). */
public String getAltExpr() { return altExpr; }

/** @return the EL expression for the "altKey" attribute (mapping set in the associated BeanInfo class). */
public String getAltKeyExpr() { return altKeyExpr; }

/** @return the EL expression for the "bundle" attribute (mapping set in the associated BeanInfo class). */
public String getBundleExpr() { return bundleExpr; }

/** @return the EL expression for the "dir" attribute (mapping set in the associated BeanInfo class). */
public String getDirExpr() { return dirExpr; }

/** @return the EL expression for the "disabled" attribute (mapping set in the associated BeanInfo class). */
public String getDisabledExpr() { return disabledExpr; }

/** @return the EL expression for the "indexed" attribute (mapping set in the associated BeanInfo class). */
public String getIndexedExpr() { return indexedExpr; }

/** @return the EL expression for the "lang" attribute (mapping set in the associated BeanInfo class). */
public String getLangExpr() { return langExpr; }

/** @return the EL expression for the "onblur" attribute (mapping set in the associated BeanInfo class). */
public String getOnblurExpr() { return onblurExpr; }

/** @return the EL expression for the "onchange" attribute (mapping set in the associated BeanInfo class). */
public String getOnchangeExpr() { return onchangeExpr; }

/** @return the EL expression for the "onclick" attribute (mapping set in the associated BeanInfo class). */
public String getOnclickExpr() { return onclickExpr; }

/** @return the EL expression for the "ondblclick" attribute (mapping set in the associated BeanInfo class). */
public String getOndblclickExpr() { return ondblclickExpr; }

/** @return the EL expression for the "onfocus" attribute (mapping set in the associated BeanInfo class). */
public String getOnfocusExpr() { return onfocusExpr; }

/** @return the EL expression for the "onkeydown" attribute (mapping set in the associated BeanInfo class). */
public String getOnkeydownExpr() { return onkeydownExpr; }

/** @return the EL expression for the "onkeypress" attribute (mapping set in the associated BeanInfo class). */
public String getOnkeypressExpr() { return onkeypressExpr; }

/** @return the EL expression for the "onkeyup" attribute (mapping set in the associated BeanInfo class). */
public String getOnkeyupExpr() { return onkeyupExpr; }

/** @return the EL expression for the "onmousedown" attribute (mapping set in the associated BeanInfo class). */
public String getOnmousedownExpr() { return onmousedownExpr; }

/** @return the EL expression for the "onmousemove" attribute (mapping set in the associated BeanInfo class). */
public String getOnmousemoveExpr() { return onmousemoveExpr; }

/** @return the EL expression for the "onmouseout" attribute (mapping set in the associated BeanInfo class). */
public String getOnmouseoutExpr() { return onmouseoutExpr; }

/** @return the EL expression for the "onmouseover" attribute (mapping set in the associated BeanInfo class). */
public String getOnmouseoverExpr() { return onmouseoverExpr; }

/** @return the EL expression for the "onmouseup" attribute (mapping set in the associated BeanInfo class). */
public String getOnmouseupExpr() { return onmouseupExpr; }

/** @return the EL expression for the "property" attribute (mapping set in the associated BeanInfo class). */
public String getPropertyExpr() { return propertyExpr; }

/** @return the EL expression for the "style" attribute (mapping set in the associated BeanInfo class). */
public String getStyleExpr() { return styleExpr; }

/** @return the EL expression for the "styleClass" attribute (mapping set in the associated BeanInfo class). */
public String getStyleClassExpr() { return styleClassExpr; }

/** @return the EL expression for the "styleId" attribute (mapping set in the associated BeanInfo class). */
public String getStyleIdExpr() { return styleIdExpr; }

/** @return the EL expression for the "tabindex" attribute (mapping set in the associated BeanInfo class). */
public String getTabindexExpr() { return tabindexExpr; }

/** @return the EL expression for the "title" attribute (mapping set in the associated BeanInfo class). */
public String getTitleExpr() { return titleExpr; }

/** @return the EL expression for the "titleKey" attribute (mapping set in the associated BeanInfo class). */
public String getTitleKeyExpr() { return titleKeyExpr; }

/** @return the EL expression for the "value" attribute (mapping set in the associated BeanInfo class). */
public String getValueExpr() { return valueExpr; }
/** Sets the EL expression for the "accessKey" attribute (mapping set in the associated BeanInfo class). */
public void setAccesskeyExpr(String accessKeyExpr) { this.accessKeyExpr = accessKeyExpr; }

/** Sets the EL expression for the "alt" attribute (mapping set in the associated BeanInfo class). */
public void setAltExpr(String altExpr) { this.altExpr = altExpr; }

/** Sets the EL expression for the "altKey" attribute (mapping set in the associated BeanInfo class). */
public void setAltKeyExpr(String altKeyExpr) { this.altKeyExpr = altKeyExpr; }

/** Sets the EL expression for the "bundle" attribute (mapping set in the associated BeanInfo class). */
public void setBundleExpr(String bundleExpr) { this.bundleExpr = bundleExpr; }

/** Sets the EL expression for the "dir" attribute (mapping set in the associated BeanInfo class). */
public void setDirExpr(String dirExpr) { this.dirExpr = dirExpr; }

/** Sets the EL expression for the "disabled" attribute (mapping set in the associated BeanInfo class). */
public void setDisabledExpr(String disabledExpr) { this.disabledExpr = disabledExpr; }

/** Sets the EL expression for the "indexed" attribute (mapping set in the associated BeanInfo class). */
public void setIndexedExpr(String indexedExpr) { this.indexedExpr = indexedExpr; }

/** Sets the EL expression for the "lang" attribute (mapping set in the associated BeanInfo class). */
public void setLangExpr(String langExpr) { this.langExpr = langExpr; }

/** Sets the EL expression for the "onblur" attribute (mapping set in the associated BeanInfo class). */
public void setOnblurExpr(String onblurExpr) { this.onblurExpr = onblurExpr; }

/** Sets the EL expression for the "onchange" attribute (mapping set in the associated BeanInfo class). */
public void setOnchangeExpr(String onchangeExpr) { this.onchangeExpr = onchangeExpr; }

/** Sets the EL expression for the "onclick" attribute (mapping set in the associated BeanInfo class). */
public void setOnclickExpr(String onclickExpr) { this.onclickExpr = onclickExpr; }

/** Sets the EL expression for the "ondblclick" attribute (mapping set in the associated BeanInfo class). */
public void setOndblclickExpr(String ondblclickExpr) { this.ondblclickExpr = ondblclickExpr; }

/** Sets the EL expression for the "onfocus" attribute (mapping set in the associated BeanInfo class). */
public void setOnfocusExpr(String onfocusExpr) { this.onfocusExpr = onfocusExpr; }

/** Sets the EL expression for the "onkeydown" attribute (mapping set in the associated BeanInfo class). */
public void setOnkeydownExpr(String onkeydownExpr) { this.onkeydownExpr = onkeydownExpr; }

/** Sets the EL expression for the "onkeypress" attribute (mapping set in the associated BeanInfo class). */
public void setOnkeypressExpr(String onkeypressExpr) { this.onkeypressExpr = onkeypressExpr; }

/** Sets the EL expression for the "onkeyup" attribute (mapping set in the associated BeanInfo class). */
public void setOnkeyupExpr(String onkeyupExpr) { this.onkeyupExpr = onkeyupExpr; }

/** Sets the EL expression for the "onmousedown" attribute (mapping set in the associated BeanInfo class). */
public void setOnmousedownExpr(String onmousedownExpr) { this.onmousedownExpr = onmousedownExpr; }

/** Sets the EL expression for the "onmousemove" attribute (mapping set in the associated BeanInfo class). */
public void setOnmousemoveExpr(String onmousemoveExpr) { this.onmousemoveExpr = onmousemoveExpr; }

/** Sets the EL expression for the "onmouseout" attribute (mapping set in the associated BeanInfo class). */
public void setOnmouseoutExpr(String onmouseoutExpr) { this.onmouseoutExpr = onmouseoutExpr; }

/** Sets the EL expression for the "onmouseover" attribute (mapping set in the associated BeanInfo class). */
public void setOnmouseoverExpr(String onmouseoverExpr) { this.onmouseoverExpr = onmouseoverExpr; }

/** Sets the EL expression for the "onmouseup" attribute (mapping set in the associated BeanInfo class). */
public void setOnmouseupExpr(String onmouseupExpr) { this.onmouseupExpr = onmouseupExpr; }

/** Sets the EL expression for the "property" attribute (mapping set in the associated BeanInfo class). */
public void setPropertyExpr(String propertyExpr) { this.propertyExpr = propertyExpr; }

/** Sets the EL expression for the "style" attribute (mapping set in the associated BeanInfo class). */
public void setStyleExpr(String styleExpr) { this.styleExpr = styleExpr; }

/** Sets the EL expression for the "styleClass" attribute (mapping set in the associated BeanInfo class). */
public void setStyleClassExpr(String styleClassExpr) { this.styleClassExpr = styleClassExpr; }

/** Sets the EL expression for the "styleId" attribute (mapping set in the associated BeanInfo class). */
public void setStyleIdExpr(String styleIdExpr) { this.styleIdExpr = styleIdExpr; }

/** Sets the EL expression for the "tabindex" attribute (mapping set in the associated BeanInfo class). */
public void setTabindexExpr(String tabindexExpr) { this.tabindexExpr = tabindexExpr; }

/** Sets the EL expression for the "title" attribute (mapping set in the associated BeanInfo class). */
public void setTitleExpr(String titleExpr) { this.titleExpr = titleExpr; }

/** Sets the EL expression for the "titleKey" attribute (mapping set in the associated BeanInfo class). */
public void setTitleKeyExpr(String titleKeyExpr) { this.titleKeyExpr = titleKeyExpr; }

/** Sets the EL expression for the "value" attribute (mapping set in the associated BeanInfo class). */
public void setValueExpr(String valueExpr) { this.valueExpr = valueExpr; }
/**
 * Resets attribute values for tag reuse.
 * Nulls every attribute-expression property via its setter so that a pooled
 * tag handler instance does not carry values over into its next use.
 * Delegates to {@code super.release()} first so inherited state is cleared too.
 */
public void release() {
super.release();
setAccesskeyExpr(null);
setAltExpr(null);
setAltKeyExpr(null);
setBundleExpr(null);
setDirExpr(null);
setDisabledExpr(null);
setIndexedExpr(null);
setLangExpr(null);
setOnblurExpr(null);
setOnchangeExpr(null);
setOnclickExpr(null);
setOndblclickExpr(null);
setOnfocusExpr(null);
setOnkeydownExpr(null);
setOnkeypressExpr(null);
setOnkeyupExpr(null);
setOnmousedownExpr(null);
setOnmousemoveExpr(null);
setOnmouseoutExpr(null);
setOnmouseoverExpr(null);
setOnmouseupExpr(null);
setPropertyExpr(null);
setStyleExpr(null);
setStyleClassExpr(null);
setStyleIdExpr(null);
setTabindexExpr(null);
setTitleExpr(null);
setTitleKeyExpr(null);
setValueExpr(null);
}
/**
 * Processes the start of this tag: first resolves all EL attribute
 * expressions into their runtime values, then delegates to the parent
 * tag implementation.
 *
 * @throws JspException if a JSP exception has occurred
 */
public int doStartTag() throws JspException {
    evaluateExpressions();
    return super.doStartTag();
}
/**
 * Processes all attribute values which use the JSTL expression evaluation
 * engine to determine their values. Each attribute expression is evaluated
 * once; only a non-null result is pushed into the corresponding tag
 * property, so attributes left unset keep their current values.
 *
 * @throws JspException if a JSP exception has occurred
 */
private void evaluateExpressions() throws JspException {
    String s;
    Boolean b;

    s = EvalHelper.evalString("accessKey", getAccesskeyExpr(), this, pageContext);
    if (s != null)
        setAccesskey(s);

    s = EvalHelper.evalString("alt", getAltExpr(), this, pageContext);
    if (s != null)
        setAlt(s);

    s = EvalHelper.evalString("altKey", getAltKeyExpr(), this, pageContext);
    if (s != null)
        setAltKey(s);

    s = EvalHelper.evalString("bundle", getBundleExpr(), this, pageContext);
    if (s != null)
        setBundle(s);

    s = EvalHelper.evalString("dir", getDirExpr(), this, pageContext);
    if (s != null)
        setDir(s);

    b = EvalHelper.evalBoolean("disabled", getDisabledExpr(), this, pageContext);
    if (b != null)
        setDisabled(b.booleanValue());

    s = EvalHelper.evalString("lang", getLangExpr(), this, pageContext);
    if (s != null)
        setLang(s);

    b = EvalHelper.evalBoolean("indexed", getIndexedExpr(), this, pageContext);
    if (b != null)
        setIndexed(b.booleanValue());

    s = EvalHelper.evalString("onblur", getOnblurExpr(), this, pageContext);
    if (s != null)
        setOnblur(s);

    s = EvalHelper.evalString("onchange", getOnchangeExpr(), this, pageContext);
    if (s != null)
        setOnchange(s);

    s = EvalHelper.evalString("onclick", getOnclickExpr(), this, pageContext);
    if (s != null)
        setOnclick(s);

    s = EvalHelper.evalString("ondblclick", getOndblclickExpr(), this, pageContext);
    if (s != null)
        setOndblclick(s);

    s = EvalHelper.evalString("onfocus", getOnfocusExpr(), this, pageContext);
    if (s != null)
        setOnfocus(s);

    s = EvalHelper.evalString("onkeydown", getOnkeydownExpr(), this, pageContext);
    if (s != null)
        setOnkeydown(s);

    s = EvalHelper.evalString("onkeypress", getOnkeypressExpr(), this, pageContext);
    if (s != null)
        setOnkeypress(s);

    s = EvalHelper.evalString("onkeyup", getOnkeyupExpr(), this, pageContext);
    if (s != null)
        setOnkeyup(s);

    s = EvalHelper.evalString("onmousedown", getOnmousedownExpr(), this, pageContext);
    if (s != null)
        setOnmousedown(s);

    s = EvalHelper.evalString("onmousemove", getOnmousemoveExpr(), this, pageContext);
    if (s != null)
        setOnmousemove(s);

    s = EvalHelper.evalString("onmouseout", getOnmouseoutExpr(), this, pageContext);
    if (s != null)
        setOnmouseout(s);

    s = EvalHelper.evalString("onmouseover", getOnmouseoverExpr(), this, pageContext);
    if (s != null)
        setOnmouseover(s);

    s = EvalHelper.evalString("onmouseup", getOnmouseupExpr(), this, pageContext);
    if (s != null)
        setOnmouseup(s);

    s = EvalHelper.evalString("property", getPropertyExpr(), this, pageContext);
    if (s != null)
        setProperty(s);

    s = EvalHelper.evalString("style", getStyleExpr(), this, pageContext);
    if (s != null)
        setStyle(s);

    s = EvalHelper.evalString("styleClass", getStyleClassExpr(), this, pageContext);
    if (s != null)
        setStyleClass(s);

    s = EvalHelper.evalString("styleId", getStyleIdExpr(), this, pageContext);
    if (s != null)
        setStyleId(s);

    s = EvalHelper.evalString("tabindex", getTabindexExpr(), this, pageContext);
    if (s != null)
        setTabindex(s);

    s = EvalHelper.evalString("title", getTitleExpr(), this, pageContext);
    if (s != null)
        setTitle(s);

    s = EvalHelper.evalString("titleKey", getTitleKeyExpr(), this, pageContext);
    if (s != null)
        setTitleKey(s);

    s = EvalHelper.evalString("value", getValueExpr(), this, pageContext);
    if (s != null)
        setValue(s);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query;
import org.apache.ignite.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.processors.cache.*;
import org.apache.ignite.internal.processors.timeout.*;
import org.apache.ignite.internal.util.future.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;
import org.jetbrains.annotations.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
/**
 * Query future adapter.
 * <p>
 * Buffers query result pages delivered via {@link #onPage} and hands them out
 * through {@link #next()}/{@link #nextPage()}. Page queue, iterator and the
 * keep-all collection are all guarded by {@link #mux}, which is also used for
 * page-arrival signalling via {@code wait}/{@code notifyAll}.
 *
 * @param <R> Result type.
 */
public abstract class GridCacheQueryFutureAdapter<K, V, R> extends GridFutureAdapter<Collection<R>>
    implements CacheQueryFuture<R>, GridTimeoutObject {
    /** Logger reference. */
    private static final AtomicReference<IgniteLogger> logRef = new AtomicReference<>();

    /** Logger. */
    protected static IgniteLogger log;

    /** Marker used to keep {@code null} results inside collections. */
    private static final Object NULL = new Object();

    /** Cache context. */
    protected GridCacheContext<K, V> cctx;

    /** Query bean ({@code null} only when the no-arg constructor was used). */
    protected final GridCacheQueryBean qry;

    /**
     * Set of received keys used to deduplicate query result set.
     * {@code null} when deduplication is disabled for the query.
     */
    private final Collection<K> keys;

    /** Queue of received, not-yet-consumed result pages. Guarded by {@link #mux}. */
    private final Queue<Collection<R>> queue = new LinkedList<>();

    /** All results received so far (populated only when keepAll is enabled). Guarded by {@link #mux}. */
    private final Collection<Object> allCol = new LinkedList<>();

    /** Number of results available but not yet returned by {@link #next()}. */
    private final AtomicInteger cnt = new AtomicInteger();

    /** Iterator over the page currently being consumed. Guarded by {@link #mux}. */
    private Iterator<R> iter;

    /** Mutex guarding page queue state and used for page-arrival signalling. */
    protected final Object mux = new Object();

    /** Timeout object ID. */
    private IgniteUuid timeoutId = IgniteUuid.randomUuid();

    /** Query start time. */
    private long startTime;

    /** Absolute deadline; stays {@code 0} when no timeout is configured. */
    private long endTime;

    /** Local query flag. */
    protected boolean loc;

    /**
     * Default constructor (e.g. for externalization); leaves query and keys unset.
     */
    protected GridCacheQueryFutureAdapter() {
        qry = null;
        keys = null;
    }

    /**
     * @param cctx Context.
     * @param qry Query.
     * @param loc Local query or not.
     */
    protected GridCacheQueryFutureAdapter(GridCacheContext<K, V> cctx, GridCacheQueryBean qry, boolean loc) {
        this.cctx = cctx;
        this.qry = qry;
        this.loc = loc;

        if (log == null)
            log = U.logger(cctx.kernalContext(), logRef, GridCacheQueryFutureAdapter.class);

        startTime = U.currentTimeMillis();

        long timeout = qry.query().timeout();

        if (timeout > 0) {
            endTime = startTime + timeout;

            // Protect against overflow.
            if (endTime < 0)
                endTime = Long.MAX_VALUE;

            cctx.time().addTimeoutObject(this);
        }

        keys = qry.query().enableDedup() ? new HashSet<K>() : null;
    }

    /**
     * @return Query.
     */
    public GridCacheQueryBean query() {
        return qry;
    }

    /**
     * @return If fields query.
     */
    boolean fields() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public boolean onDone(Collection<R> res, Throwable err) {
        cctx.time().removeTimeoutObject(this);

        qry.query().onExecuted(res, err, startTime(), duration());

        return super.onDone(res, err);
    }

    /** {@inheritDoc} */
    @Override public int available() {
        return cnt.get();
    }

    /** {@inheritDoc} */
    @Override public R next() {
        try {
            R next = unmaskNull(internalIterator().next());

            cnt.decrementAndGet();

            return next;
        }
        catch (NoSuchElementException ignored) {
            // End of result set reached.
            return null;
        }
        catch (IgniteCheckedException e) {
            throw new IgniteException(e);
        }
    }

    /**
     * Returns next page for the query.
     *
     * @return Next page or {@code null} if no more pages available.
     * @throws IgniteCheckedException If fetch failed.
     */
    public Collection<R> nextPage() throws IgniteCheckedException {
        return nextPage(qry.query().timeout(), startTime);
    }

    /**
     * Returns next page for the query.
     *
     * @param timeout Timeout.
     * @return Next page or {@code null} if no more pages available.
     * @throws IgniteCheckedException If fetch failed.
     */
    public Collection<R> nextPage(long timeout) throws IgniteCheckedException {
        return nextPage(timeout, U.currentTimeMillis());
    }

    /**
     * Returns next page for the query.
     *
     * @param timeout Timeout.
     * @param startTime Timeout wait start time.
     * @return Next page or {@code null} if no more pages available.
     * @throws IgniteCheckedException If fetch failed.
     */
    private Collection<R> nextPage(long timeout, long startTime) throws IgniteCheckedException {
        Collection<R> res = null;

        while (res == null) {
            synchronized (mux) {
                res = queue.poll();
            }

            if (res == null) {
                if (!isDone()) {
                    loadPage();

                    long waitTime = timeout == 0 ? Long.MAX_VALUE : timeout - (U.currentTimeMillis() - startTime);

                    if (waitTime <= 0)
                        break;

                    synchronized (mux) {
                        try {
                            // Re-check under the lock to avoid missing a notify.
                            if (queue.isEmpty() && !isDone())
                                mux.wait(waitTime);
                        }
                        catch (InterruptedException e) {
                            Thread.currentThread().interrupt();

                            throw new IgniteCheckedException("Query was interrupted: " + qry, e);
                        }
                    }
                }
                else
                    break;
            }
        }

        checkError();

        return res;
    }

    /**
     * @throws IgniteCheckedException If future is done with an error.
     */
    private void checkError() throws IgniteCheckedException {
        if (error() != null) {
            clear();

            throw new IgniteCheckedException("Query execution failed: " + qry, error());
        }
    }

    /**
     * @return Iterator.
     * @throws IgniteCheckedException In case of error.
     */
    private Iterator<R> internalIterator() throws IgniteCheckedException {
        checkError();

        Iterator<R> it = null;

        while (it == null || !it.hasNext()) {
            Collection<R> c;

            synchronized (mux) {
                it = iter;

                if (it != null && it.hasNext())
                    break;

                c = queue.poll();

                if (c != null)
                    it = iter = c.iterator();

                if (isDone() && queue.peek() == null)
                    break;
            }

            if (c == null && !isDone()) {
                loadPage();

                long timeout = qry.query().timeout();

                long waitTime = timeout == 0 ? Long.MAX_VALUE : timeout - (U.currentTimeMillis() - startTime);

                if (waitTime <= 0) {
                    it = Collections.<R>emptyList().iterator();

                    break;
                }

                synchronized (mux) {
                    try {
                        // Re-check under the lock to avoid missing a notify.
                        if (queue.isEmpty() && !isDone())
                            mux.wait(waitTime);
                    }
                    catch (InterruptedException e) {
                        Thread.currentThread().interrupt();

                        throw new IgniteCheckedException("Query was interrupted: " + qry, e);
                    }
                }
            }
        }

        checkError();

        return it;
    }

    /**
     * @param evtNodeId Removed or failed node Id.
     */
    protected void onNodeLeft(UUID evtNodeId) {
        // No-op.
    }

    /**
     * Adds a result page to the queue. Caller must hold {@link #mux}.
     *
     * @param col Collection.
     */
    @SuppressWarnings({"unchecked"})
    protected void enqueue(Collection<?> col) {
        assert Thread.holdsLock(mux);

        queue.add((Collection<R>)col);

        cnt.addAndGet(col.size());
    }

    /**
     * @param col Query data collection.
     * @return If dedup flag is {@code true} deduplicated collection (considering keys),
     *      otherwise passed in collection without any modifications.
     */
    @SuppressWarnings({"unchecked"})
    private Collection<?> dedupIfRequired(Collection<?> col) {
        if (!qry.query().enableDedup())
            return col;

        Collection<Object> dedupCol = new ArrayList<>(col.size());

        synchronized (mux) {
            for (Object o : col)
                if (!(o instanceof Map.Entry) || keys.add(((Map.Entry<K, V>)o).getKey()))
                    dedupCol.add(o);
        }

        return dedupCol;
    }

    /**
     * @param nodeId Sender node.
     * @param data Page data.
     * @param err Error (if was).
     * @param finished Finished or not.
     */
    @SuppressWarnings({"unchecked", "NonPrivateFieldAccessedInSynchronizedContext"})
    public void onPage(@Nullable UUID nodeId, @Nullable Collection<?> data, @Nullable Throwable err, boolean finished) {
        if (isCancelled())
            return;

        if (log.isDebugEnabled())
            log.debug("Received query result page [nodeId=" + nodeId + ", data=" + data +
                ", err=" + err + ", finished=" + finished + "]");

        try {
            if (err != null)
                synchronized (mux) {
                    enqueue(Collections.emptyList());

                    onPage(nodeId, true);

                    onDone(nodeId != null ?
                        new IgniteCheckedException("Failed to execute query on node [query=" + qry +
                            ", nodeId=" + nodeId + "]", err) :
                        new IgniteCheckedException("Failed to execute query locally: " + qry, err));

                    mux.notifyAll();
                }
            else {
                if (data == null)
                    data = Collections.emptyList();

                data = dedupIfRequired((Collection<Object>)data);

                data = cctx.unwrapPortablesIfNeeded((Collection<Object>)data, qry.query().keepPortable());

                synchronized (mux) {
                    enqueue(data);

                    if (qry.query().keepAll())
                        allCol.addAll(maskNulls((Collection<Object>)data));

                    if (onPage(nodeId, finished)) {
                        onDone((Collection<R>)(qry.query().keepAll() ? unmaskNulls(allCol) : data));

                        clear();
                    }

                    mux.notifyAll();
                }
            }
        }
        catch (Error e) {
            onPageError(nodeId, e);

            throw e;
        }
        catch (Throwable e) {
            onPageError(nodeId, e);
        }
    }

    /**
     * @param nodeId Sender node id.
     * @param e Error.
     */
    private void onPageError(@Nullable UUID nodeId, Throwable e) {
        synchronized (mux) {
            enqueue(Collections.emptyList());

            onPage(nodeId, true);

            onDone(e);

            mux.notifyAll();
        }
    }

    /**
     * @param col Collection.
     * @return Collection with masked {@code null} values.
     */
    private Collection<Object> maskNulls(Collection<Object> col) {
        assert col != null;

        return F.viewReadOnly(col, new C1<Object, Object>() {
            @Override public Object apply(Object e) {
                return e != null ? e : NULL;
            }
        });
    }

    /**
     * @param col Collection.
     * @return Collection with unmasked {@code null} values.
     */
    private Collection<Object> unmaskNulls(Collection<Object> col) {
        assert col != null;

        return F.viewReadOnly(col, new C1<Object, Object>() {
            @Override public Object apply(Object e) {
                return e != NULL ? e : null;
            }
        });
    }

    /**
     * @param obj Object.
     * @return Unmasked object.
     */
    private R unmaskNull(R obj) {
        return obj != NULL ? obj : null;
    }

    /** {@inheritDoc} */
    @Override public Collection<R> get() throws IgniteCheckedException {
        if (!isDone())
            loadAllPages();

        return super.get();
    }

    /** {@inheritDoc} */
    @Override public Collection<R> get(long timeout, TimeUnit unit) throws IgniteCheckedException {
        if (!isDone())
            loadAllPages();

        return super.get(timeout, unit);
    }

    /**
     * @param nodeId Sender node id.
     * @param last Whether page is last.
     * @return Is query finished or not.
     */
    protected abstract boolean onPage(UUID nodeId, boolean last);

    /**
     * Loads next page.
     */
    protected abstract void loadPage();

    /**
     * Loads all left pages.
     *
     * @throws IgniteInterruptedCheckedException If thread is interrupted.
     */
    protected abstract void loadAllPages() throws IgniteInterruptedCheckedException;

    /**
     * Clears future.
     */
    void clear() {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public boolean cancel() throws IgniteCheckedException {
        if (onCancelled()) {
            cancelQuery();

            return true;
        }
        else
            return false;
    }

    /**
     * @throws IgniteCheckedException In case of error.
     */
    protected abstract void cancelQuery() throws IgniteCheckedException;

    /** {@inheritDoc} */
    @Override public IgniteUuid timeoutId() {
        return timeoutId;
    }

    /** {@inheritDoc} */
    @Override public long endTime() {
        return endTime;
    }

    /** {@inheritDoc} */
    @Override public void onTimeout() {
        try {
            cancelQuery();

            onDone(new IgniteFutureTimeoutCheckedException("Query timed out."));
        }
        catch (IgniteCheckedException e) {
            onDone(e);
        }
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridCacheQueryFutureAdapter.class, this);
    }

    /** Prints sizes of internal buffers for memory debugging. */
    public void printMemoryStats() {
        X.println(">>> Query future memory statistics.");
        X.println(">>> queueSize: " + queue.size());
        X.println(">>> allCollSize: " + allCol.size());
        // Fix: 'keys' is null when deduplication is disabled (see constructor),
        // so dereferencing it unconditionally threw NPE for non-dedup queries.
        X.println(">>> keysSize: " + (keys != null ? keys.size() : 0));
        X.println(">>> cnt: " + cnt);
    }
}
| |
package org.jgroups.protocols;
import org.jgroups.*;
import org.jgroups.annotations.MBean;
import org.jgroups.annotations.ManagedAttribute;
import org.jgroups.annotations.ManagedOperation;
import org.jgroups.annotations.Property;
import org.jgroups.conf.ConfiguratorFactory;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.Protocol;
import org.jgroups.util.*;
import org.jgroups.util.UUID;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.*;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
/**
* The Discovery protocol retrieves the initial membership (used by GMS and MERGE3) by sending discovery requests.
* We do this in subclasses of Discovery, e.g. by mcasting a discovery request ({@link PING}) or, if gossiping is enabled,
* by contacting the GossipRouter ({@link TCPGOSSIP}).<p/>
* The responses should allow us to determine the coordinator which we have to contact, e.g. in case we want to join
* the group, or to see if we have diverging views in case of MERGE2.<p/>
* When we are a server (after having received the BECOME_SERVER event), we'll respond to discovery requests with
* a discovery response.
*
* @author Bela Ban
*/
@MBean
public abstract class Discovery extends Protocol {
/* ----------------------------------------- Properties -------------------------------------------------- */
@Property(description="Return from the discovery phase as soon as we have 1 coordinator response")
protected boolean break_on_coord_rsp=true;
@Property(description="Whether or not to return the entire logical-physical address cache mappings on a " +
"discovery request, or not.")
protected boolean return_entire_cache;
@Property(description="If greater than 0, we'll wait a random number of milliseconds in range [0..stagger_timeout] " +
"before sending a discovery response. This prevents traffic spikes in large clusters when everyone sends their " +
"discovery response at the same time")
protected long stagger_timeout;
@Property(description="If a persistent disk cache (PDC) is present, combine the discovery results with the " +
"contents of the disk cache before returning the results")
protected boolean use_disk_cache;
@Property(description="Max size of the member list shipped with a discovery request. If we have more, the " +
"mbrs field in the discovery request header is nulled and members return the entire membership, " +
"not individual members")
// Max number of member addresses packed into a single discovery request
protected int max_members_in_discovery_request=500;
@Property(description="Expiry time of discovery responses in ms")
protected long discovery_rsp_expiry_time=60000;
@Property(description="If true then the discovery is done on a separate timer thread. Should be set to true when " +
"discovery is blocking and/or takes more than a few milliseconds")
protected boolean async_discovery;
@Property(description="If enabled, use a separate thread for every discovery request. Can be used with or without " +
"async_discovery")
protected boolean async_discovery_use_separate_thread_per_request;
@Property(description="When a new node joins, and we have a static discovery protocol (TCPPING), then send the " +
"contents of the discovery cache to new and existing members if true (and we're the coord). Addresses JGRP-1903")
protected boolean send_cache_on_join=true;
@Property(description="The max rank of this member to respond to discovery requests, e.g. if " +
"max_rank_to_reply=2 in {A,B,C,D,E}, only A (rank 1) and B (rank 2) will reply. A value <= 0 means " +
"everybody will reply. This attribute is ignored if TP.use_ip_addrs is false.")
protected int max_rank_to_reply;
@Property(description="The number of times a discovery process is executed when finding initial members " +
"(https://issues.jboss.org/browse/JGRP-2317)")
protected int num_discovery_runs=1;
/* --------------------------------------------- JMX ------------------------------------------------------ */
@ManagedAttribute(description="Total number of discovery requests sent ")
protected int num_discovery_requests;
/* --------------------------------------------- Fields ------------------------------------------------------ */
protected volatile boolean is_server; // true once we've become a full group member (BECOME_SERVER)
protected volatile boolean is_leaving; // true between DISCONNECT and the next CONNECT
protected TimeScheduler timer; // the transport's timer; used for async/staggered discovery tasks
protected volatile View view;
@ManagedAttribute(description="Whether this member is the current coordinator")
protected volatile boolean is_coord;
protected volatile Address local_addr;
protected volatile Address current_coord;
protected String cluster_name;
// Pending discovery operations, keyed by registration time (System.nanoTime()); see addResponse(Responses)
protected final Map<Long,Responses> ping_responses=new HashMap<>();
// Futures of scheduled repeat discovery runs; cancelled/cleared by clearRequestFutures()
protected final Set<Future<?>> discovery_req_futures=new ConcurrentSkipListSet<>();
@ManagedAttribute(description="Whether the transport supports multicasting")
protected boolean transport_supports_multicasting=true;
protected boolean use_ip_addrs; // caches TP.use_ip_addrs
@ManagedAttribute(description="True if sending a message can block at the transport level")
protected boolean sends_can_block=true;
protected Consumer<PingData> discovery_rsp_callback; // called when a discovery response is received
// Separator written between fields when dumping the cache to a file; see write()
protected static final byte[] WHITESPACE=" \t".getBytes();
/**
 * Validates configuration and caches transport capabilities (multicast support, blocking sends, IP-address mode).
 * @throws IllegalArgumentException if stagger_timeout is negative or num_discovery_runs &lt; 1
 */
public void init() throws Exception {
    TP tp=getTransport();
    if(stagger_timeout < 0)
        throw new IllegalArgumentException("stagger_timeout cannot be negative");
    if(num_discovery_runs < 1)
        throw new IllegalArgumentException("num_discovery_runs must be >= 1");
    transport_supports_multicasting=tp.supportsMulticasting();
    sends_can_block=getTransport() instanceof TCP; // UDP and TCP_NIO2 won't block
    use_ip_addrs=tp.getUseIpAddresses();
}
/**
 * Fetches the shared timer from the transport.
 * @throws Exception if the timer cannot be retrieved from the protocol stack
 */
public void start() throws Exception {
    super.start();
    timer=getTransport().getTimer();
    if(timer == null)
        throw new Exception("timer cannot be retrieved from protocol stack");
}
/** Marks this member as no longer being a server and cancels any pending scheduled discovery runs. */
public void stop() {
    is_server=false;
    clearRequestFutures();
}
/** Whether the discovery mechanism is dynamic (e.g. multicast) as opposed to static (e.g. a fixed host list). */
public abstract boolean isDynamic();
/** Hook invoked on DISCONNECT; no-op by default, subclasses may override. */
public void handleDisconnect() {
}
/** Hook invoked on CONNECT; no-op by default, subclasses may override. */
public void handleConnect() {
}
/** Hook invoked when a discovery request is received; no-op by default, subclasses may override. */
public void discoveryRequestReceived(Address sender, String logical_name, PhysicalAddress physical_addr) {
}
// Simple accessors/mutators; the setters return 'this' (as a subclass type) for call chaining.
public int getNumberOfDiscoveryRequestsSent() {return num_discovery_requests;}
public boolean breakOnCoordResponse() {return break_on_coord_rsp;}
public <T extends Discovery> T breakOnCoordResponse(boolean flag) {break_on_coord_rsp=flag; return (T)this;}
public boolean returnEntireCache() {return return_entire_cache;}
public <T extends Discovery> T returnEntireCache(boolean flag) {return_entire_cache=flag; return (T)this;}
public long staggerTimeout() {return stagger_timeout;}
public <T extends Discovery> T staggerTimeout(long timeout) {stagger_timeout=timeout; return (T)this;}
public boolean useDiskCache() {return use_disk_cache;}
public <T extends Discovery> T useDiskCache(boolean flag) {use_disk_cache=flag; return (T)this;}
public <T extends Discovery> T discoveryRspExpiryTime(long t) {this.discovery_rsp_expiry_time=t; return (T)this;}
/** Returns the current view id as a string, or "null" if no view has been installed yet. */
@ManagedAttribute
public String getView() {return view != null? view.getViewId().toString() : "null";}
/** Returns the current ViewId, or null if no view has been installed yet. */
public ViewId getViewId() {
    return view != null? view.getViewId() : null;
}
@ManagedAttribute(description="The address of the current coordinator")
public String getCurrentCoord() {return current_coord != null? current_coord.toString() : "n/a";}
/** Asks the protocols above (via IS_MERGE_IN_PROGRESS) whether a merge is currently running. */
protected boolean isMergeRunning() {
    Object retval=up_prot.up(new Event(Event.IS_MERGE_IN_PROGRESS));
    return retval instanceof Boolean && (Boolean)retval;
}
@ManagedOperation(description="Sends information about my cache to everyone but myself")
public void sendCacheInformation() {
    // copy the membership so disseminateDiscoveryInformation() may modify it freely
    List<Address> current_members=new ArrayList<>(view.getMembers());
    disseminateDiscoveryInformation(current_members, null, current_members);
}
/** Events this protocol handles on behalf of protocols above it. */
public List<Integer> providedUpServices() {
    return Arrays.asList(Event.FIND_INITIAL_MBRS, Event.GET_PHYSICAL_ADDRESS, Event.FIND_MBRS);
}
/** Resets JMX statistics, including the discovery request counter. */
public void resetStats() {
    super.resetStats();
    num_discovery_requests=0;
}
/**
 * Registers a Responses object, keyed by the current System.nanoTime(), so incoming discovery responses
 * can be routed to it until it is done or expires (see discovery_rsp_expiry_time).
 * NOTE(review): two registrations within the same nanosecond would overwrite each other in the map —
 * presumably acceptable in practice; confirm.
 */
public void addResponse(Responses rsp) {
    synchronized(ping_responses) {
        ping_responses.put(System.nanoTime(), rsp);
    }
}
/**
 * Fetches information (e.g. physical address, logical name) for the given member addresses. Needs to add responses
 * to the {@link org.jgroups.util.Responses} object. If {@link #async_discovery} is true, this method will be called
 * in a separate thread, otherwise the caller's thread will be used.
 * @param members A list of logical addresses (typically {@link org.jgroups.util.UUID}s). If null, then information
 *                for all members is fetched
 * @param initial_discovery Set to true if this is for the initial membership discovery. Some protocols (e.g.
 *                          file based ones) may return only the information for the coordinator(s).
 * @param responses The list to which responses should be added
 */
protected abstract void findMembers(List<Address> members, boolean initial_discovery, Responses responses);
/** Calls {@link #findMembers(List, boolean, Responses)}. Subclasses may use 'async' to run per-request threads. */
protected void invokeFindMembers(List<Address> members, boolean initial_discovery, Responses rsps, boolean async) {
    findMembers(members, initial_discovery, rsps); // ignores 'async' parameter
}
/**
 * Runs a discovery round, possibly asynchronously and possibly repeated (num_discovery_runs) for initial discovery.
 * @param members members to look up, or null for all
 * @param initial_discovery true when triggered by the JOIN process
 * @param async force asynchronous execution on the timer thread
 * @param timeout total time budget (ms); used to space out repeated discovery runs
 * @return the (possibly still incomplete) Responses object
 */
public Responses findMembers(final List<Address> members, final boolean initial_discovery, boolean async, long timeout) {
    num_discovery_requests++;
    int num_expected=members != null? members.size() : 0;
    int capacity=members != null? members.size() : 16;
    Responses rsps=new Responses(num_expected, initial_discovery && break_on_coord_rsp, capacity);
    addResponse(rsps);
    // precedence: '&&' binds tighter than '||', i.e. run async when explicitly requested, configured,
    // or when multiple discovery runs are needed for an initial discovery
    if(async || async_discovery || (num_discovery_runs > 1) && initial_discovery) {
        final Runnable find_method=() -> invokeFindMembers(members, initial_discovery, rsps, async);
        timer.execute(find_method);
        if(num_discovery_runs > 1 && initial_discovery) {
            // schedule the remaining runs evenly within the first (num_discovery_runs-1)/num_discovery_runs of 'timeout'
            int num_reqs_to_send=num_discovery_runs-1;
            long last_send=timeout - (timeout/num_discovery_runs);
            long interval=last_send/num_reqs_to_send;
            for(long i=0,delay=interval; i < num_reqs_to_send; i++,delay+=interval) {
                Future<?> future=timer.schedule(find_method, delay, TimeUnit.MILLISECONDS);
                this.discovery_req_futures.add(future);
                num_discovery_requests++;
            }
        }
    }
    else
        invokeFindMembers(members, initial_discovery, rsps, async);
    weedOutCompletedDiscoveryResponses();
    return rsps;
}
/**
 * Runs a discovery round and renders the responses as one line per member.
 * Waits up to 300 ms for outstanding responses; returns "&lt;empty&gt;" if none arrived.
 */
@ManagedOperation(description="Runs the discovery protocol to find initial members")
public String findInitialMembersAsString() {
    Responses rsps=findMembers(null, false, false, 0);
    if(!rsps.isDone())
        rsps.waitFor(300);
    if(rsps.isEmpty())
        return "<empty>";
    StringBuilder buf=new StringBuilder();
    for(PingData ping_rsp: rsps)
        buf.append(ping_rsp).append('\n');
    return buf.toString();
}
@ManagedOperation(description="Reads logical-physical address mappings and logical name mappings from a " +
"file (or URL) and adds them to the local caches")
public void addToCache(String filename) throws Exception {
    // read() closes the stream; format is the one produced by dumpCache()/write()
    InputStream in=ConfiguratorFactory.getConfigStream(filename);
    List<PingData> list=read(in);
    if(list != null)
        for(PingData data: list)
            addDiscoveryResponseToCaches(data.getAddress(), data.getLogicalName(), data.getPhysicalAddr());
}
@ManagedOperation(description="Reads data from local caches and dumps them to a file")
public void dumpCache(String output_filename) throws Exception {
    // pull the transport's logical->physical address cache
    Map<Address,PhysicalAddress> cache_contents=
      (Map<Address,PhysicalAddress>)down_prot.down(new Event(Event.GET_LOGICAL_PHYSICAL_MAPPINGS, false));
    List<PingData> list=new ArrayList<>(cache_contents.size());
    for(Map.Entry<Address,PhysicalAddress> entry: cache_contents.entrySet()) {
        Address addr=entry.getKey();
        PhysicalAddress phys_addr=entry.getValue();
        // the coord flag here only marks our own entry, not the actual coordinator
        PingData data=new PingData(addr, true, NameCache.get(addr), phys_addr).coord(addr.equals(local_addr));
        list.add(data);
    }
    OutputStream out=new FileOutputStream(output_filename);
    write(list, out); // write() closes the stream
}
/** Handles FIND_MBRS coming from above by running an asynchronous discovery; everything else is passed up. */
public Object up(Event evt) {
    switch(evt.getType()) {
        case Event.FIND_MBRS:
            return findMembers(evt.getArg(), false, true, 0); // this is done asynchronously
    }
    return up_prot.up(evt);
}
/**
 * Handles an incoming message carrying a PingHeader: GET_MBRS_REQ (a discovery request) is answered with our
 * own info — or the entire local cache if return_entire_cache is set — and GET_MBRS_RSP (a discovery response)
 * is added to the local caches. Messages without a PingHeader are passed up unchanged.
 */
public Object up(Message msg) {
    PingHeader hdr=msg.getHeader(this.id);
    if(hdr == null)
        return up_prot.up(msg);
    if(is_leaving)
        return null; // prevents merging back a leaving member (https://issues.jboss.org/browse/JGRP-1336)
    PingData data=readPingData(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
    Address logical_addr=data != null? data.getAddress() : msg.src();
    switch(hdr.type) {
        case PingHeader.GET_MBRS_REQ:   // return Rsp(local_addr, coord)
            // drop requests from other clusters; warn (but still process) if either cluster name is unknown
            if(cluster_name == null || hdr.cluster_name == null) {
                log.warn("cluster_name (%s) or cluster_name of header (%s) is null; passing up discovery " +
                           "request from %s, but this should not be the case", cluster_name, hdr.cluster_name, msg.src());
            }
            else {
                if(!cluster_name.equals(hdr.cluster_name)) {
                    log.warn("%s: discarding discovery request for cluster '%s' from %s; " +
                               "our cluster name is '%s'. Please separate your clusters properly",
                             logical_addr, hdr.cluster_name, msg.src(), cluster_name);
                    return null;
                }
            }
            // add physical address and logical name of the discovery sender (if available) to the cache
            if(data != null) { // null if use_ip_addrs is true
                addDiscoveryResponseToCaches(logical_addr, data.getLogicalName(), data.getPhysicalAddr());
                discoveryRequestReceived(msg.getSrc(), data.getLogicalName(), data.getPhysicalAddr());
                addResponse(data, false);
            }
            if(return_entire_cache) {
                Map<Address,PhysicalAddress> cache=(Map<Address,PhysicalAddress>)down(new Event(Event.GET_LOGICAL_PHYSICAL_MAPPINGS));
                if(cache != null) {
                    for(Map.Entry<Address,PhysicalAddress> entry: cache.entrySet()) {
                        Address addr=entry.getKey();
                        // JGRP-1492: only return our own address, and addresses in view.
                        if(addr.equals(local_addr) || (view != null && view.containsMember(addr))) {
                            PhysicalAddress physical_addr=entry.getValue();
                            sendDiscoveryResponse(addr, physical_addr, NameCache.get(addr), msg.getSrc(), isCoord(addr));
                        }
                    }
                }
                return null;
            }
            // Only send a response if hdr.mbrs is not empty and contains myself. Otherwise always send my info
            Collection<? extends Address> mbrs=data != null? data.mbrs() : null;
            // in IP-address mode, only the first max_rank_to_reply members answer initial discovery requests
            boolean drop_because_of_rank=use_ip_addrs && max_rank_to_reply > 0 && hdr.initialDiscovery() && Util.getRank(view, local_addr) > max_rank_to_reply;
            if(drop_because_of_rank || (mbrs != null && !mbrs.contains(local_addr)))
                return null;
            PhysicalAddress physical_addr=(PhysicalAddress)down(new Event(Event.GET_PHYSICAL_ADDRESS, local_addr));
            sendDiscoveryResponse(local_addr, physical_addr, NameCache.get(local_addr), msg.getSrc(), is_coord);
            return null;
        case PingHeader.GET_MBRS_RSP:
            // add physical address (if available) to transport's cache
            if(data != null) {
                log.trace("%s: received GET_MBRS_RSP from %s: %s", local_addr, msg.src(), data);
                handleDiscoveryResponse(data, msg.src());
            }
            return null;
        default:
            log.warn("got PING header with unknown type %d", hdr.type);
            return null;
    }
}
/**
 * Processes a discovery response: updates the logical-name/physical-address caches and routes the
 * response to any pending Responses objects. Existing entries are only overwritten when the
 * response's logical address matches the actual sender.
 */
protected void handleDiscoveryResponse(PingData data, Address sender) {
    // add physical address (if available) to transport's cache
    Address logical_addr=data.getAddress() != null? data.getAddress() : sender;
    addDiscoveryResponseToCaches(logical_addr, data.getLogicalName(), data.getPhysicalAddr());
    boolean overwrite=Objects.equals(logical_addr, sender);
    addResponse(data, overwrite);
}
/**
 * Handles events travelling down the stack: discovery triggers (FIND_INITIAL_MBRS, FIND_MBRS,
 * FIND_MBRS_ASYNC), view installation (updates coordinator state and optionally disseminates the
 * discovery cache), and lifecycle events (CONNECT/DISCONNECT/BECOME_SERVER/SET_LOCAL_ADDRESS).
 */
public Object down(Event evt) {
    switch(evt.getType()) {
        case Event.FIND_INITIAL_MBRS:      // sent by GMS layer
            long timeout=evt.getArg();
            return findMembers(null, true, false, timeout); // triggered by JOIN process (ClientGmsImpl)
        case Event.FIND_MBRS:
            return findMembers(evt.getArg(), false, false, 0); // triggered by MERGE3
        case Event.FIND_MBRS_ASYNC:
            // register the callback first so responses of the subsequent discovery reach it
            discovery_rsp_callback=evt.arg();
            return findMembers(null, false, false, 0); // triggered by MERGE3
        case Event.VIEW_CHANGE:
            View old_view=view;
            view=evt.getArg();
            current_coord=view.getCoord();
            is_coord=Objects.equals(current_coord, local_addr);
            Object retval=down_prot.down(evt);
            // if we're the coord of a static discovery protocol, push the cache to new/existing members (JGRP-1903)
            if(send_cache_on_join && !isDynamic() && is_coord) {
                List<Address> curr_mbrs, left_mbrs, new_mbrs;
                curr_mbrs=new ArrayList<>(view.getMembers());
                left_mbrs=View.leftMembers(old_view, view);
                new_mbrs=View.newMembers(old_view, view);
                startCacheDissemination(curr_mbrs, left_mbrs, new_mbrs); // separate task
            }
            return retval;
        case Event.BECOME_SERVER: // called after client has joined and is fully working group member
            down_prot.down(evt);
            is_server=true;
            return null;
        case Event.SET_LOCAL_ADDRESS:
            local_addr=evt.getArg();
            return down_prot.down(evt);
        case Event.CONNECT:
        case Event.CONNECT_WITH_STATE_TRANSFER:
        case Event.CONNECT_USE_FLUSH:
        case Event.CONNECT_WITH_STATE_TRANSFER_USE_FLUSH:
            is_leaving=false;
            cluster_name=evt.getArg();
            Object ret=down_prot.down(evt);
            handleConnect(); // subclass hook, invoked after the stack below has connected
            return ret;
        case Event.DISCONNECT:
            is_leaving=true;
            handleDisconnect();
            return down_prot.down(evt);
        default:
            return down_prot.down(evt);          // Pass on to the layer below us
    }
}
/* -------------------------- Private methods ---------------------------- */
/**
 * Parses PingData records from an input stream written by {@link #write(List, OutputStream)}. Each record
 * consists of four whitespace-separated tokens: logical name, UUID (either a plain long or a UUID string),
 * physical address, and a coordinator flag ("T"/"t" means coordinator).
 * The stream is always closed.
 * @param in the stream to read from; closed before returning
 * @return the parsed records, or null if none could be read
 */
protected List<PingData> read(InputStream in) {
    List<PingData> retval=null;
    try {
        while(true) {
            try {
                String name_str=Util.readToken(in);
                String uuid_str=Util.readToken(in);
                String addr_str=Util.readToken(in);
                String coord_str=Util.readToken(in);
                if(name_str == null || uuid_str == null || addr_str == null || coord_str == null)
                    break;
                UUID uuid=null;
                try {
                    // a plain long is interpreted as the low 64 bits of the UUID
                    long tmp=Long.parseLong(uuid_str);
                    uuid=new UUID(0, tmp);
                }
                catch(Throwable t) {
                    uuid=UUID.fromString(uuid_str);
                }
                PhysicalAddress phys_addr=new IpAddress(addr_str);
                boolean is_coordinator=coord_str.trim().equalsIgnoreCase("t");
                if(retval == null)
                    retval=new ArrayList<>();
                retval.add(new PingData(uuid, true, name_str, phys_addr).coord(is_coordinator));
            }
            catch(Throwable t) {
                log.error(Util.getMessage("FailedReadingLineOfInputStream"), t);
                // bail out: retrying on a persistently faulty stream would spin forever
                break;
            }
        }
        return retval;
    }
    finally {
        Util.close(in);
    }
}
/**
 * Writes the given records to the stream in the format parsed by {@link #read(InputStream)}:
 * logical name, address, physical address and a T/F coordinator flag, separated by whitespace.
 * Records missing any of the three fields are skipped. The stream is always closed.
 * NOTE(review): getBytes() uses the platform default charset here — presumably fine for the
 * ASCII-ish data written, but confirm if non-ASCII logical names are possible.
 */
protected void write(List<PingData> list, OutputStream out) throws Exception {
    try {
        for(PingData data: list) {
            String logical_name=data.getLogicalName();
            Address addr=data.getAddress();
            PhysicalAddress phys_addr=data.getPhysicalAddr();
            if(logical_name == null || addr == null || phys_addr == null)
                continue;
            out.write(logical_name.getBytes());
            out.write(WHITESPACE);
            out.write(addressAsString(addr).getBytes());
            out.write(WHITESPACE);
            out.write(phys_addr.toString().getBytes());
            out.write(WHITESPACE);
            out.write(data.isCoord()? String.format("T%n").getBytes() : String.format("F%n").getBytes());
        }
    }
    finally {
        Util.close(out);
    }
}
/**
 * Routes a single discovery response to the async callback (if set) and to every registered
 * Responses object; pending entries that are complete or expired are removed along the way.
 * @param rsp the discovery response
 * @param overwrite whether an existing entry for the same member may be replaced
 */
protected void addResponse(PingData rsp, boolean overwrite) {
    if(discovery_rsp_callback != null) {
        try {
            discovery_rsp_callback.accept(rsp);
        }
        catch(Throwable t) {
            // the callback is user-supplied; never let it break response processing
            log.error("%s: failed invoking callback for discovery response: %s", local_addr, t);
        }
    }
    synchronized(ping_responses) {
        for(Iterator<Map.Entry<Long,Responses>> it=ping_responses.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Long,Responses> entry=it.next();
            long timestamp=entry.getKey(); // registration time (nanoTime) of this Responses object
            Responses rsps=entry.getValue();
            rsps.addResponse(rsp, overwrite);
            // prune when complete or older than discovery_rsp_expiry_time
            if(rsps.isDone() || TimeUnit.MILLISECONDS.convert(System.nanoTime() - timestamp, TimeUnit.NANOSECONDS) > discovery_rsp_expiry_time) {
                it.remove();
                rsps.done();
                clearRequestFutures();
            }
        }
    }
}
/** Removes responses which are done or whose timeout has expired (in the latter case, an expired response is marked as done) */
@ManagedOperation(description="Removes expired or completed responses")
public void weedOutCompletedDiscoveryResponses() {
    synchronized(ping_responses) {
        for(Iterator<Map.Entry<Long,Responses>> it=ping_responses.entrySet().iterator(); it.hasNext();) {
            Map.Entry<Long,Responses> entry=it.next();
            long timestamp=entry.getKey(); // registration time (nanoTime) of this Responses object
            Responses rsps=entry.getValue();
            if(rsps.isDone() || TimeUnit.MILLISECONDS.convert(System.nanoTime() - timestamp, TimeUnit.NANOSECONDS) > discovery_rsp_expiry_time) {
                it.remove();
                rsps.done();
                clearRequestFutures();
            }
        }
    }
}
/**
 * Adds a member's logical name to the name cache and its physical address to the transport's cache.
 * @return true if the physical address was added, false otherwise (including null mbr or null physical_addr)
 */
protected boolean addDiscoveryResponseToCaches(Address mbr, String logical_name, PhysicalAddress physical_addr) {
    if(mbr == null)
        return false;
    if(logical_name != null)
        NameCache.add(mbr, logical_name);
    if(physical_addr != null)
        return (Boolean)down(new Event(Event.ADD_PHYSICAL_ADDRESS, new Tuple<>(mbr, physical_addr)));
    return false;
}
/** Cancels and discards all scheduled repeat-discovery futures. */
protected void clearRequestFutures() {
    discovery_req_futures.forEach(f->f.cancel(true));
    discovery_req_futures.clear();
}
/** Schedules a background task that disseminates the discovery cache to the given members. */
protected synchronized void startCacheDissemination(List<Address> curr_mbrs, List<Address> left_mbrs, List<Address> new_mbrs) {
    timer.execute(new DiscoveryCacheDisseminationTask(curr_mbrs,left_mbrs,new_mbrs), sends_can_block);
}
/**
 * Creates a byte[] representation of the PingData, but DISCARDING the view it contains.
 * @param data the PingData instance to serialize.
 * @return the serialized bytes, or null if serialization failed (the error is logged)
 */
protected byte[] serializeWithoutView(PingData data) {
    // copy only the view-free fields, then serialize the copy
    final PingData clone = new PingData(data.getAddress(), data.isServer(), data.getLogicalName(), data.getPhysicalAddr()).coord(data.isCoord());
    try {
        return Util.streamableToByteBuffer(clone);
    }
    catch(Exception e) {
        log.error(Util.getMessage("ErrorSerializingPingData"), e);
        return null;
    }
}
/** Deserializes a PingData from the given byte array. */
protected static PingData deserialize(final byte[] data) throws Exception {
    return Util.streamableFromByteBuffer(PingData::new, data);
}
/** Serializes a PingData into a Buffer suitable for use as a message payload. */
public static Buffer marshal(PingData data) {
    return Util.streamableToBuffer(data);
}
/**
 * Deserializes a PingData from a message payload.
 * @return the PingData, or null if the buffer is null or deserialization failed (the error is logged)
 */
protected PingData readPingData(byte[] buffer, int offset, int length) {
    try {
        return buffer != null? Util.streamableFromBuffer(PingData::new, buffer, offset, length) : null;
    }
    catch(Exception ex) {
        log.error("%s: failed reading PingData from message: %s", local_addr, ex);
        return null;
    }
}
/**
 * Sends a GET_MBRS_RSP message describing the given member to 'sender'. If stagger_timeout &gt; 0,
 * the send is delayed by a rank-based amount to avoid response storms when many members reply at once.
 */
protected void sendDiscoveryResponse(Address logical_addr, PhysicalAddress physical_addr,
                                     String logical_name, final Address sender, boolean coord) {
    final PingData data=new PingData(logical_addr, is_server, logical_name, physical_addr).coord(coord);
    final Message rsp_msg=new Message(sender).setFlag(Message.Flag.INTERNAL, Message.Flag.OOB, Message.Flag.DONT_BUNDLE)
      .putHeader(this.id, new PingHeader(PingHeader.GET_MBRS_RSP)).setBuffer(marshal(data));
    if(stagger_timeout > 0) {
        int view_size=view != null? view.size() : 10;
        int rank=Util.getRank(view, local_addr); // returns 0 if view or local_addr are null
        // rank 0 (unknown): random delay within stagger_timeout; otherwise delay proportional to rank
        long sleep_time=rank == 0? Util.random(stagger_timeout)
          : stagger_timeout * rank / view_size - (stagger_timeout / view_size);
        timer.schedule(() -> {
            log.trace("%s: received GET_MBRS_REQ from %s, sending staggered response %s", local_addr, sender, data);
            down_prot.down(rsp_msg);
        }, sleep_time, TimeUnit.MILLISECONDS, sends_can_block);
        return;
    }
    log.trace("%s: received GET_MBRS_REQ from %s, sending response %s", local_addr, sender, data);
    down_prot.down(rsp_msg);
}
/** Renders an address for the cache dump: long form for UUIDs, toString() otherwise, "" for null. */
protected static String addressAsString(Address address) {
    if(address == null)
        return "";
    return address instanceof UUID? ((UUID)address).toStringLong() : address.toString();
}
/** Returns true if the given member is the current coordinator. */
protected boolean isCoord(Address member) {
    return member.equals(current_coord);
}
/** Disseminates cache information (UUID/IP address/port/name) to the given members
 * @param current_mbrs The current members. Guaranteed to be non-null. This is a copy and can be modified.
 * @param left_mbrs The members which left. These are excluded from dissemination. Can be null if no members left
 * @param new_mbrs The new members that we need to disseminate the information to. No-op if null or empty.
 */
protected void disseminateDiscoveryInformation(List<Address> current_mbrs, List<Address> left_mbrs, List<Address> new_mbrs) {
    if(new_mbrs == null || new_mbrs.isEmpty())
        return;
    // exclude ourself and members which left from the set of members to disseminate
    if(local_addr != null)
        current_mbrs.remove(local_addr);
    if(left_mbrs != null)
        current_mbrs.removeAll(left_mbrs);
    // 1. Send information about <everyone - self - left_mbrs> to new_mbrs
    Set<Address> info=new HashSet<>(current_mbrs);
    for(Address addr : info) {
        PhysicalAddress phys_addr=(PhysicalAddress)down_prot.down(new Event(Event.GET_PHYSICAL_ADDRESS,addr));
        if(phys_addr == null)
            continue; // nothing useful to send without a physical address
        boolean is_coordinator=isCoord(addr);
        for(Address target : new_mbrs)
            sendDiscoveryResponse(addr,phys_addr,NameCache.get(addr),target,is_coordinator);
    }
    // 2. Send information about new_mbrs to <everyone - self - left_mbrs - new_mbrs>
    Set<Address> targets=new HashSet<>(current_mbrs);
    targets.removeAll(new_mbrs);
    if(!targets.isEmpty()) {
        for(Address addr : new_mbrs) {
            PhysicalAddress phys_addr=(PhysicalAddress)down_prot.down(new Event(Event.GET_PHYSICAL_ADDRESS,addr));
            if(phys_addr == null)
                continue;
            boolean is_coordinator=isCoord(addr);
            for(Address target : targets)
                sendDiscoveryResponse(addr,phys_addr,NameCache.get(addr),target,is_coordinator);
        }
    }
}
/** Background task that pushes the discovery cache to new members (and new-member info to existing ones). */
protected class DiscoveryCacheDisseminationTask implements Runnable {
    protected final List<Address> curr_mbrs, left_mbrs, new_mbrs;
    public DiscoveryCacheDisseminationTask(List<Address> curr_mbrs,List<Address> left_mbrs,List<Address> new_mbrs) {
        this.curr_mbrs=curr_mbrs;
        this.left_mbrs=left_mbrs;
        this.new_mbrs=new_mbrs;
    }
    public void run() {
        disseminateDiscoveryInformation(curr_mbrs, left_mbrs, new_mbrs);
    }
}
}
| |
package edu.usfca.ds.views;
import edu.usfca.xj.appkit.gview.base.Vector2D;
import edu.usfca.xj.appkit.gview.object.GElement;
import edu.usfca.xj.appkit.gview.object.GElementArrow;
import edu.usfca.xj.appkit.gview.object.GElementLabel;
import java.awt.*;
/**
 * Animated visualization of six comparison-based sorting algorithms over a fixed-size int array.
 * The working arrays are sized 2*SIZE: indices [0, SIZE) are the visible top row; indices
 * [SIZE, 2*SIZE) are a bottom work row used by merge sort and as a temp cell by insertion/shell sort.
 */
public class DSViewSort extends DSView {
    // Function codes dispatched by CallFunction()
    public final int BUBBLESORT = 1;
    public final int SELECTIONSORT = 2;
    public final int INSERTIONSORT = 3;
    public final int MERGESORT = 4;
    public final int QUICKSORT = 5;
    public final int SHELLSORT = 6;
    protected final double MAXHEIGHT = 200.0; // pixel height of a bar representing the value 100
    protected final int SIZE = 45;            // number of elements being sorted
    protected final int ALLOCSIZE = 2 * SIZE; // top row + bottom work row
    protected int SortData[];                 // the values being sorted
    protected GElement SortElements[];        // numeric labels of the values
    protected GElement GraphicalElements[];   // bar-chart rectangles (only when Graphical)
    protected GElement ArrayFrame[];          // cell outline rectangles
    protected GElement SortIndex[];           // index labels under the cells
    protected int Xpos[];                     // cell x coordinates
    protected int Ypos[];                     // cell y coordinates
    protected boolean Graphical;              // bar-chart mode vs. numbers-only
    protected boolean visableIndices;         // whether index labels are shown
/**
 * Allocates the working arrays, draws the SIZE cell frames and index labels for the top row,
 * fills the array with random values in [0, 99) and renders it.
 */
public DSViewSort() {
    visableIndices = true;
    Graphical = true;
    waitscalefactor = 10;
    SortData = new int[ALLOCSIZE];
    SortElements = new GElement[ALLOCSIZE];
    SortIndex = new GElement[ALLOCSIZE];
    ArrayFrame = new GElement[ALLOCSIZE];
    GraphicalElements = new GElement[ALLOCSIZE];
    Xpos = new int[ALLOCSIZE];
    Ypos = new int[ALLOCSIZE];
    int i;
    minwaitscalefactor = 0.05;
    sleeptime = 25;
    for (i = 0; i < SIZE; i++) {
        // top row: 20px-wide cells starting at x=20, y=210
        Xpos[i] = 20 + i * 20;
        Ypos[i] = 210;
        ArrayFrame[i] = createRectangle("", Xpos[i], Ypos[i] + 2, 20, 20, false);
        SortIndex[i] = createLabel(String.valueOf(i), Xpos[i], Ypos[i] + 22, false);
        SortIndex[i].setLabelVisible(visableIndices);
        SortIndex[i].setLabelColor(Color.BLUE);
        SortData[i] = (int) (99 * Math.random());
    }
    RecreateArray();
}
/** Dispatches to the sorting routine identified by the given function code; unknown codes are ignored. */
protected void CallFunction(int function) {
    if (function == BUBBLESORT)
        BubbleSort();
    else if (function == SELECTIONSORT)
        SelectionSort();
    else if (function == INSERTIONSORT)
        InsertionSort();
    else if (function == MERGESORT)
        MergeSort();
    else if (function == QUICKSORT)
        Quicksort();
    else if (function == SHELLSORT)
        ShellSort();
}
/** Flips the visibility of the index labels under the top-row cells and repaints. */
public void ToggleIndicesViewable() {
    visableIndices = !visableIndices;
    for (int idx = 0; idx < SIZE; idx++) {
        boolean currentlyVisible = SortIndex[idx].isLabelVisible();
        SortIndex[idx].setLabelVisible(!currentlyVisible);
    }
    repaint();
}
/** Refills the array with fresh random values in [0, 100) and redraws everything. */
public void Randomize() {
    for (int idx = 0; idx < SIZE; idx++)
        SortData[idx] = (int) (Math.random() * 100);
    RecreateArray();
}
/** Discards and rebuilds all value labels (and bars, in graphical mode) from SortData, then repaints. */
private void RecreateArray() {
    int i;
    for (i = 0; i < SIZE; i++) {
        removeAny(SortElements[i]);
        removeAny(GraphicalElements[i]);
    }
    if (Graphical) {
        for (i = 0; i < SIZE; i++)
            GraphicalElements[i] = BuildGrapicElement(SortData[i], Xpos[i], Ypos[i]);
    }
    for (i = 0; i < SIZE; i++)
        SortElements[i] = createLabel(String.valueOf(SortData[i]), Xpos[i], Ypos[i]);
    repaint();
}
/** Y coordinate for a bar of the given value so that its bottom edge sits at the cell baseline y. */
private int GetYPosGraphic(int value, int y) {
    return (int) (y - ((((double) value) / 100 * MAXHEIGHT) / 2 + 12));
}
/** Creates the bar rectangle for a value; height is value/100 of MAXHEIGHT. */
private GElement BuildGrapicElement(int value, int xbase, int ybase) {
    int height = (int) (((double) value) / 100 * MAXHEIGHT);
    return createRectangle("", xbase, GetYPosGraphic(value, ybase), 10, height, false);
}
/** Switches to numbers-only display and rebuilds the view. */
public void SwitchToNumeric() {
    Graphical = false;
    RecreateArray();
}
/**
 * Animates the exchange of the elements at index1 and index2 (labels and, in graphical mode, bars
 * travelling along straight paths), then swaps the underlying data and element references.
 */
private void swap(int index1, int index2) {
    int i;
    Vector2D G1from = null;
    Vector2D G1to = null;
    Vector2D G2from = null;
    Vector2D G2to = null;
    Vector2D path1G[] = null;
    Vector2D path2G[] = null;
    Vector2D path1[];
    Vector2D path2[];
    // one animation step per ~2px of horizontal distance, at least 10 steps
    int numsteps = Math.max((int) (Math.abs(SortElements[index1].getPositionX() - SortElements[index2].getPositionX()) / 2),
                            10);
    double G1newY;
    double G2newY;
    if (Graphical) {
        // bars keep their own height offset but adopt the other cell's baseline
        G1newY = GraphicalElements[index1].getPositionY() - Ypos[index1] + Ypos[index2];
        G2newY = GraphicalElements[index2].getPositionY() - Ypos[index2] + Ypos[index1];
        G1from = GraphicalElements[index1].getPosition();
        G2from = GraphicalElements[index2].getPosition();
        G1to = new Vector2D(G2from.getX(), G1newY);
        G2to = new Vector2D(G1from.getX(), G2newY);
    }
    path1 = createPath(SortElements[index1].getPosition(), SortElements[index2].getPosition(), numsteps);
    path2 = createPath(SortElements[index2].getPosition(), SortElements[index1].getPosition(), numsteps);
    if (Graphical) {
        path1G = createPath(G1from, G1to, numsteps);
        path2G = createPath(G2from, G2to, numsteps);
    }
    for (i = 0; i < numsteps; i++) {
        SortElements[index1].setPosition(path1[i]);
        SortElements[index2].setPosition(path2[i]);
        if (Graphical) {
            GraphicalElements[index1].setPosition(path1G[i]);
            GraphicalElements[index2].setPosition(path2G[i]);
        }
        repaintwaitmin();
    }
    // commit the swap in the data array and keep element references in sync with it
    int tmp = SortData[index1];
    SortData[index1] = SortData[index2];
    SortData[index2] = tmp;
    GElement tmp2 = SortElements[index1];
    SortElements[index1] = SortElements[index2];
    SortElements[index2] = tmp2;
    if (Graphical) {
        tmp2 = GraphicalElements[index1];
        GraphicalElements[index1] = GraphicalElements[index2];
        GraphicalElements[index2] = tmp2;
    }
}
/** Animates moving the element at index1 to slot index2, removing the old element at index1. */
protected void moveto(int index1, int index2) {
    moveto(index1, index2, true);
}
/**
 * Animates moving the element at index1 to slot index2 and copies its value into SortData[index2].
 * Any element previously shown at index2 is removed first.
 * @param killOldindex1 whether the visuals at index1 are removed after the move
 */
protected void moveto(int index1, int index2, boolean killOldindex1) {
    int i;
    Vector2D G1from = null;
    Vector2D G1to = null;
    Vector2D path1G[] = null;
    Vector2D path1[];
    // step count based on travel distance, at least 10 steps
    // NOTE(review): the '/ 4' only divides the squared y term, not the whole sum —
    // possibly intended as ((dx^2 + dy^2) / 4); confirm before "fixing"
    int numsteps = (int) Math.max(Math.sqrt((Xpos[index1] - Xpos[index2]) * (Xpos[index1] - Xpos[index2]) +
                                            (Ypos[index1] - Ypos[index2]) * (Ypos[index1] - Ypos[index2]) / 4),
                                  10);
    double G1newY;
    double G1newX;
    if (Graphical) {
        G1newY = GetYPosGraphic(SortData[index1], Ypos[index2]);
        G1newX = Xpos[index2];
        G1from = GraphicalElements[index1].getPosition();
        G1to = new Vector2D(G1newX, G1newY);
        if (GraphicalElements[index2] != null)
            removeAny(GraphicalElements[index2]);
        // new bar starts at index1's position and is animated towards index2 below
        GraphicalElements[index2] = BuildGrapicElement(SortData[index1], Xpos[index1], Ypos[index1]);
    }
    path1 = createPath(SortElements[index1].getPosition(), new Vector2D(Xpos[index2], Ypos[index2]), numsteps);
    if (SortElements[index2] != null)
        removeAny(SortElements[index2]);
    // new label also starts at index1's position, then travels along path1
    SortElements[index2] = createLabel(String.valueOf(SortData[index1]), Xpos[index1], Ypos[index1]);
    if (Graphical) {
        path1G = createPath(G1from, G1to, numsteps);
    }
    if (killOldindex1) {
        if (Graphical)
            removeAny(GraphicalElements[index1]);
        removeAny(SortElements[index1]);
    }
    for (i = 0; i < numsteps; i++) {
        SortElements[index2].setPosition(path1[i]);
        if (Graphical) {
            GraphicalElements[index2].setPosition(path1G[i]);
        }
        repaintwaitmin();
    }
    SortData[index2] = SortData[index1];
}
/**
 * Bounds-checked comparison; returns false when either index is out of range instead of comparing.
 * Index SIZE is deliberately allowed (it is the temp slot used by insertion/shell sort), hence
 * the '&gt; SIZE' rather than '&gt;= SIZE' checks.
 */
protected boolean lessthanBC(int index1, int index2) {
    if ((index1 < 0) || (index1 > SIZE) ||
        (index2 < 0) || (index2 > SIZE))
        return false;
    return lessthan(index1, index2);
}
/** Compares SortData[index1] &lt; SortData[index2], flashing both elements red while comparing. */
public boolean lessthan(int index1, int index2) {
    SortElements[index1].setLabelColor(Color.RED);
    SortElements[index2].setLabelColor(Color.RED);
    if (Graphical) {
        GraphicalElements[index1].setColor(Color.RED);
        GraphicalElements[index2].setColor(Color.RED);
    }
    repaintwait();
    SortElements[index1].setLabelColor(Color.BLACK);
    SortElements[index2].setLabelColor(Color.BLACK);
    if (Graphical) {
        GraphicalElements[index1].setColor(Color.BLACK);
        GraphicalElements[index2].setColor(Color.BLACK);
    }
    return SortData[index1] < SortData[index2];
}
/**
 * Grays out the cell at 'index' (label, frame, index label, bar); with graybottom also the
 * corresponding bottom-row cell at index+SIZE.
 * NOTE(review): only the bottom-row bar is null-checked; the top-row bar apparently is always
 * present when Graphical — confirm.
 */
protected void GrayElement(int index, boolean graybottom) {
    SortElements[index].setLabelColor(Color.LIGHT_GRAY);
    ArrayFrame[index].setColor(Color.LIGHT_GRAY);
    SortIndex[index].setLabelColor(Color.LIGHT_GRAY);
    if (Graphical)
        GraphicalElements[index].setColor(Color.LIGHT_GRAY);
    if (graybottom) {
        SortElements[index + SIZE].setLabelColor(Color.LIGHT_GRAY);
        ArrayFrame[index + SIZE].setColor(Color.LIGHT_GRAY);
        SortIndex[index + SIZE].setLabelColor(Color.LIGHT_GRAY);
        if (Graphical && GraphicalElements[index + SIZE] != null)
            GraphicalElements[index + SIZE].setColor(Color.LIGHT_GRAY);
    }
}
/** Restores normal colors for the cell at 'index' (and, with ungraybottom, the bottom-row cell). */
protected void UnGrayElement(int index, boolean ungraybottom) {
    SortElements[index].setLabelColor(Color.BLACK);
    ArrayFrame[index].setColor(Color.BLACK);
    SortIndex[index].setLabelColor(Color.BLUE);
    if (Graphical)
        GraphicalElements[index].setColor(Color.BLACK);
    if (ungraybottom) {
        SortElements[index + SIZE].setLabelColor(Color.BLACK);
        ArrayFrame[index + SIZE].setColor(Color.BLACK);
        SortIndex[index + SIZE].setLabelColor(Color.BLUE);
        if (Graphical && (GraphicalElements[index + SIZE] != null))
            GraphicalElements[index + SIZE].setColor(Color.BLACK);
    }
}
/** Un-grays cells inside [low, high] and grays out all cells outside it; optionally the bottom row too. */
protected void HighlightRange(int low, int high, boolean highlightbottom) {
    for (int idx = 0; idx < SIZE; idx++) {
        boolean inRange = (low <= idx) && (idx <= high);
        if (inRange)
            UnGrayElement(idx, highlightbottom);
        else
            GrayElement(idx, highlightbottom);
    }
}
/**
 * Merges the two adjacent sorted runs [index1low, index1high] and [index2low, index2high] into the
 * bottom row (offset +SIZE), then moves the merged result back into the top row.
 */
protected void Merge(int index1low, int index1high, int index2low, int index2high) {
    int i;
    int oldlow = index1low;
    for (i = index1low; i <= index2high; i++) {
        if (index1low > index1high)         // left run exhausted: take from the right
            moveto(index2low++, i + SIZE, true);
        else if (index2low > index2high)    // right run exhausted: take from the left
            moveto(index1low++, i + SIZE, true);
        else if (lessthan(index1low, index2low))
            moveto(index1low++, i + SIZE, true);
        else
            moveto(index2low++, i + SIZE, true);
    }
    // copy the merged run back up
    for (i = oldlow; i <= index2high; i++)
        moveto(i + SIZE, i, true);
}
/**
 * Recursive merge sort over [lowindex, highindex]: highlights the active range (pausing a bit
 * longer so it is visible), recurses on both halves, then merges them via the bottom row.
 */
protected void MergeSort(int lowindex, int highindex) {
    HighlightRange(lowindex, highindex, true);
    waitscalefactor *= 2; // double the pause while the range highlight is shown
    repaintwait();
    waitscalefactor /= 2;
    if (lowindex < highindex) {
        int mid = (lowindex + highindex) / 2; // no overflow risk: indices are < 2*SIZE
        MergeSort(lowindex, mid);
        MergeSort(mid + 1, highindex);
        HighlightRange(lowindex, highindex, true);
        repaintwait();
        Merge(lowindex, mid, mid + 1, highindex);
    }
}
/**
 * Merge-sort entry point: builds the temporary bottom row (indices SIZE .. 2*SIZE-1), runs the
 * recursive sort, then removes the bottom row again.
 */
protected void MergeSort() {
    int i;
    for (i = SIZE; i < 2 * SIZE; i++) {
        Xpos[i] = 20 + (i - SIZE) * 20;
        Ypos[i] = 440;
        ArrayFrame[i] = createRectangle("", Xpos[i], Ypos[i] + 2, 20, 20, false);
        SortIndex[i] = createLabel(String.valueOf(i - SIZE), Xpos[i], Ypos[i] + 22, false);
        SortIndex[i].setLabelVisible(visableIndices);
        SortIndex[i].setLabelColor(Color.BLUE);
        SortElements[i] = createLabel("", Xpos[i], Ypos[i]);
    }
    MergeSort(0, SIZE - 1);
    for (i = SIZE; i < 2 * SIZE; i++) {
        removeAny(ArrayFrame[i]);
        // the original removed SortIndex[i] three times (copy-paste duplication); once suffices
        removeAny(SortIndex[i]);
        removeAny(SortElements[i]);
    }
}
/**
 * Insertion sort over the subsequence offset, offset+inc, offset+2*inc, ... using slot SIZE as the
 * temporary cell. With inc=1/offset=0 this is plain insertion sort; shell sort calls it per stride.
 */
protected void InsertionSortSkip(int inc, int offset) {
    int i,j;
    for (i = inc + offset; i < SIZE; i = i + inc) {
        moveto(i, SIZE, true); // lift current element into the temp slot
        // shift larger elements of the subsequence one stride to the right
        for (j = i - inc; lessthanBC(SIZE, j); j = j - inc)
            moveto(j, j + inc, true);
        moveto(SIZE, j + inc, true); // drop the temp element into the gap
    }
}
/** Creates the "Temp" cell at slot SIZE used as scratch space by the insertion-based sorts. */
private void CreateTempCell() {
    Xpos[SIZE] = 435;
    Ypos[SIZE] = 440;
    ArrayFrame[SIZE] = createRectangle("", Xpos[SIZE], Ypos[SIZE] + 2, 20, 20, false);
    SortIndex[SIZE] = createLabel("Temp", Xpos[SIZE], Ypos[SIZE] + 22, false);
    SortIndex[SIZE].setLabelVisible(visableIndices);
    SortIndex[SIZE].setLabelColor(Color.BLUE);
    if (Graphical)
        GraphicalElements[SIZE] = BuildGrapicElement(0, Xpos[SIZE], Ypos[SIZE]);
    SortElements[SIZE] = createLabel("", Xpos[SIZE], Ypos[SIZE]);
}
/** Removes the temp cell created by CreateTempCell(). */
private void RemoveTempCell() {
    removeAny(ArrayFrame[SIZE]);
    removeAny(SortIndex[SIZE]);
    if (Graphical)
        removeAny(GraphicalElements[SIZE]);
}
/**
 * Shell sort: for decreasing strides, insertion-sorts each stride-subsequence, graying out the
 * elements not in the currently active subsequence.
 */
public void ShellSort() {
    int inc;
    int offset;
    int k;
    CreateTempCell();
    for (inc = SIZE/2; inc >=1; inc /= 2)
        for (offset = 0; offset < inc; offset++) {
            // highlight only the elements belonging to the current stride-subsequence
            for (k=0;k<SIZE;k++)
                if ((k - offset) % inc == 0)
                    UnGrayElement(k,false);
                else
                    GrayElement(k,false);
            InsertionSortSkip(inc,offset);
        }
    RemoveTempCell();
}
/** Plain insertion sort: InsertionSortSkip with stride 1 and offset 0, using the temp cell. */
public void InsertionSort() {
    CreateTempCell();
    InsertionSortSkip(1,0);
    RemoveTempCell();
}
/** Selection sort: repeatedly finds the minimum of the unsorted suffix and swaps it into place. */
public void SelectionSort() {
    for (int pos = 0; pos < SIZE - 1; pos++) {
        int minIndex = pos;
        for (int scan = pos + 1; scan < SIZE; scan++) {
            if (lessthan(scan, minIndex))
                minIndex = scan;
        }
        if (minIndex != pos)
            swap(pos, minIndex);
    }
}
/** Bubble sort: each pass bubbles the largest remaining element to the end of the unsorted prefix. */
public void BubbleSort() {
    for (int pass = 0; pass < SIZE - 1; pass++) {
        for (int k = 0; k + 1 < SIZE - pass; k++) {
            if (lessthan(k + 1, k))
                swap(k + 1, k);
        }
    }
}
/**
 * Animated Hoare-style partition step of quicksort.
 * Element at index {@code pivot} is the pivot; indices in
 * [{@code low}, {@code high}] are partitioned around it and the pivot is
 * finally swapped into its sorted position, which is returned.
 * Draws "i"/"j" arrows that walk toward each other and (in graphical
 * mode) a red horizontal line at the pivot's bar height.
 *
 * @param pivot index of the pivot element (caller passes the range start)
 * @param low   first index of the region to partition (pivot + 1)
 * @param high  last index of the region to partition
 * @return final index of the pivot element
 */
protected int partition(int pivot, int low, int high) {
    int i,ydelta;
    GElement pivotheight = null;
    // Arrow geometry depends on whether index labels are shown.
    if (visableIndices)
        ydelta = 35;
    else
        ydelta = 15;
    if (Graphical) {
        // Red reference line at the pivot's bar height, spanning the array.
        pivotheight = createRectangle("", Xpos[SIZE/2], GetYPosGraphic(SortData[pivot]*2, Ypos[0]), SIZE*(Xpos[1]-Xpos[0]),1, false);
        pivotheight.setColor(Color.RED);
    }
    GElementArrow iarrow = createArrow(Xpos[low], Ypos[low] + 50+ydelta, Xpos[low], Ypos[low] + ydelta, 20, false);
    GElementArrow jarrow = createArrow(Xpos[high], Ypos[high] +50+ydelta, Xpos[high], Ypos[high] +ydelta, 20, false);
    GElementLabel ilabel = createLabel("i", Xpos[low], Ypos[low] + 50+ydelta+5);
    GElementLabel jlabel = createLabel("j", Xpos[high], Ypos[high] + 50+ydelta+5);
    Vector2D patharrow[];
    Vector2D pathlabel[];
    while (low <= high) {
        // Advance i (low) over elements not greater than the pivot,
        // animating the arrow/label 20px to the right in 10 steps.
        while (low <= high) {
            if (!lessthan(pivot, low)) {
                /* patharrow = createPath(iarrow.getPosition(),
                new Vector2D(iarrow.getPositionX() + Xpos[low + 1] - Xpos[low],
                iarrow.getPositionY() + Ypos[low + 1] - Ypos[low]), 10);
                pathlabel = createPath(ilabel.getPosition(),
                new Vector2D(ilabel.getPositionX() + Xpos[low + 1] - Xpos[low],
                ilabel.getPositionY() + Ypos[low + 1] - Ypos[low]), 10);
                */
                patharrow = createPath(iarrow.getPosition(),
                    new Vector2D(iarrow.getPositionX() + 20,
                    iarrow.getPositionY()), 10);
                pathlabel = createPath(ilabel.getPosition(),
                    new Vector2D(ilabel.getPositionX()+20,
                    ilabel.getPositionY()), 10);
                for (i = 0; i < 10; i++) {
                    iarrow.moveToPosition(patharrow[i]);
                    ilabel.moveToPosition(pathlabel[i]);
                    repaintwaitmin();
                }
                low++;
            } else
                break;
        }
        // Retreat j (high) over elements not smaller than the pivot,
        // animating 20px to the left.
        while (high >= low) {
            if (!lessthan(high, pivot)) {
                /* patharrow = createPath(jarrow.getPosition(),
                new Vector2D(jarrow.getPositionX() + Xpos[high-1] - Xpos[high],
                jarrow.getPositionY() + Ypos[high-1] - Ypos[high]), 10);
                pathlabel = createPath(jlabel.getPosition(),
                new Vector2D(jlabel.getPositionX() + Xpos[high-1] - Xpos[high],
                jlabel.getPositionY() + Ypos[high-1] - Ypos[high]), 10); */
                patharrow = createPath(jarrow.getPosition(),
                    new Vector2D(jarrow.getPositionX() -20,
                    jarrow.getPositionY() ), 10);
                pathlabel = createPath(jlabel.getPosition(),
                    new Vector2D(jlabel.getPositionX()-20 ,
                    jlabel.getPositionY()), 10);
                high--;
                for (i = 0; i < 10; i++) {
                    jarrow.moveToPosition(patharrow[i]);
                    jlabel.moveToPosition(pathlabel[i]);
                    repaintwaitmin();
                }
            } else
                break;
        }
        // Pointers stopped on an out-of-order pair: swap and continue.
        if (low < high)
            swap(low, high);
        repaintwait();
    }
    // Place the pivot into its final position (index high).
    if (high != pivot)
        swap(high, pivot);
    removeAny(iarrow);
    removeAny(jarrow);
    removeAny(ilabel);
    removeAny(jlabel);
    if (Graphical)
        removeAny(pivotheight);
    return high;
}
/**
 * Recursive quicksort over the inclusive index range
 * [lowindex, highindex]. The first element of the range is used as the
 * pivot; the range is highlighted while being processed and
 * un-highlighted afterwards.
 */
protected void Quicksort(int lowindex, int highindex) {
    HighlightRange(lowindex, highindex, false);
    if (lowindex < highindex) {
        final int pivotPos = partition(lowindex, lowindex + 1, highindex);
        Quicksort(lowindex, pivotPos - 1);
        Quicksort(pivotPos + 1, highindex);
    }
    HighlightRange(lowindex, highindex, false);
}
/** Entry point: quicksorts the whole array (indices 0 .. SIZE-1). */
public void Quicksort() {
    Quicksort(0, SIZE - 1);
}
/**
 * Switches the visualization to graphical (bar) mode and rebuilds the
 * on-screen array elements in that representation.
 */
public void SwitchToGraphic() {
    Graphical = true;
    RecreateArray();
}
}
| |
/*
* Jicofo, the Jitsi Conference Focus.
*
* Copyright @ 2015 Atlassian Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jitsi.impl.protocol.xmpp.colibri;
import net.java.sip.communicator.impl.protocol.jabber.extensions.colibri.*;
import net.java.sip.communicator.impl.protocol.jabber.extensions.jingle.*;
import net.java.sip.communicator.service.protocol.*;
import net.java.sip.communicator.util.Logger;
import org.jitsi.jicofo.*;
import org.jitsi.protocol.xmpp.*;
import org.jitsi.protocol.xmpp.colibri.*;
import org.jitsi.protocol.xmpp.util.*;
import org.jitsi.service.neomedia.*;
import org.jitsi.util.*;
import org.jitsi.xmpp.util.*;
import org.jivesoftware.smack.packet.*;
import java.util.*;
/**
* Default implementation of {@link ColibriConference} that uses Smack for
* handling XMPP connection. Handles conference state, allocates and expires
* channels per single conference ID. Conference ID is stored after first
* allocate channels request.
*
* @author Pawel Domas
*/
public class ColibriConferenceImpl
implements ColibriConference
{
private final static Logger logger
= Logger.getLogger(ColibriConferenceImpl.class);
/**
* The instance of XMPP connection.
*/
private final XmppConnection connection;
/**
* XMPP address of videobridge component.
*/
private String jitsiVideobridge;
/**
* The {@link ColibriConferenceIQ} that stores the state of whole conference
*/
private ColibriConferenceIQ conferenceState = new ColibriConferenceIQ();
/**
* Synchronization root to sync access to {@link #colibriBuilder} and
* {@link #conferenceState}.
*/
private final Object syncRoot = new Object();
/**
* Custom type of semaphore that allows only 1 thread to send initial
* Colibri IQ that creates the conference.
* It means that if {@link #conferenceState} has no ID then only 1 thread
* will be allowed to send allocate request to the bridge. Other threads
* will be suspended until we have the response. Error response to create
* request will cause <tt>OperationFailedException</tt> on waiting threads.
*
* By "create request" we mean a channel allocation Colibri IQ that has no
* conference id specified.
*/
private final ConferenceCreationSemaphore createConfSemaphore
= new ConferenceCreationSemaphore();
/**
* Utility used for building Colibri queries.
*/
private final ColibriBuilder colibriBuilder
= new ColibriBuilder(conferenceState);
/**
* Flag used to figure out if Colibri conference has been allocated during
* last {@link #createColibriChannels(boolean, String, boolean, List)} call.
*/
private boolean justAllocated = false;
/**
* Flag indicates that this instance has been disposed and should not be
* used anymore.
*/
private boolean disposed;
/**
* Creates new instance of <tt>ColibriConferenceImpl</tt>.
* @param connection XMPP connection object that wil be used by new
* instance.
*/
public ColibriConferenceImpl(XmppConnection connection)
{
this.connection = connection;
}
/**
* Checks if this instance has been disposed already and if so prints
* a warning message.
*
* @param operationName the name of the operation that will not happen and
* should be mentioned in the warning message.
*
* @return <tt>true</tt> if this instance has been disposed already or
* <tt>false</tt> otherwise.
*/
/**
 * Guard used at the start of every operation: logs a warning and reports
 * {@code true} when this conference instance has already been disposed.
 *
 * @param operationName the name of the operation being skipped, included
 *        in the warning message.
 *
 * @return {@code true} when disposed (the caller must bail out),
 *         {@code false} otherwise.
 */
private boolean checkIfDisposed(String operationName)
{
    if (!disposed)
    {
        return false;
    }
    logger.warn("Not doing " + operationName + " - instance disposed");
    return true;
}
/**
 * {@inheritDoc}
 *
 * The conference-ID check and the bridge assignment are performed under
 * {@link #syncRoot}: every other access to {@link #conferenceState} in
 * this class is synchronized on it, and an unsynchronized read here could
 * miss an ID that an allocating thread has just stored, allowing the
 * bridge to be changed on an active conference.
 */
@Override
public void setJitsiVideobridge(String videobridgeJid)
{
    synchronized (syncRoot)
    {
        if (!StringUtils.isNullOrEmpty(conferenceState.getID()))
        {
            throw new IllegalStateException(
                "Cannot change the bridge on active conference");
        }
        this.jitsiVideobridge = videobridgeJid;
    }
}
/**
* {@inheritDoc}
*/
@Override
public String getJitsiVideobridge()
{
return this.jitsiVideobridge;
}
/**
* {@inheritDoc}
*/
@Override
public String getConferenceId()
{
synchronized (syncRoot)
{
return conferenceState.getID();
}
}
/**
* {@inheritDoc}
*/
@Override
public void setConfig(JitsiMeetConfig config)
{
synchronized (syncRoot)
{
colibriBuilder.setChannelLastN(config.getChannelLastN());
colibriBuilder.setAdaptiveLastN(config.isAdaptiveLastNEnabled());
colibriBuilder.setAdaptiveSimulcast(
config.isAdaptiveSimulcastEnabled());
colibriBuilder.setSimulcastMode(config.getSimulcastMode());
}
}
/**
* {@inheritDoc}
*/
@Override
public ColibriConferenceIQ createColibriChannels(
        boolean useBundle,
        String endpointName,
        boolean peerIsInitiator,
        List<ContentPacketExtension> contents)
    throws OperationFailedException
{
    ColibriConferenceIQ allocateRequest;
    try
    {
        synchronized (syncRoot)
        {
            // Only if not in 'disposed' state
            if (checkIfDisposed("createColibriChannels"))
                return null;
            // May block: while the conference has no ID yet only one
            // thread (the "creator") is let through; the rest wait for
            // its response (see ConferenceCreationSemaphore).
            acquireCreateConferenceSemaphore(endpointName);
            colibriBuilder.reset();
            colibriBuilder.addAllocateChannelsReq(
                useBundle, endpointName, peerIsInitiator, contents);
            allocateRequest = colibriBuilder.getRequest(jitsiVideobridge);
        }
        if (logger.isDebugEnabled())
            logger.debug(Thread.currentThread() + " sending alloc request");
        logRequest("Channel allocate request", allocateRequest);
        // FIXME retry allocation on timeout ?
        // Blocking send outside the lock; null response means timeout.
        Packet response = sendAllocRequest(endpointName, allocateRequest);
        logResponse("Channel allocate response", response);
        if (logger.isDebugEnabled())
            logger.debug(
                Thread.currentThread() +
                " - have alloc response? " + (response != null));
        if (response == null)
        {
            throw new OperationFailedException(
                "Failed to allocate colibri channels: response is null."
                    + " Maybe the response timed out.",
                OperationFailedException.NETWORK_FAILURE);
        }
        else if (response.getError() != null)
        {
            throw new OperationFailedException(
                "Failed to allocate colibri channels: " + response.toXML(),
                OperationFailedException.GENERAL_ERROR);
        }
        else if (!(response instanceof ColibriConferenceIQ))
        {
            throw new OperationFailedException(
                "Failed to allocate colibri channels: response is not a"
                    + " colibri conference",
                OperationFailedException.GENERAL_ERROR);
        }
        boolean conferenceExisted = getConferenceId() != null;
        /*
         * Update the complete ColibriConferenceIQ representation maintained by
         * this instance with the information given by the (current) response.
         */
        // FIXME: allocations!!! should be static method
        synchronized (syncRoot)
        {
            ColibriAnalyser analyser = new ColibriAnalyser(conferenceState);
            analyser.processChannelAllocResp((ColibriConferenceIQ) response);
            // First successful allocation for this instance: remember it
            // so hasJustAllocated() reports it exactly once.
            if (!conferenceExisted && getConferenceId() != null)
            {
                justAllocated = true;
            }
        }
        /*
         * Formulate the result to be returned to the caller which is a subset
         * of the whole conference information kept by this CallJabberImpl and
         * includes the remote channels explicitly requested by the method
         * caller and their respective local channels.
         */
        return ColibriAnalyser.getResponseContents(
            (ColibriConferenceIQ) response, contents);
    }
    finally
    {
        // Always release, whether this thread was the creator or a waiter.
        releaseCreateConferenceSemaphore(endpointName);
    }
}
/**
* Obtains create conference semaphore. If the conference does not exist yet
* (ID == null) then only first thread will be allowed to obtain it and all
* other threads will have to wait for it to process response packet.
*
* Methods exposed for unit test purpose.
*
* @param endpointName the name of Colibri endpoint(conference participant)
*
* @return <tt>true</tt> if current thread is conference creator.
*
* @throws OperationFailedException if conference creator thread has failed
* to allocate new conference and current thread has been waiting
* to acquire the semaphore.
*/
protected boolean acquireCreateConferenceSemaphore(String endpointName)
throws OperationFailedException
{
return createConfSemaphore.acquire();
}
/**
* Releases "create conference semaphore". Must be called to release the
* semaphore possibly in "finally" block.
*
* @param endpointName the name of colibri conference endpoint(participant)
*/
protected void releaseCreateConferenceSemaphore(String endpointName)
{
createConfSemaphore.release();
}
/**
* Sends Colibri packet and waits for response in
* {@link #createColibriChannels(boolean, String, boolean, List)} call.
*
* Exposed for unit tests purpose.
*
* @param endpointName Colibri endpoint name(participant)
* @param request Colibri IQ to be send towards the bridge.
*
* @return <tt>Packet</tt> which is JVB response or <tt>null</tt> if
* the request timed out.
*/
protected Packet sendAllocRequest(String endpointName,
ColibriConferenceIQ request)
{
return connection.sendPacketAndGetReply(request);
}
/**
 * {@inheritDoc}
 */
public boolean hasJustAllocated()
{
    synchronized (syncRoot)
    {
        // Read-and-clear: the "just allocated" event is reported to at
        // most one caller.
        boolean wasJustAllocated = this.justAllocated;
        this.justAllocated = false;
        return wasJustAllocated;
    }
}
/**
 * Debug-logs an XMPP response packet, pretty-printed by inserting a
 * newline after each '>'.
 *
 * NOTE: createColibriChannels() calls this *before* null-checking the
 * response, so a timed-out (null) response must be tolerated here instead
 * of triggering an NPE on the String operations below.
 *
 * @param message prefix describing the response being logged.
 * @param response the packet to log; may be {@code null} (e.g. timeout).
 */
private void logResponse(String message, Packet response)
{
    if (!logger.isDebugEnabled())
        return;
    String responseXml = IQUtils.responseToXML(response);
    if (responseXml == null)
    {
        logger.debug(message + " <null>");
        return;
    }
    responseXml = responseXml.replace(">",">\n");
    logger.debug(message + "\n" + responseXml);
}
/**
 * Debug-logs an outgoing IQ, pretty-printed by inserting a newline after
 * each '>'.
 *
 * @param message prefix describing the request being logged.
 * @param iq the request about to be sent.
 */
private void logRequest(String message, IQ iq)
{
    if (!logger.isDebugEnabled())
        return;
    logger.debug(message + "\n" + iq.toXML().replace(">",">\n"));
}
/**
 * {@inheritDoc}
 */
@Override
public void expireChannels(ColibriConferenceIQ channelInfo)
{
    ColibriConferenceIQ request;
    synchronized (syncRoot)
    {
        // Nothing to do once the instance has been disposed.
        if (checkIfDisposed("expireChannels"))
            return;
        colibriBuilder.reset();
        colibriBuilder.addExpireChannelsReq(channelInfo);
        request = colibriBuilder.getRequest(jitsiVideobridge);
    }
    // The builder returns null when there is nothing to send.
    if (request == null)
        return;
    logRequest("Expire peer channels", request);
    connection.sendPacket(request);
}
/**
* {@inheritDoc}
*/
@Override
public void updateRtpDescription(
Map<String, RtpDescriptionPacketExtension> map,
ColibriConferenceIQ localChannelsInfo)
{
ColibriConferenceIQ iq;
synchronized (syncRoot)
{
// Only if not in 'disposed' state
if (checkIfDisposed("updateRtpDescription"))
return;
colibriBuilder.reset();
colibriBuilder.addRtpDescription(map, localChannelsInfo);
iq = colibriBuilder.getRequest(jitsiVideobridge);
}
if (iq != null)
{
logRequest("Sending RTP desc update: ", iq);
connection.sendPacket(iq);
}
}
/**
* {@inheritDoc}
*/
@Override
public void updateTransportInfo(
Map<String, IceUdpTransportPacketExtension> map,
ColibriConferenceIQ localChannelsInfo)
{
ColibriConferenceIQ iq;
synchronized (syncRoot)
{
if (checkIfDisposed("updateTransportInfo"))
return;
colibriBuilder.reset();
colibriBuilder.addTransportUpdateReq(map, localChannelsInfo);
iq = colibriBuilder.getRequest(jitsiVideobridge);
}
if (iq != null)
{
logRequest("Sending transport info update: ", iq);
connection.sendPacket(iq);
}
}
/**
* {@inheritDoc}
*/
@Override
public void updateSourcesInfo(MediaSSRCMap ssrcs,
MediaSSRCGroupMap ssrcGroups,
ColibriConferenceIQ localChannelsInfo)
{
ColibriConferenceIQ iq;
synchronized (syncRoot)
{
if (checkIfDisposed("updateSourcesInfo"))
return;
if (StringUtils.isNullOrEmpty(conferenceState.getID()))
{
logger.error(
"Have not updated SSRC info on the bridge - "
+ "no conference in progress");
return;
}
colibriBuilder.reset();
boolean send = false;
// ssrcs
if (ssrcs != null
&& colibriBuilder.addSSSRCInfo(
ssrcs.toMap(), localChannelsInfo))
{
send = true;
}
// ssrcGroups
if (ssrcGroups != null
&& colibriBuilder.addSSSRCGroupsInfo(
ssrcGroups.toMap(), localChannelsInfo))
{
send = true;
}
iq = send ? colibriBuilder.getRequest(jitsiVideobridge) : null;
}
if (iq != null)
{
logRequest("Sending SSRC update: ", iq);
connection.sendPacket(iq);
}
}
/**
* {@inheritDoc}
*/
@Override
public void updateBundleTransportInfo(
IceUdpTransportPacketExtension transport,
ColibriConferenceIQ localChannelsInfo)
{
ColibriConferenceIQ iq;
synchronized (syncRoot)
{
if (checkIfDisposed("updateBundleTransportInfo"))
return;
colibriBuilder.reset();
colibriBuilder.addBundleTransportUpdateReq(
transport, localChannelsInfo);
iq = colibriBuilder.getRequest(jitsiVideobridge);
}
if (iq != null)
{
logRequest("Sending bundle transport info update: ", iq);
connection.sendPacket(iq);
}
}
/**
* {@inheritDoc}
*/
@Override
public void expireConference()
{
    ColibriConferenceIQ iq;
    synchronized (syncRoot)
    {
        if (checkIfDisposed("expireConference"))
            return;
        colibriBuilder.reset();
        if (StringUtils.isNullOrEmpty(conferenceState.getID()))
        {
            logger.info("Nothing to expire - no conference allocated yet");
            return;
        }
        // Expire all channels
        if (colibriBuilder.addExpireChannelsReq(conferenceState))
        {
            iq = colibriBuilder.getRequest(jitsiVideobridge);
            if (iq != null)
            {
                logRequest("Expire conference: ", iq);
                connection.sendPacket(iq);
            }
        }
        // Reset conference state
        // NOTE(review): colibriBuilder was constructed around the old
        // conferenceState instance and keeps referencing it after this
        // reassignment. Harmless today because the instance is disposed
        // immediately below, but verify before ever reusing the builder.
        conferenceState = new ColibriConferenceIQ();
        // Mark instance as 'disposed'
        dispose();
    }
}
/**
* {@inheritDoc}
*/
@Override
public void dispose()
{
this.disposed = true;
}
/**
* {@inheritDoc}
*/
@Override
public boolean isDisposed()
{
return disposed;
}
/**
* {@inheritDoc}
*/
@Override
public boolean muteParticipant(ColibriConferenceIQ channelsInfo,
                               boolean mute)
{
    if (checkIfDisposed("muteParticipant"))
        return false;
    ColibriConferenceIQ request = new ColibriConferenceIQ();
    request.setID(conferenceState.getID());
    request.setName(conferenceState.getName());
    // Muting operates on the participant's audio channels only.
    ColibriConferenceIQ.Content audioContent
        = channelsInfo.getContent("audio");
    if (audioContent == null || StringUtils.isNullOrEmpty(request.getID()))
    {
        logger.error("Failed to mute - no audio content." +
            " Conf ID: " + request.getID());
        return false;
    }
    // Build a minimal content that only carries per-channel direction
    // changes for the channels we want to (un)mute.
    ColibriConferenceIQ.Content contentRequest
        = new ColibriConferenceIQ.Content(audioContent.getName());
    for (ColibriConferenceIQ.Channel channel : audioContent.getChannels())
    {
        ColibriConferenceIQ.Channel channelRequest
            = new ColibriConferenceIQ.Channel();
        channelRequest.setID(channel.getID());
        // SENDONLY presumably stops the participant's inbound audio at
        // the bridge (mute), SENDRECV restores normal two-way flow --
        // confirm against the Colibri channel-direction semantics.
        if (mute)
        {
            channelRequest.setDirection(MediaDirection.SENDONLY);
        }
        else
        {
            channelRequest.setDirection(MediaDirection.SENDRECV);
        }
        contentRequest.addChannel(channelRequest);
    }
    if (contentRequest.getChannelCount() == 0)
    {
        logger.error("Failed to mute - no channels to modify." +
            " ConfID:" + request.getID());
        return false;
    }
    request.setType(IQ.Type.SET);
    request.setTo(jitsiVideobridge);
    request.addContent(contentRequest);
    // Fire-and-forget: the IQ is sent without waiting for the result.
    connection.sendPacket(request);
    // FIXME wait for response and set local status
    return true;
}
/**
* Sets world readable name that identifies the conference.
* @param name the new name.
*/
public void setName(String name)
{
conferenceState.setName(name);
}
/**
* Gets world readable name that identifies the conference.
* @return the name.
*/
public String getName()
{
return conferenceState.getName();
}
/**
* {@inheritDoc}
*/
@Override
public void updateChannelsInfo(
ColibriConferenceIQ localChannelsInfo,
Map<String, RtpDescriptionPacketExtension> rtpInfoMap,
MediaSSRCMap ssrcs,
MediaSSRCGroupMap ssrcGroups,
IceUdpTransportPacketExtension bundleTransport,
Map<String, IceUdpTransportPacketExtension> transportMap)
{
ColibriConferenceIQ iq;
synchronized (syncRoot)
{
if (checkIfDisposed("updateChannelsInfo"))
return;
colibriBuilder.reset();
boolean send = false;
// RTP description
if (rtpInfoMap != null
&& colibriBuilder.addRtpDescription(
rtpInfoMap, localChannelsInfo))
{
send = true;
}
// SSRCs
if (ssrcs != null
&& colibriBuilder.addSSSRCInfo(
ssrcs.toMap(), localChannelsInfo))
{
send = true;
}
// SSRC groups
if (ssrcGroups != null
&& colibriBuilder.addSSSRCGroupsInfo(
ssrcGroups.toMap(), localChannelsInfo))
{
send = true;
}
// Bundle transport...
if (bundleTransport != null
&& colibriBuilder.addBundleTransportUpdateReq(
bundleTransport, localChannelsInfo))
{
send = true;
}
// ...or non-bundle transport
else if (transportMap != null
&& colibriBuilder.addTransportUpdateReq(
transportMap, localChannelsInfo))
{
send = true;
}
iq = send ? colibriBuilder.getRequest(jitsiVideobridge) : null;
}
if (iq != null)
{
logRequest("Sending channel info update: ", iq);
connection.sendPacket(iq);
}
}
/**
* Custom type of semaphore that allows only 1 thread to send initial
* Colibri IQ that creates the conference.
* It means that if {@link #conferenceState} has no ID then only 1 thread
* will be allowed to send allocate request to the bridge. Other threads
* will be suspended until we have the response(from which we get our
* conference ID). Error response to create request will cause
* <tt>OperationFailedException</tt> on the threads waiting on this
* semaphore.
*/
class ConferenceCreationSemaphore
{
/**
* Stores reference to conference creator thread instance.
*/
private Thread creatorThread;
/**
* Acquires conference creation semaphore. If we don't have conference
* ID yet then only first thread to obtain will be allowed to go through
* and all other threads will be suspended until it finishes it's job.
* Once we have a conference allocated all threads are allowed to go
* through immediately.
*
* @return <tt>true</tt> if current thread has just become a conference
* creator. That is the thread that sends first channel allocate
* request that results in new conference created.
*
* @throws OperationFailedException if we are not conference creator
* thread and conference creator has failed to create the
* conference while we've been waiting on this semaphore.
*/
public boolean acquire()
    throws OperationFailedException
{
    synchronized (syncRoot)
    {
        // Remember the bridge in use for the error message below.
        String jvbInUse = jitsiVideobridge;
        if (conferenceState.getID() == null && creatorThread == null)
        {
            // No conference yet and nobody creating one: this thread
            // becomes the creator and proceeds without blocking.
            creatorThread = Thread.currentThread();
            if (logger.isDebugEnabled())
                logger.debug("I'm the conference creator - " +
                    Thread.currentThread().getName());
            return true;
        }
        else
        {
            if (logger.isDebugEnabled())
                logger.debug(
                    "Will have to wait until the conference " +
                    "is created - " + Thread.currentThread().getName());
            // Loop guards against spurious wake-ups; release() clears
            // creatorThread and notifies all waiters.
            while (creatorThread != null)
            {
                try
                {
                    syncRoot.wait();
                }
                catch (InterruptedException e)
                {
                    // Restore the interrupt flag so that callers further
                    // up the stack can still observe the interruption
                    // (wrapping alone would swallow it).
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }
            if (conferenceState.getID() == null)
            {
                // The creator finished without obtaining a conference ID
                // - it failed, and so do all the waiting threads.
                throw new OperationFailedException(
                    "Creator thread has failed to " +
                    "allocate channels on: " + jvbInUse,
                    OperationFailedException.GENERAL_ERROR);
            }
            if (logger.isDebugEnabled())
                logger.debug(
                    "Conference created ! Continuing with " +
                    "channel allocation -" +
                    Thread.currentThread().getName());
        }
    }
    return false;
}
/**
* Releases this semaphore instance. If we're a conference creator then
* all waiting thread will be woken up.
*/
public void release()
{
    synchronized (syncRoot)
    {
        // Only the creator may release: every caller of
        // createColibriChannels() invokes release() from its finally
        // block, and a non-creator thread must not wake the waiters up
        // prematurely.
        if (creatorThread == Thread.currentThread())
        {
            if (logger.isDebugEnabled())
                logger.debug(
                    "Conference creator is releasing " +
                    "the lock - " + Thread.currentThread().getName());
            creatorThread = null;
            syncRoot.notifyAll();
        }
    }
}
}
}
| |
/*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.gwt.client.rooms;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.unitime.timetable.gwt.client.ToolBox;
import org.unitime.timetable.gwt.client.rooms.RoomsTable.DepartmentCell;
import org.unitime.timetable.gwt.client.rooms.RoomsTable.SortOperation;
import org.unitime.timetable.gwt.client.widgets.P;
import org.unitime.timetable.gwt.client.widgets.UniTimeTable;
import org.unitime.timetable.gwt.client.widgets.UniTimeTableHeader;
import org.unitime.timetable.gwt.client.widgets.UniTimeTableHeader.Operation;
import org.unitime.timetable.gwt.resources.GwtMessages;
import org.unitime.timetable.gwt.resources.GwtResources;
import org.unitime.timetable.gwt.shared.EventInterface.FilterRpcResponse;
import org.unitime.timetable.gwt.shared.RoomInterface.GroupInterface;
import org.unitime.timetable.gwt.shared.RoomInterface.RoomGroupsColumn;
import com.google.gwt.core.client.GWT;
import com.google.gwt.event.dom.client.MouseOutEvent;
import com.google.gwt.event.dom.client.MouseOutHandler;
import com.google.gwt.event.dom.client.MouseOverEvent;
import com.google.gwt.event.dom.client.MouseOverHandler;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.user.client.ui.HasHorizontalAlignment.HorizontalAlignmentConstant;
/**
* @author Tomas Muller
*/
public class RoomGroupsTable extends UniTimeTable<GroupInterface> {
protected static final GwtMessages MESSAGES = GWT.create(GwtMessages.class);
protected static final GwtResources RESOURCES = GWT.create(GwtResources.class);
private boolean iGlobal;
private RoomGroupsColumn iSortBy = null;
private boolean iAsc = true;
/**
 * Creates the table: builds one header cell per visible column, attaches
 * a sort-by operation to each sortable header, and restores the last sort
 * order from the room cookie.
 *
 * @param isGlobal whether the table lists global room groups (shows the
 *        Default column) or departmental ones (shows the Department
 *        column) -- see getNbrCells.
 */
public RoomGroupsTable(boolean isGlobal) {
    setStyleName("unitime-RoomGroups");
    iGlobal = isGlobal;
    List<UniTimeTableHeader> header = new ArrayList<UniTimeTableHeader>();
    for (RoomGroupsColumn column: RoomGroupsColumn.values()) {
        int nrCells = getNbrCells(column);
        for (int idx = 0; idx < nrCells; idx++) {
            UniTimeTableHeader h = new UniTimeTableHeader(getColumnName(column, idx), getColumnAlignment(column, idx));
            header.add(h);
        }
    }
    // Second pass: attach a "sort by" menu operation to each header whose
    // column the comparator supports and that is visible in this variant.
    for (final RoomGroupsColumn column: RoomGroupsColumn.values()) {
        if (RoomGroupsComparator.isApplicable(column) && getNbrCells(column) > 0) {
            final UniTimeTableHeader h = header.get(getCellIndex(column));
            Operation op = new SortOperation() {
                @Override
                public void execute() {
                    doSort(column);
                }
                @Override
                public boolean isApplicable() { return getRowCount() > 1 && h.isVisible(); }
                @Override
                public boolean hasSeparator() { return false; }
                @Override
                public String getName() { return MESSAGES.opSortBy(getColumnName()); }
                @Override
                public String getColumnName() { return h.getHTML().replace("<br>", " "); }
            };
            h.addOperation(op);
        }
    }
    addRow(null, header);
    // Row 0 is the header row; style its cells as clickable.
    for (int i = 0; i < getCellCount(0); i++)
        getCellFormatter().setStyleName(0, i, "unitime-ClickableTableHeader");
    setSortBy(RoomCookie.getInstance().getRoomGroupsSortBy());
}
/**
 * Header-click sorting: clicking the current sort column toggles the
 * direction, clicking a different column selects it ascending. The
 * selection is persisted in the room cookie before re-sorting.
 */
protected void doSort(RoomGroupsColumn column) {
    if (iSortBy == column) {
        iAsc = !iAsc;
    } else {
        iAsc = true;
        iSortBy = column;
    }
    RoomCookie.getInstance().setSortRoomGroupsBy(getSortBy());
    sort();
}
/** @return true when a sort column has been selected. */
public boolean hasSortBy() { return iSortBy != null; }
/**
 * Encodes the sort state as a single int for the room cookie:
 * 0 = none, +(ordinal + 1) = ascending, -(ordinal + 1) = descending.
 */
public int getSortBy() { return iSortBy == null ? 0 : iAsc ? 1 + iSortBy.ordinal() : -1 - iSortBy.ordinal(); }
/**
 * Restores the sort state from the encoding produced by
 * {@link #getSortBy()} and applies it.
 */
public void setSortBy(int sortBy) {
    if (sortBy == 0) {
        iSortBy = null;
        iAsc = true;
    } else if (sortBy > 0) {
        iSortBy = RoomGroupsColumn.values()[sortBy - 1];
        iAsc = true;
    } else {
        iSortBy = RoomGroupsColumn.values()[-1 - sortBy];
        iAsc = false;
    }
    sort();
}
/**
 * Applies the current sort selection. Falls back to NAME when the stored
 * column has no cell in this table variant (e.g. DEFAULT stored by a
 * global table but restored in a departmental one).
 */
public void sort() {
    if (iSortBy == null) return;
    if (getNbrCells(iSortBy) == 0) iSortBy = RoomGroupsColumn.NAME;
    UniTimeTableHeader header = getHeader(getCellIndex(iSortBy));
    sort(header, new RoomGroupsComparator(iSortBy, true), iAsc);
}
/**
 * Number of table cells a column occupies: DEFAULT is shown only for
 * global groups, DEPARTMENT only for departmental ones, and every other
 * column always occupies exactly one cell.
 */
protected int getNbrCells(RoomGroupsColumn column) {
    if (column == RoomGroupsColumn.DEFAULT)
        return iGlobal ? 1 : 0;
    if (column == RoomGroupsColumn.DEPARTMENT)
        return iGlobal ? 0 : 1;
    return 1;
}
/**
 * Localized header text for a column.
 *
 * @param column the column.
 * @param idx cell index within the column (every column here has at most
 *        one cell, so idx is unused).
 */
public String getColumnName(RoomGroupsColumn column, int idx) {
    switch (column) {
    case NAME: return MESSAGES.colName();
    case ABBREVIATION: return MESSAGES.colAbbreviation();
    case DEFAULT: return MESSAGES.colDefault();
    case DEPARTMENT: return MESSAGES.colDepartment();
    case ROOMS: return MESSAGES.colRooms();
    case DESCRIPTION: return MESSAGES.colDescription();
    // Fallback for columns added to the enum without a message yet.
    default: return column.name();
    }
}
/**
 * Horizontal alignment for a column's cells. Currently every column is
 * left-aligned; the method is kept as an override hook for future
 * right-aligned (e.g. numeric) columns.
 */
protected HorizontalAlignmentConstant getColumnAlignment(RoomGroupsColumn column, int idx) {
    return HasHorizontalAlignment.ALIGN_LEFT;
}
/**
 * Computes the first cell index of a column by summing the cell counts of
 * all columns that precede it in enum (ordinal) order.
 */
protected int getCellIndex(RoomGroupsColumn column) {
    int index = 0;
    for (RoomGroupsColumn c: RoomGroupsColumn.values()) {
        if (c == column) break;
        index += getNbrCells(c);
    }
    return index;
}
/**
 * Creates the widget for one cell of a group's row.
 *
 * @param group the group being rendered.
 * @param column the column to render.
 * @param idx cell index within the column (unused, all columns are
 *        single-cell).
 * @return the cell widget, or null when the column has nothing to show
 *         for this group (the caller substitutes an empty placeholder).
 */
protected Widget getCell(final GroupInterface group, final RoomGroupsColumn column, final int idx) {
    switch (column) {
    case NAME:
        return new Label(group.getLabel() == null ? "" : group.getLabel(), false);
    case ABBREVIATION:
        return new Label(group.getAbbreviation() == null ? "" : group.getAbbreviation(), false);
    case DEFAULT:
        // Check-mark image only for the default group; blank otherwise.
        if (group.isDefault())
            return new Image(RESOURCES.on());
        else
            return null;
    case DEPARTMENT:
        return new DepartmentCell(true, group.getDepartment());
    case DESCRIPTION:
        // Description may contain markup, hence HTML rather than Label.
        if (group.hasDescription()) {
            HTML html = new HTML(group.getDescription());
            html.setStyleName("description");
            return html;
        } else
            return null;
    case ROOMS:
        if (group.hasRooms())
            return new RoomsCell(group);
        else
            return null;
    default:
        return null;
    }
}
/**
 * Appends a row for the given group, creating one widget per visible
 * cell (an empty placeholder when a column has nothing to show), and
 * applies the standard row/cell styles.
 *
 * @return index of the newly added row.
 */
public int addGroup(final GroupInterface group) {
    List<Widget> widgets = new ArrayList<Widget>();
    for (RoomGroupsColumn column: RoomGroupsColumn.values())
        for (int idx = 0; idx < getNbrCells(column); idx ++) {
            Widget cell = getCell(group, column, idx);
            widgets.add(cell == null ? new P() : cell);
        }
    int row = addRow(group, widgets);
    getRowFormatter().setStyleName(row, "row");
    for (int col = 0; col < getCellCount(row); col++)
        getCellFormatter().setStyleName(row, col, "cell");
    return row;
}
/** Cell listing all rooms of a group, one inline RoomCell per room. */
public static class RoomsCell extends P {
    public RoomsCell(GroupInterface group) {
        super("rooms");
        if (group.hasRooms()) {
            for (Iterator<FilterRpcResponse.Entity> i = group.getRooms().iterator(); i.hasNext(); ) {
                FilterRpcResponse.Entity room = i.next();
                // hasNext() tells the cell whether to append a comma.
                add(new RoomCell(room, i.hasNext()));
            }
        }
    }
}
/**
 * Single room reference: renders the room name (with its abbreviation
 * when available), a trailing comma when more rooms follow, and shows a
 * room hint popup on mouse-over.
 */
public static class RoomCell extends P {
    public RoomCell(final FilterRpcResponse.Entity room, boolean hasNext) {
        super("room");
        setText((room.getAbbreviation() != null && !room.getAbbreviation().isEmpty() ? MESSAGES.label(room.getName(), room.getAbbreviation()) : room.getName()) + (hasNext ? "," : ""));
        addMouseOverHandler(new MouseOverHandler() {
            @Override
            public void onMouseOver(MouseOverEvent event) {
                RoomHint.showHint(RoomCell.this.getElement(), room.getUniqueId(), room.getProperty("prefix", null), room.getProperty("distance", null), true);
            }
        });
        addMouseOutHandler(new MouseOutHandler() {
            @Override
            public void onMouseOut(MouseOutEvent event) {
                RoomHint.hideHint();
            }
        });
    }
}
/** Re-renders every cell widget that supports in-place refresh. */
public void refreshTable() {
    for (int row = 1; row < getRowCount(); row++)
        for (int col = 0; col < getCellCount(row); col++) {
            Widget widget = getWidget(row, col);
            if (widget instanceof HasRefresh)
                ((HasRefresh)widget).refresh();
        }
}
/**
 * Looks a group up by id among the data rows (row 0 is the header row).
 *
 * @return the matching group, or null when absent or groupId is null.
 */
public GroupInterface getGroup(Long groupId) {
    if (groupId == null) return null;
    for (int row = 1; row < getRowCount(); row++) {
        GroupInterface group = getData(row);
        if (groupId.equals(group.getId())) return group;
    }
    return null;
}
/**
 * Scrolls the browser viewport to the row of the given group, if present
 * (row 0 is the header row; a null id is ignored).
 */
public void scrollTo(Long groupId) {
    if (groupId == null) return;
    for (int row = 1; row < getRowCount(); row++) {
        if (!groupId.equals(getData(row).getId())) continue;
        ToolBox.scrollToElement(getRowFormatter().getElement(row));
        return;
    }
}
}
| |
package com.planet_ink.coffee_mud.Abilities.Songs;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Skill_Mimicry extends BardSkill
{
// --- Ability identity and classification boilerplate -------------------
@Override
public String ID()
{
    return "Skill_Mimicry";
}
// Localized display name, resolved once at class load.
private final static String localizedName = CMLib.lang().L("Mimicry");
@Override
public String name()
{
    return localizedName;
}
@Override
public String displayText()
{
    // Empty affect string: the skill is invisible in "affected by" lists.
    return "";
}
@Override
protected int canAffectCode()
{
    return CAN_MOBS;
}
@Override
protected int canTargetCode()
{
    return CAN_MOBS;
}
@Override
public int abstractQuality()
{
    return Ability.QUALITY_OK_OTHERS;
}
// Command words players may type to invoke the skill.
private static final String[] triggerStrings =I(new String[] {"MIMICRY","MIMIC"});
@Override
public String[] triggerStrings()
{
    return triggerStrings;
}
@Override
public int classificationCode()
{
    return Ability.ACODE_SKILL|Ability.DOMAIN_FOOLISHNESS;
}
// Most recent message eligible for mimicking (recorded by executeMsg).
protected CMMsg lastMsg=null;
// When true the skill stops recording messages.
protected boolean disabled=false;
@Override
public void executeMsg(final Environmental affecting, final CMMsg msg)
{
super.executeMsg(affecting,msg);
if((affecting instanceof MOB)&&(!CMLib.flags().isAliveAwakeMobileUnbound((MOB)affecting,true)))
return;
if(disabled)
return;
if(((!(affecting instanceof MOB))||(!msg.amISource((MOB)affecting)))
&&((text().length()==0)||(text().equalsIgnoreCase(msg.source().Name())))
&&((msg.sourceMinor()!=CMMsg.TYP_EMOTE)
||(msg.tool() instanceof Social)))
lastMsg=msg;
}
public void fixSNameTo(final CMMsg msg, final MOB sMOB, final Environmental ticking)
{
//String src=msg.sourceMessage();
String trg=msg.targetMessage();
String oth=msg.othersMessage();
//if(src!=null) src=CMStrings.replaceAll(src,"<S-NAME>",ticking.name());
//if(src!=null) src=CMStrings.replaceAll(src,"You ",ticking.name()+" ");
//if(src!=null) src=CMStrings.replaceAll(src,"Your ",ticking.name()+"`s ");
if(trg!=null)
trg=CMStrings.replaceAll(trg,"<S-NAME>",ticking.name());
if(oth!=null)
oth=CMStrings.replaceAll(oth,"<S-NAME>",ticking.name());
//if(src!=null) src=CMStrings.replaceAll(src,"<S-HIM-HERSELF>","itself");
if(trg!=null)
trg=CMStrings.replaceAll(trg,"<S-HIM-HERSELF>","itself");
if(oth!=null)
oth=CMStrings.replaceAll(oth,"<S-HIM-HERSELF>","itself");
//if(src!=null) src=CMStrings.replaceAll(src,"<S-HIS-HERSELF>","itself");
if(trg!=null)
trg=CMStrings.replaceAll(trg,"<S-HIS-HERSELF>","itself");
if(oth!=null)
oth=CMStrings.replaceAll(oth,"<S-HIS-HERSELF>","itself");
//if(src!=null) src=CMStrings.replaceAll(src,"<S-HIM-HER>","it");
if(trg!=null)
trg=CMStrings.replaceAll(trg,"<S-HIM-HER>","it");
if(oth!=null)
oth=CMStrings.replaceAll(oth,"<S-HIM-HER>","it");
//if(src!=null) src=CMStrings.replaceAll(src,"<S-HE-SHE>","it");
if(trg!=null)
trg=CMStrings.replaceAll(trg,"<S-HE-SHE>","it");
if(oth!=null)
oth=CMStrings.replaceAll(oth,"<S-HE-SHE>","it");
//if(src!=null) src=CMStrings.replaceAll(src,"<S-HIS-HER>","its");
if(trg!=null)
trg=CMStrings.replaceAll(trg,"<S-HIS-HER>","its");
if(oth!=null)
oth=CMStrings.replaceAll(oth,"<S-HIS-HER>","its");
msg.modify(sMOB,sMOB,msg.tool(),
msg.sourceCode(),oth,
msg.targetCode(),trg,
msg.othersCode(),oth);
}
@Override
public boolean tick(final Tickable ticking, final int tickID)
{
super.tick(ticking,tickID);
CMMsg msg=lastMsg;
if(msg==null)
return true;
lastMsg=null;
if(((affected instanceof MOB)&&(!CMLib.flags().isAliveAwakeMobileUnbound((MOB)affected,true))))
return true;
msg=(CMMsg)msg.copyOf();
final MOB sMOB=msg.source();
if(msg.sourceMinor()==CMMsg.TYP_EMOTE)
{
if(affected instanceof MOB)
msg.modify((MOB)affected,msg.target(),msg.tool(),
msg.sourceCode(),msg.sourceMessage(),
msg.targetCode(),msg.targetMessage(),
msg.othersCode(),msg.othersMessage());
else
{
final MOB newSMOB=CMClass.getFactoryMOB();
newSMOB.baseCharStats().setStat(CharStats.STAT_GENDER,'N');
newSMOB.setName(affected.name());
newSMOB.recoverCharStats();
msg.modify(newSMOB,msg.source(),msg.tool(),
msg.sourceCode(),msg.sourceMessage(),
msg.targetCode(),msg.targetMessage(),
msg.othersCode(),msg.othersMessage());
}
}
else
if(msg.tool() instanceof Social)
{
MOB target=null;
if(msg.target() instanceof MOB)
target=msg.source();
if(affected instanceof MOB)
msg.modify((MOB)affected,target,msg.tool(),
msg.sourceCode(),msg.sourceMessage(),
msg.targetCode(),msg.targetMessage(),
msg.othersCode(),msg.othersMessage());
else
{
final MOB newSMOB=CMClass.getFactoryMOB();
newSMOB.baseCharStats().setStat(CharStats.STAT_GENDER,'N');
newSMOB.setName(affected.name());
newSMOB.recoverCharStats();
msg.modify(newSMOB,target,msg.tool(),
msg.sourceCode(),msg.sourceMessage(),
msg.targetCode(),msg.targetMessage(),
msg.othersCode(),msg.othersMessage());
}
}
else
return true;
disabled=true;
if((sMOB.location()!=null)
&&(sMOB.location().okMessage(sMOB,msg)))
{
if(msg.source().location()==null)
{
msg.source().setLocation(sMOB.location());
sMOB.location().send(msg.source(),msg);
msg.source().setLocation(null);
}
else
sMOB.location().send(msg.source(),msg);
}
disabled=false;
return true;
}
@Override
public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
{
final MOB target=this.getTarget(mob,commands,givenTarget);
if(target==null)
return false;
if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
return false;
final boolean success=proficiencyCheck(mob,0,auto);
if(success)
{
final CMMsg msg=CMClass.getMsg(mob,target,this,CMMsg.MSG_QUIETMOVEMENT|(auto?CMMsg.MASK_ALWAYS:0),auto?"":L("<S-NAME> begin(s) mimicing <T-NAMESELF>."));
if(mob.location().okMessage(mob,msg))
{
mob.location().send(mob,msg);
beneficialAffect(mob,mob,asLevel,0);
final Ability A=mob.fetchEffect(ID());
if((A!=null)&&(target!=mob))
A.setMiscText(target.Name());
}
}
else
return beneficialVisualFizzle(mob,target,L("<S-NAME> attempt(s) to mimic <T-NAMESELF>, but fail(s)."));
return success;
}
}
| |
package byronRestaurant;
import MarketEmployee.MarketManagerRole;
import Person.Role.Role;
import Person.Role.ShiftTime;
import Person.Role.Employee.WorkState;
import byronRestaurant.gui.CookGui;
import interfaces.generic_interfaces.GenericCook;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.*;
import java.util.concurrent.Semaphore;
import javax.swing.Timer;
import byronRestaurant.RevolvingStand;
import trace.AlertLog;
import trace.AlertTag;
/**
* Restaurant Cook Agent
* make sure to change when cook makes an order to market (threshold point)
*
*
*/
public class CookRole extends GenericCook {
	// Orders currently known to the cook; synchronized list because Swing timer
	// callbacks and waiter message calls may touch it from different threads.
	private List<Order> orders = Collections.synchronizedList(new ArrayList<Order>());

	public enum cookStatus {pending, cooking, done};

	// Markets used for restocking.
	// NOTE(review): nothing in this view ever adds a market (AddMarket is
	// commented out below) — confirm markets.get(0) in cookOrder() cannot run
	// while this list is empty, or it will throw IndexOutOfBoundsException.
	private List<MarketManagerRole> markets = Collections.synchronizedList(new ArrayList<MarketManagerRole>());
	private String name;
	// When a food's stock equals exactly this amount, a market restock is requested.
	private int foodThreshold = 3;
	// Stock level the cook restocks back up to.
	private int foodMaximum = 50;
	private CookGui cookGui;
	private RevolvingStand revolvingStand;
	// Animation gates: released by the GUI (msgAt* below) when the cook sprite
	// arrives at the corresponding spot; acquired before acting there.
	private Semaphore atPlatingArea = new Semaphore(0,true);
	private Semaphore atKitchen = new Semaphore(0,true);
	private Semaphore atDefault = new Semaphore(0,true);

	// One customer order: originating waiter, table number, menu choice, cook state.
	protected class Order{
		public WaiterRole waiter;
		public int table;
		public String choice;
		public cookStatus status;

		Order(WaiterRole waiterRole, int t, String c){
			waiter = waiterRole;
			table = t;
			choice = c;
			status = cookStatus.pending;
		}
	}

	// One inventory entry: food name, cook time in milliseconds, units in stock.
	private class Food{
		public String type;
		public double cookTime;
		public int amount;

		Food(String t, double c, int a){
			type = t;
			cookTime = c;
			amount = a;
		}
	}

	// Menu/stock table keyed by food name; initialized with the default menu
	// (cook time in ms, starting stock of 50 each).
	private Map<String, Food> inventory = Collections.synchronizedMap(new HashMap<String, Food>() {/**
	 * Anonymous subclass only exists for the instance-initializer below.
	 */
	private static final long serialVersionUID = 1L;
	{
		put("Steak", new Food("Steak", 6000,50));
		put("Chicken", new Food("Chicken", 5000, 50));
		put("Burger", new Food("Burger", 8000, 50));
	}});

	//Initialize Cook
	// Also starts a 15s Swing timer that wakes the scheduler so orders placed
	// on the revolving stand get noticed even when no messages arrive.
	public CookRole(String location) {
		super(location);
		revolvingStand = new RevolvingStand();
		Timer checkRevolvingStand = new Timer(15000, new ActionListener(){
			@Override
			public void actionPerformed(ActionEvent e) {
				stateChanged();
			}
		});
		checkRevolvingStand.start();
	}

	// public void AddMarket(MarketAgent m){
	// market = m;
	// }

	// messages

	// From a waiter: a new customer order for the given table.
	public void msgHereIsAnOrder(WaiterRole waiterRole, int table, String choice){
		orders.add(new Order(waiterRole, table, choice));
		print("Receiving new order from " + waiterRole.getName());
		stateChanged();
	}

	// From the cook-timer in cookOrder(): the food finished cooking.
	public void msgFoodDone(Order o){
		o.status = cookStatus.done;
		stateChanged();
	}

	// From the market: a restock arrived. ingredientNum maps 0=Steak, 1=Chicken,
	// 2=Burger; stock is increased and every waiter with a pending order is told
	// the item is back on the menu.
	public void msgOrderFilled(int ingredientNum, int quantity){
		String foodType = null;
		if (ingredientNum==0) {
			foodType="Steak";
		}
		if (ingredientNum==1){
			foodType="Chicken";
		}
		if (ingredientNum==2){
			foodType="Burger";
		}
		int temp = inventory.get(foodType).amount + quantity;
		Food tempFood = new Food(foodType, inventory.get(foodType).cookTime,temp);
		inventory.put(foodType,tempFood);
		synchronized(orders){
			for (Order o : orders){
				o.waiter.msgRestockedItem(foodType);
			}
		}
		stateChanged();
	}

	// From the market: restock request could not be filled; item stays off the menu.
	public void msgOrderNotFilled(int ingredientNum){
		String foodType = null;
		if (ingredientNum==0) {
			foodType="Steak";
		}
		if (ingredientNum==1){
			foodType="Chicken";
		}
		if (ingredientNum==2){
			foodType="Burger";
		}
		AlertLog.getInstance().logMessage(AlertTag.BYRONS_RESTAURANT, myPerson.getName(),"Market has run out, keep " + foodType + " off the menu.");
	}

	// From a waiter: plated food was picked up.
	public void msgTakingFood(){
		cookGui.setOnPlate(false);
	}

	// GUI arrival callbacks: release the matching animation gate.
	public void msgAtPlatingArea(){
		atPlatingArea.release();
		stateChanged();
	}

	public void msgAtKitchen(){
		atKitchen.release();
		stateChanged();
	}

	public void msgAtDefault(){
		atDefault.release();
		stateChanged();
	}

	// Scheduler
	// Runs at most one action per call; returns true if an action ran.
	// Priority: leave work if done, plate finished food, cook pending food,
	// then pull a new order off the revolving stand. Each branch returns
	// immediately, which also avoids ConcurrentModificationException when an
	// action removes from 'orders' mid-iteration.
	public boolean pickAndExecuteAction() {
		synchronized(orders){
			if(orders.isEmpty() && workState == WorkState.ReadyToLeave){
				kill();
				return true;
			}
			for (Order o : orders){
				if (o.status == cookStatus.done){
					placeOrder(o);
					return true;
				}
				if (o.status == cookStatus.pending){
					cookOrder(o);
					return true;
				}
			}
			if(!revolvingStand.isEmpty()){
				//get the order from the stand
				WaiterRole.Order order = revolvingStand.getLastOrder();
				//structure the order data to fit in with my old cooking routine
				Order newOrder = new Order(order.getWaiter(), order.getTable(), order.getChoice());
				orders.add(newOrder);
				//cook the order in the same way
				cookOrder(newOrder);
				return true;
			}
		}
		return false;
	}

	//Actions

	// Walk to the plating area (blocking until the GUI arrives), plate the food,
	// notify the waiter, then walk back to the default spot.
	private void placeOrder(Order o){
		cookGui.doGoToPlatingArea();
		try {
			atPlatingArea.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
		cookGui.setOnPlate(true);
		o.waiter.msgOrderIsReady(o.table, o.choice);
		print("Order is ready");
		orders.remove(o);
		cookGui.doGoToDefault();
		try {
			atDefault.acquire();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
	}

	// Cook one order: request a restock when stock hits the threshold, bounce
	// the order when stock is empty, otherwise walk to the kitchen and start a
	// one-shot timer for the food's cook time (msgFoodDone fires on expiry).
	// NOTE(review): the threshold branch uses '==' and leaves the order pending,
	// so the scheduler re-enters here and re-messages the market every pass
	// until the restock arrives — confirm this repeat-request is intended.
	private void cookOrder(final Order o){
		if (inventory.get(o.choice).amount == foodThreshold){
			markets.get(0).msgMarketManagerFoodOrder(o.choice, (foodMaximum-inventory.get(o.choice).amount), this);
		}else if (inventory.get(o.choice).amount == 0){
			o.waiter.msgOutOfStock(o.choice, o.table);
			orders.remove(o);
		}
		else{
			Food food = inventory.get(o.choice);
			cookGui.doGoToKitchen();
			try {
				atKitchen.acquire();
			} catch (InterruptedException e) {
				e.printStackTrace();
			}
			cookGui.setIsCooking(true);
			o.status = cookStatus.cooking;
			Timer timer2 = new Timer((int) food.cookTime, new ActionListener(){
				public void actionPerformed(ActionEvent e){
					msgFoodDone(o);
					cookGui.setIsCooking(false);
				}
			});
			timer2.start();
			// Consume one unit immediately; the timer only flips the status.
			inventory.get(o.choice).amount --;
			print ("Cooking order of " + o.choice + ". " + inventory.get(o.choice).amount + " remaining.");
		}
	}

	//Utilities

	public String getName(){
		return name;
	}

	public void setGui(CookGui gui) {
		cookGui = gui;
	}

	public CookGui getGui() {
		return cookGui;
	}

	@Override
	public ShiftTime getShift() {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public Double getSalary() {
		// TODO Auto-generated method stub
		return null;
	}

	@Override
	public boolean canGoGetFood() {
		// TODO Auto-generated method stub
		return false;
	}

	@Override
	public String getNameOfRole() {
		return Role.RESTAURANT_BYRON_COOK_ROLE;
	}

	public RevolvingStand getRevolvingStand(){
		return revolvingStand;
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.yarn.cli;
import org.apache.flink.client.cli.CliFrontendParser;
import org.apache.flink.client.cli.CustomCommandLine;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.util.Preconditions;
import org.apache.flink.yarn.YarnClusterClientV2;
import org.apache.flink.yarn.YarnClusterDescriptorV2;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.fs.Path;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.List;
import static org.apache.flink.client.cli.CliFrontendParser.ADDRESS_OPTION;
/**
* Class handling the command line interface to the YARN per job mode under flip-6.
*/
public class FlinkYarnCLI implements CustomCommandLine<YarnClusterClientV2> {
	private static final Logger LOG = LoggerFactory.getLogger(FlinkYarnCLI.class);

	/** The id for the CommandLine interface; also the value of -m that activates this CLI. */
	private static final String ID = "yarn";

	private static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split()

	//------------------------------------ Command Line argument options -------------------------
	// the prefix transformation is used by the CliFrontend static constructor.
	private final Option queue;
	private final Option shipPath;
	private final Option flinkJar;
	private final Option jmMemory;
	private final Option detached;
	private final Option zookeeperNamespace;

	private final Options allOptions;

	/**
	 * Dynamic properties allow the user to specify additional configuration values with -D, such as
	 * <tt> -Dfs.overwrite-files=true -Dtaskmanager.network.memory.min=536346624</tt>.
	 */
	private final Option dynamicProperties;

	//------------------------------------ Internal fields -------------------------
	// use detach mode as default; non-detached mode is not supported yet (see createDescriptor).
	private boolean detachedMode = true;

	/**
	 * Builds the option set using the given short/long option prefixes
	 * (supplied by the CliFrontend so several CLIs can coexist).
	 */
	public FlinkYarnCLI(String shortPrefix, String longPrefix) {
		queue = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue.");
		shipPath = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)");
		flinkJar = new Option(shortPrefix + "j", longPrefix + "jar", true, "Path to Flink jar file");
		jmMemory = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]");
		dynamicProperties = new Option(shortPrefix + "D", true, "Dynamic properties");
		detached = new Option(shortPrefix + "a", longPrefix + "attached", false, "Start attached");
		zookeeperNamespace = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode");

		allOptions = new Options();
		allOptions.addOption(flinkJar);
		allOptions.addOption(jmMemory);
		allOptions.addOption(queue);
		allOptions.addOption(shipPath);
		allOptions.addOption(dynamicProperties);
		allOptions.addOption(detached);
		allOptions.addOption(zookeeperNamespace);
	}

	/**
	 * Translates the parsed command line into a YarnClusterDescriptorV2.
	 *
	 * @param defaultApplicationName application name to set on the descriptor when non-null
	 * @param cmd parsed command line options
	 * @return configured descriptor (always detached mode — non-detached unsupported)
	 */
	public YarnClusterDescriptorV2 createDescriptor(String defaultApplicationName, CommandLine cmd) {
		YarnClusterDescriptorV2 yarnClusterDescriptor = new YarnClusterDescriptorV2();

		// Jar Path: explicit -j value, else fall back to the code-source location
		// of the descriptor class (i.e. the flink-dist jar this class was loaded from).
		Path localJarPath;
		if (cmd.hasOption(flinkJar.getOpt())) {
			String userPath = cmd.getOptionValue(flinkJar.getOpt());
			// NOTE(review): any path without a "file://" prefix gets one prepended,
			// including paths with other schemes (e.g. hdfs://) — confirm only local
			// filesystem paths are expected here.
			if (!userPath.startsWith("file://")) {
				userPath = "file://" + userPath;
			}
			localJarPath = new Path(userPath);
		} else {
			LOG.info("No path for the flink jar passed. Using the location of "
				+ yarnClusterDescriptor.getClass() + " to locate the jar");
			String encodedJarPath =
				yarnClusterDescriptor.getClass().getProtectionDomain().getCodeSource().getLocation().getPath();
			try {
				// we have to decode the url encoded parts of the path
				String decodedPath = URLDecoder.decode(encodedJarPath, Charset.defaultCharset().name());
				localJarPath = new Path(new File(decodedPath).toURI());
			} catch (UnsupportedEncodingException e) {
				throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath +
					" Please supply a path manually via the -" + flinkJar.getOpt() + " option.");
			}
		}
		yarnClusterDescriptor.setLocalJarPath(localJarPath);

		List<File> shipFiles = new ArrayList<>();
		// path to directory to ship; non-directories are ignored with a warning.
		if (cmd.hasOption(shipPath.getOpt())) {
			String shipPath = cmd.getOptionValue(this.shipPath.getOpt());
			File shipDir = new File(shipPath);
			if (shipDir.isDirectory()) {
				shipFiles.add(shipDir);
			} else {
				LOG.warn("Ship directory is not a directory. Ignoring it.");
			}
		}
		yarnClusterDescriptor.addShipFiles(shipFiles);

		// queue
		if (cmd.hasOption(queue.getOpt())) {
			yarnClusterDescriptor.setQueue(cmd.getOptionValue(queue.getOpt()));
		}

		// JobManager Memory (MB)
		if (cmd.hasOption(jmMemory.getOpt())) {
			int jmMemory = Integer.valueOf(cmd.getOptionValue(this.jmMemory.getOpt()));
			yarnClusterDescriptor.setJobManagerMemory(jmMemory);
		}

		// Dynamic -D properties, joined with the separator for later splitting.
		String[] dynamicProperties = null;
		if (cmd.hasOption(this.dynamicProperties.getOpt())) {
			dynamicProperties = cmd.getOptionValues(this.dynamicProperties.getOpt());
		}
		String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);
		yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded);

		if (cmd.hasOption(detached.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
			// TODO: not support non detach mode now.
			//this.detachedMode = false;
		}
		yarnClusterDescriptor.setDetachedMode(this.detachedMode);

		if (defaultApplicationName != null) {
			yarnClusterDescriptor.setName(defaultApplicationName);
		}

		if (cmd.hasOption(zookeeperNamespace.getOpt())) {
			String zookeeperNamespace = cmd.getOptionValue(this.zookeeperNamespace.getOpt());
			yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespace);
		}
		return yarnClusterDescriptor;
	}

	// Prints this CLI's options to stdout.
	// NOTE(review): private and not called from this file region — possibly invoked
	// via reflection or kept for future use; verify before removing.
	private void printUsage() {
		System.out.println("Usage:");
		HelpFormatter formatter = new HelpFormatter();
		formatter.setWidth(200);
		formatter.setLeftPadding(5);
		formatter.setSyntaxPrefix(" Optional");
		Options options = new Options();
		addGeneralOptions(options);
		addRunOptions(options);
		formatter.printHelp(" ", options);
	}

	/** Active iff the -m/--jobmanager option equals "yarn". */
	@Override
	public boolean isActive(CommandLine commandLine, Configuration configuration) {
		String jobManagerOption = commandLine.getOptionValue(ADDRESS_OPTION.getOpt(), null);
		boolean yarnJobManager = ID.equals(jobManagerOption);
		return yarnJobManager;
	}

	@Override
	public String getId() {
		return ID;
	}

	@Override
	public void addRunOptions(Options baseOptions) {
		for (Object option : allOptions.getOptions()) {
			baseOptions.addOption((Option) option);
		}
	}

	@Override
	public void addGeneralOptions(Options baseOptions) {
		// No general (non-run) options for this CLI.
	}

	/** Retrieval of a running cluster is unsupported under FLIP-6. */
	@Override
	public YarnClusterClientV2 retrieveCluster(
			CommandLine cmdLine,
			Configuration config) throws UnsupportedOperationException {
		throw new UnsupportedOperationException("Not support retrieveCluster since Flip-6.");
	}

	/** Creates a client for a new per-job cluster described by the command line. */
	@Override
	public YarnClusterClientV2 createCluster(
			String applicationName,
			CommandLine cmdLine,
			Configuration config,
			List<URL> userJarFiles) throws Exception {
		Preconditions.checkNotNull(userJarFiles, "User jar files should not be null.");

		YarnClusterDescriptorV2 yarnClusterDescriptor = createDescriptor(applicationName, cmdLine);
		yarnClusterDescriptor.setFlinkConfiguration(config);
		yarnClusterDescriptor.setProvidedUserJarFiles(userJarFiles);

		return new YarnClusterClientV2(yarnClusterDescriptor, config);
	}

	// Logs at INFO and mirrors the message to stdout for interactive use.
	// NOTE(review): private and not called from this file region — verify before removing.
	private void logAndSysout(String message) {
		LOG.info(message);
		System.out.println(message);
	}
}
| |
/*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static java.util.Arrays.asList;
import static org.truth0.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Function;
import com.google.common.base.Functions;
import com.google.common.base.Optional;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.testing.IteratorFeature;
import com.google.common.collect.testing.IteratorTester;
import com.google.common.testing.NullPointerTester;
import junit.framework.TestCase;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.SortedSet;
import javax.annotation.Nullable;
/**
* Unit test for {@link FluentIterable}.
*
* @author Marcin Mikosik
*/
@GwtCompatible(emulated = true)
public class FluentIterableTest extends TestCase {
  // Null-hostility, from(), and size() behavior.
  @GwtIncompatible("NullPointerTester")
  public void testNullPointerExceptions() {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicStaticMethods(FluentIterable.class);
  }

  // from() must expose the wrapped iterable's elements unchanged.
  public void testFrom() {
    assertEquals(ImmutableList.of(1, 2, 3, 4),
        Lists.newArrayList(FluentIterable.from(ImmutableList.of(1, 2, 3, 4))));
  }

  @SuppressWarnings("deprecation") // test of deprecated method
  public void testFrom_alreadyFluentIterable() {
    // Wrapping an existing FluentIterable must return the same instance.
    FluentIterable<Integer> iterable = FluentIterable.from(asList(1));
    assertSame(iterable, FluentIterable.from(iterable));
  }

  public void testSize1Collection() {
    assertEquals(1, FluentIterable.from(asList("a")).size());
  }

  // size() must also work for a plain (non-Collection) Iterable, by iterating.
  public void testSize2NonCollection() {
    Iterable<Integer> iterable = new Iterable<Integer>() {
      @Override
      public Iterator<Integer> iterator() {
        return asList(0, 1).iterator();
      }
    };
    assertEquals(2, FluentIterable.from(iterable).size());
  }

  // For Collections, size() must delegate to Collection.size() without iterating.
  public void testSize_collectionDoesntIterate() {
    List<Integer> nums = asList(1, 2, 3, 4, 5);
    List<Integer> collection = new ArrayList<Integer>(nums) {
      @Override public Iterator<Integer> iterator() {
        fail("Don't iterate me!");
        return null;
      }
    };
    assertEquals(5, FluentIterable.from(collection).size());
  }
  // contains(): null and non-null lookups against Set-backed and plain iterables.
  public void testContains_nullSetYes() {
    Iterable<String> set = Sets.newHashSet("a", null, "b");
    assertTrue(FluentIterable.from(set).contains(null));
  }

  public void testContains_nullSetNo() {
    // ImmutableSortedSet cannot hold null, so a null probe must simply return false.
    Iterable<String> set = ImmutableSortedSet.of("a", "b");
    assertFalse(FluentIterable.from(set).contains(null));
  }

  public void testContains_nullIterableYes() {
    Iterable<String> iterable = iterable("a", null, "b");
    assertTrue(FluentIterable.from(iterable).contains(null));
  }

  public void testContains_nullIterableNo() {
    Iterable<String> iterable = iterable("a", "b");
    assertFalse(FluentIterable.from(iterable).contains(null));
  }

  public void testContains_nonNullSetYes() {
    Iterable<String> set = Sets.newHashSet("a", null, "b");
    assertTrue(FluentIterable.from(set).contains("b"));
  }

  public void testContains_nonNullSetNo() {
    Iterable<String> set = Sets.newHashSet("a", "b");
    assertFalse(FluentIterable.from(set).contains("c"));
  }

  public void testContains_nonNullIterableYes() {
    Iterable<String> set = iterable("a", null, "b");
    assertTrue(FluentIterable.from(set).contains("b"));
  }

  public void testContains_nonNullIterableNo() {
    Iterable<String> iterable = iterable("a", "b");
    assertFalse(FluentIterable.from(iterable).contains("c"));
  }

  // cycle(): endless repetition, and each iterator restarts from the beginning.
  public void testCycle() {
    FluentIterable<String> cycle = FluentIterable.from(asList("a", "b")).cycle();

    int howManyChecked = 0;
    for (String string : cycle) {
      String expected = (howManyChecked % 2 == 0) ? "a" : "b";
      assertEquals(expected, string);
      if (howManyChecked++ == 5) {
        break;
      }
    }

    // We left the last iterator pointing to "b". But a new iterator should
    // always point to "a".
    assertEquals("a", cycle.iterator().next());
  }

  // Removing every element through the cycling iterator must terminate the cycle.
  public void testCycle_removingAllElementsStopsCycle() {
    FluentIterable<Integer> cycle = fluent(1, 2).cycle();
    Iterator<Integer> iterator = cycle.iterator();
    iterator.next();
    iterator.remove();
    iterator.next();
    iterator.remove();
    assertFalse(iterator.hasNext());
    assertFalse(cycle.iterator().hasNext());
  }
/*
* Tests for partition(int size) method.
*/
/*
* Tests for partitionWithPadding(int size) method.
*/
  // filter() and the match predicates (anyMatch/allMatch/firstMatch).
  public void testFilter() {
    FluentIterable<String> filtered =
        FluentIterable.from(asList("foo", "bar")).filter(Predicates.equalTo("foo"));

    List<String> expected = Collections.singletonList("foo");
    List<String> actual = Lists.newArrayList(filtered);
    assertEquals(expected, actual);
    // The filtered view must be re-iterable and render via toString().
    assertCanIterateAgain(filtered);
    assertEquals("[foo]", filtered.toString());
  }

  // Helper hierarchy for the filter-by-Class test: HasBoth is both a TypeA and a TypeB.
  private static class TypeA {}
  private interface TypeB {}
  private static class HasBoth extends TypeA implements TypeB {}

  @GwtIncompatible("Iterables.filter(Iterable, Class)")
  public void testFilterByType() throws Exception {
    HasBoth hasBoth = new HasBoth();
    FluentIterable<TypeA> alist =
        FluentIterable.from(asList(new TypeA(), new TypeA(), hasBoth, new TypeA()));
    Iterable<TypeB> blist = alist.filter(TypeB.class);
    ASSERT.that(blist).iteratesOverSequence(hasBoth);
  }

  // anyMatch/allMatch are lazy views: mutating the backing list changes the result.
  public void testAnyMatch() {
    ArrayList<String> list = Lists.newArrayList();
    FluentIterable<String> iterable = FluentIterable.<String>from(list);
    Predicate<String> predicate = Predicates.equalTo("pants");

    assertFalse(iterable.anyMatch(predicate));
    list.add("cool");
    assertFalse(iterable.anyMatch(predicate));
    list.add("pants");
    assertTrue(iterable.anyMatch(predicate));
  }

  public void testAllMatch() {
    List<String> list = Lists.newArrayList();
    FluentIterable<String> iterable = FluentIterable.<String>from(list);
    Predicate<String> predicate = Predicates.equalTo("cool");

    // Vacuously true on the empty iterable.
    assertTrue(iterable.allMatch(predicate));
    list.add("cool");
    assertTrue(iterable.allMatch(predicate));
    list.add("pants");
    assertFalse(iterable.allMatch(predicate));
  }

  public void testFirstMatch() {
    FluentIterable<String> iterable = FluentIterable.from(Lists.newArrayList("cool", "pants"));
    assertEquals(Optional.of("cool"), iterable.firstMatch(Predicates.equalTo("cool")));
    assertEquals(Optional.of("pants"), iterable.firstMatch(Predicates.equalTo("pants")));
    assertEquals(Optional.absent(), iterable.firstMatch(Predicates.alwaysFalse()));
    assertEquals(Optional.of("cool"), iterable.firstMatch(Predicates.alwaysTrue()));
  }

  // Shared transform helper: parses each string as an Integer.
  private static final class IntegerValueOfFunction implements Function<String, Integer> {
    @Override
    public Integer apply(String from) {
      return Integer.valueOf(from);
    }
  }
  // transform() and transformAndConcat() behavior.
  public void testTransformWith() {
    List<String> input = asList("1", "2", "3");
    Iterable<Integer> iterable =
        FluentIterable.from(input).transform(new IntegerValueOfFunction());

    assertEquals(asList(1, 2, 3), Lists.newArrayList(iterable));
    assertCanIterateAgain(iterable);
    assertEquals("[1, 2, 3]", iterable.toString());
  }

  // transform() is lazy, so a function that throws only fails when the bad
  // element is actually reached during iteration.
  public void testTransformWith_poorlyBehavedTransform() {
    List<String> input = asList("1", null, "3");
    Iterable<Integer> iterable =
        FluentIterable.from(input).transform(new IntegerValueOfFunction());

    Iterator<Integer> resultIterator = iterable.iterator();
    resultIterator.next();

    try {
      resultIterator.next();
      fail("Transforming null to int should throw NumberFormatException");
    } catch (NumberFormatException expected) {
    }
  }

  // Null-tolerant transform helper: String.valueOf(null) yields "null".
  private static final class StringValueOfFunction implements Function<Integer, String> {
    @Override
    public String apply(Integer from) {
      return String.valueOf(from);
    }
  }

  public void testTransformWith_nullFriendlyTransform() {
    List<Integer> input = asList(1, 2, null, 3);
    Iterable<String> result = FluentIterable.from(input).transform(new StringValueOfFunction());

    assertEquals(asList("1", "2", "null", "3"), Lists.newArrayList(result));
  }

  // Maps each integer to a two-element list of its string form.
  private static final class RepeatedStringValueOfFunction
      implements Function<Integer, List<String>> {
    @Override
    public List<String> apply(Integer from) {
      String value = String.valueOf(from);
      return ImmutableList.of(value, value);
    }
  }

  public void testTransformAndConcat() {
    List<Integer> input = asList(1, 2, 3);
    Iterable<String> result =
        FluentIterable.from(input).transformAndConcat(new RepeatedStringValueOfFunction());
    assertEquals(asList("1", "1", "2", "2", "3", "3"), Lists.newArrayList(result));
  }

  // Same mapping, but declared with a wildcard element type.
  private static final class RepeatedStringValueOfWildcardFunction
      implements Function<Integer, List<? extends String>> {
    @Override
    public List<String> apply(Integer from) {
      String value = String.valueOf(from);
      return ImmutableList.of(value, value);
    }
  }

  // Compile-time check: transformAndConcat must accept a Function producing
  // Iterable<? extends T>. No runtime assertions needed.
  public void testTransformAndConcat_wildcardFunctionGenerics() {
    List<Integer> input = asList(1, 2, 3);
    FluentIterable.from(input).transformAndConcat(new RepeatedStringValueOfWildcardFunction());
  }
  // first()/last() across lists, sorted sets, general iterables, and empties.
  public void testFirst_list() {
    List<String> list = Lists.newArrayList("a", "b", "c");
    assertEquals("a", FluentIterable.from(list).first().get());
  }

  // first() returns an Optional, which rejects a null first element.
  public void testFirst_null() {
    List<String> list = Lists.newArrayList(null, "a", "b");
    try {
      FluentIterable.from(list).first();
      fail();
    } catch (NullPointerException expected) {
    }
  }

  public void testFirst_emptyList() {
    List<String> list = Collections.emptyList();
    assertEquals(Optional.absent(), FluentIterable.from(list).first());
  }

  // For a SortedSet, "first" means smallest in sort order, not insertion order.
  public void testFirst_sortedSet() {
    SortedSet<String> sortedSet = ImmutableSortedSet.of("b", "c", "a");
    assertEquals("a", FluentIterable.from(sortedSet).first().get());
  }

  public void testFirst_emptySortedSet() {
    SortedSet<String> sortedSet = ImmutableSortedSet.of();
    assertEquals(Optional.absent(), FluentIterable.from(sortedSet).first());
  }

  public void testFirst_iterable() {
    Set<String> set = ImmutableSet.of("a", "b", "c");
    assertEquals("a", FluentIterable.from(set).first().get());
  }

  public void testFirst_emptyIterable() {
    Set<String> set = Sets.newHashSet();
    assertEquals(Optional.absent(), FluentIterable.from(set).first());
  }

  public void testLast_list() {
    List<String> list = Lists.newArrayList("a", "b", "c");
    assertEquals("c", FluentIterable.from(list).last().get());
  }

  // last() likewise rejects a null final element via Optional.
  public void testLast_null() {
    List<String> list = Lists.newArrayList("a", "b", null);
    try {
      FluentIterable.from(list).last();
      fail();
    } catch (NullPointerException expected) {
    }
  }

  public void testLast_emptyList() {
    List<String> list = Collections.emptyList();
    assertEquals(Optional.absent(), FluentIterable.from(list).last());
  }

  // For a SortedSet, "last" means largest in sort order.
  public void testLast_sortedSet() {
    SortedSet<String> sortedSet = ImmutableSortedSet.of("b", "c", "a");
    assertEquals("c", FluentIterable.from(sortedSet).last().get());
  }

  public void testLast_emptySortedSet() {
    SortedSet<String> sortedSet = ImmutableSortedSet.of();
    assertEquals(Optional.absent(), FluentIterable.from(sortedSet).last());
  }

  public void testLast_iterable() {
    Set<String> set = ImmutableSet.of("a", "b", "c");
    assertEquals("c", FluentIterable.from(set).last().get());
  }

  public void testLast_emptyIterable() {
    Set<String> set = Sets.newHashSet();
    assertEquals(Optional.absent(), FluentIterable.from(set).last());
  }
public void testSkip_simple() {
Collection<String> set = ImmutableSet.of("a", "b", "c", "d", "e");
assertEquals(Lists.newArrayList("c", "d", "e"),
Lists.newArrayList(FluentIterable.from(set).skip(2)));
assertEquals("[c, d, e]", FluentIterable.from(set).skip(2).toString());
}
public void testSkip_simpleList() {
Collection<String> list = Lists.newArrayList("a", "b", "c", "d", "e");
assertEquals(Lists.newArrayList("c", "d", "e"),
Lists.newArrayList(FluentIterable.from(list).skip(2)));
assertEquals("[c, d, e]", FluentIterable.from(list).skip(2).toString());
}
// Skipping more elements than exist leaves nothing.
public void testSkip_pastEnd() {
  Collection<String> source = ImmutableSet.of("a", "b");
  assertEquals(Collections.emptyList(), Lists.newArrayList(FluentIterable.from(source).skip(20)));
}
// Skipping past the end of a list leaves nothing (list fast path).
public void testSkip_pastEndList() {
  Collection<String> source = Lists.newArrayList("a", "b");
  assertEquals(Collections.emptyList(), Lists.newArrayList(FluentIterable.from(source).skip(20)));
}
// skip(0) is a no-op.
public void testSkip_skipNone() {
  Collection<String> source = ImmutableSet.of("a", "b");
  assertEquals(Lists.newArrayList("a", "b"), Lists.newArrayList(FluentIterable.from(source).skip(0)));
}
// skip(0) is a no-op on the list fast path too.
public void testSkip_skipNoneList() {
  Collection<String> source = Lists.newArrayList("a", "b");
  assertEquals(Lists.newArrayList("a", "b"), Lists.newArrayList(FluentIterable.from(source).skip(0)));
}
// Exercises the skip() iterator contract (including remove()) via IteratorTester
// against a non-list backing collection.
public void testSkip_iterator() throws Exception {
  new IteratorTester<Integer>(5, IteratorFeature.MODIFIABLE, Lists.newArrayList(2, 3),
      IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<Integer> newTargetIterator() {
      Collection<Integer> source = Sets.newLinkedHashSet();
      Collections.addAll(source, 1, 2, 3);
      return FluentIterable.from(source).skip(1).iterator();
    }
  }.test();
}
// Exercises the skip() iterator contract against a list backing collection.
public void testSkip_iteratorList() throws Exception {
  new IteratorTester<Integer>(5, IteratorFeature.MODIFIABLE, Lists.newArrayList(2, 3),
      IteratorTester.KnownOrder.KNOWN_ORDER) {
    @Override protected Iterator<Integer> newTargetIterator() {
      return FluentIterable.from(Lists.newArrayList(1, 2, 3)).skip(1).iterator();
    }
  }.test();
}
// A non-structural modification (set) must be visible through an already-created
// iterator over the skip view.
public void testSkip_nonStructurallyModifiedList() throws Exception {
  List<String> source = Lists.newArrayList("a", "b", "c");
  FluentIterable<String> skipped = FluentIterable.from(source).skip(1);
  Iterator<String> it = skipped.iterator();
  source.set(2, "c2");
  assertEquals("b", it.next());
  assertEquals("c2", it.next());
  assertFalse(it.hasNext());
}
// Structural changes to the backing set are reflected when the skip view is iterated later.
public void testSkip_structurallyModifiedSkipSome() throws Exception {
  Collection<String> source = Sets.newLinkedHashSet();
  Collections.addAll(source, "a", "b", "c");
  FluentIterable<String> skipped = FluentIterable.from(source).skip(1);
  source.remove("b");
  source.addAll(Lists.newArrayList("X", "Y", "Z"));
  ASSERT.that(skipped).iteratesOverSequence("c", "X", "Y", "Z");
}
// Structural changes to the backing list are reflected when the skip view is iterated later.
public void testSkip_structurallyModifiedSkipSomeList() throws Exception {
  List<String> source = Lists.newArrayList("a", "b", "c");
  FluentIterable<String> skipped = FluentIterable.from(source).skip(1);
  source.subList(1, 3).clear();
  source.addAll(0, Lists.newArrayList("X", "Y", "Z"));
  ASSERT.that(skipped).iteratesOverSequence("Y", "Z", "a");
}
// If enough elements are removed that skip() consumes the whole set, the view is empty.
public void testSkip_structurallyModifiedSkipAll() throws Exception {
  Collection<String> source = Sets.newLinkedHashSet();
  Collections.addAll(source, "a", "b", "c");
  FluentIterable<String> skipped = FluentIterable.from(source).skip(2);
  source.remove("a");
  source.remove("b");
  assertFalse(skipped.iterator().hasNext());
}
// If enough elements are removed that skip() consumes the whole list, the view is empty.
public void testSkip_structurallyModifiedSkipAllList() throws Exception {
  List<String> source = Lists.newArrayList("a", "b", "c");
  FluentIterable<String> skipped = FluentIterable.from(source).skip(2);
  source.subList(0, 2).clear();
  ASSERT.that(skipped).isEmpty();
}
// Negative skip counts are rejected eagerly.
@SuppressWarnings("ReturnValueIgnored")
public void testSkip_illegalArgument() {
  try {
    FluentIterable.from(asList("a", "b", "c")).skip(-1);
    fail("Skipping negative number of elements should throw IllegalArgumentException.");
  } catch (IllegalArgumentException ok) {
  }
}
// limit(2) keeps only the first two elements and supports repeated iteration.
public void testLimit() {
  Iterable<String> source = Lists.newArrayList("foo", "bar", "baz");
  FluentIterable<String> firstTwo = FluentIterable.from(source).limit(2);
  assertEquals(ImmutableList.of("foo", "bar"), Lists.newArrayList(firstTwo));
  assertCanIterateAgain(firstTwo);
  assertEquals("[foo, bar]", firstTwo.toString());
}
// Negative limits are rejected eagerly.
@SuppressWarnings("ReturnValueIgnored")
public void testLimit_illegalArgument() {
  try {
    FluentIterable.from(Lists.newArrayList("a", "b", "c")).limit(-1);
    fail("Passing negative number to limit(...) method should throw IllegalArgumentException");
  } catch (IllegalArgumentException ok) {
  }
}
// isEmpty() mirrors whether the underlying iterable has elements.
public void testIsEmpty() {
  assertFalse(FluentIterable.<String>from(Lists.newArrayList("foo")).isEmpty());
  assertTrue(FluentIterable.<String>from(Collections.<String>emptyList()).isEmpty());
}
// toList() preserves encounter order.
public void testToList() {
  List<Integer> expected = Lists.newArrayList(1, 2, 3, 4);
  assertEquals(expected, fluent(1, 2, 3, 4).toList());
}
// toList() of an empty source is empty.
public void testToList_empty() {
  List<Integer> result = fluent().toList();
  assertTrue(result.isEmpty());
}
// toSortedList honours the supplied (reversed) comparator.
public void testToSortedList_withComparator() {
  List<Integer> descending = Lists.newArrayList(4, 3, 2, 1);
  assertEquals(descending, fluent(4, 1, 3, 2).toSortedList(Ordering.<Integer>natural().reverse()));
}
// toSortedList keeps duplicate elements.
public void testToSortedList_withDuplicates() {
  List<Integer> descending = Lists.newArrayList(4, 3, 1, 1);
  assertEquals(descending, fluent(1, 4, 1, 3).toSortedList(Ordering.<Integer>natural().reverse()));
}
// toSet() keeps all distinct elements in first-seen order.
public void testToSet() {
  Set<Integer> result = fluent(1, 2, 3, 4).toSet();
  ASSERT.that(result).has().exactly(1, 2, 3, 4).inOrder();
}
// toSet() collapses duplicates, keeping first-seen order.
public void testToSet_removeDuplicates() {
  Set<Integer> result = fluent(1, 2, 1, 2).toSet();
  ASSERT.that(result).has().exactly(1, 2).inOrder();
}
// toSet() of an empty source is empty.
public void testToSet_empty() {
  Set<Integer> result = fluent().toSet();
  assertTrue(result.isEmpty());
}
// toSortedSet orders elements by the supplied (reversed) comparator.
public void testToSortedSet() {
  SortedSet<Integer> result = fluent(1, 4, 2, 3).toSortedSet(Ordering.<Integer>natural().reverse());
  ASSERT.that(result).has().exactly(4, 3, 2, 1).inOrder();
}
// toSortedSet collapses duplicates while sorting.
public void testToSortedSet_removeDuplicates() {
  SortedSet<Integer> result = fluent(1, 4, 1, 3).toSortedSet(Ordering.<Integer>natural().reverse());
  ASSERT.that(result).has().exactly(4, 3, 1).inOrder();
}
// toMap keys are the elements themselves; values come from the function, in encounter order.
public void testToMap() {
  ImmutableMap<Integer, String> result = fluent(1, 2, 3).toMap(Functions.toStringFunction());
  ASSERT.that(result.entrySet())
      .has().exactly(
          Maps.immutableEntry(1, "1"),
          Maps.immutableEntry(2, "2"),
          Maps.immutableEntry(3, "3")).inOrder();
}
// A null element (used as a map key) must trigger NullPointerException.
public void testToMap_nullKey() {
  try {
    fluent(1, null, 2).toMap(Functions.constant("foo"));
    fail();
  } catch (NullPointerException ok) {
  }
}
// A function producing null values must trigger NullPointerException.
public void testToMap_nullValue() {
  try {
    fluent(1, 2, 3).toMap(Functions.constant(null));
    fail();
  } catch (NullPointerException ok) {
  }
}
// index() groups the elements as multimap values under the keys the function computes.
public void testIndex() {
  Function<String, Integer> byLength = new Function<String, Integer>() {
    @Override
    public Integer apply(String input) {
      return input.length();
    }
  };
  ImmutableListMultimap<Integer, String> index =
      FluentIterable.from(asList("one", "two", "three", "four")).index(byLength);
  ImmutableListMultimap<Integer, String> expected =
      ImmutableListMultimap.<Integer, String>builder()
          .putAll(3, "one", "two")
          .put(5, "three")
          .put(4, "four")
          .build();
  assertEquals(expected, index);
}
// A function producing null keys must trigger NullPointerException.
public void testIndex_nullKey() {
  try {
    fluent(1, 2, 3).index(Functions.constant(null));
    fail();
  } catch (NullPointerException ok) {
  }
}
// A null element (used as a multimap value) must trigger NullPointerException.
public void testIndex_nullValue() {
  try {
    fluent(1, null, 2).index(Functions.constant("foo"));
    fail();
  } catch (NullPointerException ok) {
  }
}
// uniqueIndex() maps each computed key to its single element.
public void testUniqueIndex() {
  Function<String, Integer> byLength = new Function<String, Integer>() {
    @Override
    public Integer apply(String input) {
      return input.length();
    }
  };
  ImmutableMap<Integer, String> index =
      FluentIterable.from(asList("two", "three", "four")).uniqueIndex(byLength);
  ImmutableMap<Integer, String> expected =
      ImmutableMap.of(3, "two", 5, "three", 4, "four");
  assertEquals(expected, index);
}
// uniqueIndex must reject two elements mapping to the same key ("one"/"two" both have length 3).
public void testUniqueIndex_duplicateKey() {
  Function<String, Integer> byLength = new Function<String, Integer>() {
    @Override
    public Integer apply(String input) {
      return input.length();
    }
  };
  try {
    FluentIterable.from(asList("one", "two", "three", "four")).uniqueIndex(byLength);
    fail();
  } catch (IllegalArgumentException ok) {
  }
}
// A function producing null keys must trigger NullPointerException.
public void testUniqueIndex_nullKey() {
  try {
    fluent(1, 2, 3).uniqueIndex(Functions.constant(null));
    fail();
  } catch (NullPointerException ok) {
  }
}
// Keys are non-null ("null" string), but the null element itself (the value) must trigger NPE.
public void testUniqueIndex_nullValue() {
  Function<Integer, Object> stringify = new Function<Integer, Object>() {
    @Override
    public Object apply(@Nullable Integer input) {
      return String.valueOf(input);
    }
  };
  try {
    fluent(1, null, 2).uniqueIndex(stringify);
    fail();
  } catch (NullPointerException ok) {
  }
}
// copyInto appends to the target list, keeping both existing and new order.
public void testCopyInto_List() {
  List<Integer> target = Lists.newArrayList(1, 2);
  ASSERT.that(fluent(1, 3, 5).copyInto(target)).has().exactly(1, 2, 1, 3, 5).inOrder();
}
// copyInto on a set adds the new distinct elements.
public void testCopyInto_Set() {
  Set<Integer> target = Sets.newHashSet(1, 2);
  ASSERT.that(fluent(1, 3, 5).copyInto(target)).has().exactly(1, 2, 3, 5);
}
// copyInto on a set that already contains every element is a no-op.
public void testCopyInto_SetAllDuplicates() {
  Set<Integer> target = Sets.newHashSet(1, 2, 3, 5);
  ASSERT.that(fluent(1, 3, 5).copyInto(target)).has().exactly(1, 2, 3, 5);
}
// copyInto must fall back to element-by-element iteration when the source is not a Collection.
public void testCopyInto_NonCollection() {
  final List<Integer> target = Lists.newArrayList(1, 2, 3);
  final List<Integer> backing = Lists.newArrayList(9, 8, 7);
  Iterable<Integer> source = new Iterable<Integer>() {
    @Override
    public Iterator<Integer> iterator() {
      return backing.iterator();
    }
  };
  ASSERT.that(FluentIterable.from(source).copyInto(target))
      .has().exactly(1, 2, 3, 9, 8, 7).inOrder();
}
// get(i) is positional access over the underlying iterable.
public void testGet() {
  List<String> letters = Lists.newArrayList("a", "b", "c");
  assertEquals("a", FluentIterable.from(letters).get(0));
  assertEquals("b", FluentIterable.from(letters).get(1));
  assertEquals("c", FluentIterable.from(letters).get(2));
}
// get(i) outside [0, size) must throw IndexOutOfBoundsException on either side.
public void testGet_outOfBounds() {
  List<String> letters = Lists.newArrayList("a", "b", "c");
  try {
    FluentIterable.from(letters).get(-1);
    fail();
  } catch (IndexOutOfBoundsException ok) {
  }
  try {
    FluentIterable.from(letters).get(3);
    fail();
  } catch (IndexOutOfBoundsException ok) {
  }
}
// Drains the iterable once to prove that a fresh traversal is possible.
private static void assertCanIterateAgain(Iterable<?> iterable) {
  Iterator<?> it = iterable.iterator();
  while (it.hasNext()) {
    it.next();
  }
}
// Wraps the given elements in a FluentIterable backed by a fresh mutable list.
private static FluentIterable<Integer> fluent(Integer... elements) {
  List<Integer> backing = Lists.newArrayList(elements);
  return FluentIterable.from(backing);
}
// Returns a bare Iterable view (deliberately not a Collection) over the given strings.
private static Iterable<String> iterable(String... elements) {
  final List<String> snapshot = asList(elements);
  return new Iterable<String>() {
    @Override
    public Iterator<String> iterator() {
      return snapshot.iterator();
    }
  };
}
}
| |
package com.ambrosoft.exercises;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created on 12/13/17
* <p>
* Atom: single capital letter or capital followed by lower-case, optionally followed by digits
* Problem: write custom parser for groups; create a parse tree
 * It is effectively an expression evaluator
* <p>
* Approach: think of grammar, propose overall structure
* One could also create a tree, with internal nodes containing counts and elements in the leaves
* For every leaf travel up multiplying counts along the path
*/
public class ParseMolecule {

    // Matches one token: an element symbol (capital + optional lower-case letter),
    // a run of digits, or a single bracket of any of the three kinds.
    // (UPPER_SNAKE_CASE for a constant; "\d+" — the original "[\d]+" class was redundant.)
    private static final Pattern TOKENIZE = Pattern.compile("[A-Z][a-z]?|\\d+|[()\\[\\]{}]");

    enum TokenType {
        OPEN,
        CLOSE,
        ELEMENT,
        COUNT
    }

    /** One lexical unit of the formula: a bracket, an element symbol, or a count. */
    private static class Token {
        final TokenType type;
        final String content;

        Token(TokenType tokenType, String content) {
            this.type = tokenType;
            this.content = content;
        }

        // FIX: the original declared equals(Token), which OVERLOADS rather than
        // overrides Object.equals — correct in the direct calls below, but silently
        // wrong if a Token ever lands in a collection. Override properly and pair
        // it with hashCode to honour the equals/hashCode contract.
        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof Token)) {
                return false;
            }
            Token other = (Token) obj;
            return other.type == type && other.content.equals(content);
        }

        @Override
        public int hashCode() {
            return 31 * type.hashCode() + content.hashCode();
        }

        @Override
        public String toString() {
            return String.format("%s %s", type, content);
        }

        /** Numeric value of a COUNT token; illegal on any other type. */
        int toInt() {
            if (type == TokenType.COUNT) {
                return Integer.parseInt(content);
            } else {
                throw new IllegalStateException("non-count token toInt");
            }
        }

        /** The CLOSE token matching this OPEN token's bracket kind; illegal otherwise. */
        Token matchingClose() {
            if (type == TokenType.OPEN) {
                switch (content.charAt(0)) {
                    case '(':
                        return new Token(TokenType.CLOSE, ")");
                    case '[':
                        return new Token(TokenType.CLOSE, "]");
                    case '{':
                        return new Token(TokenType.CLOSE, "}");
                    default:
                        throw new IllegalStateException("unknown open to close");
                }
            } else {
                throw new IllegalStateException("non-open token closing");
            }
        }
    }

    /** Mutable tally of element symbol -> atom count. */
    private static class Counts {
        private final HashMap<String, Integer> counts = new HashMap<>();

        /** Adds {@code count} atoms of {@code symbol} (accumulating). */
        void addCount(String symbol, int count) {
            counts.merge(symbol, count, Integer::sum);
        }

        /** Scales every count; used when a group carries a trailing multiplier. */
        void multiply(int factor) {
            counts.replaceAll((symbol, count) -> count * factor);
        }

        /** Folds every entry of {@code other} into this tally. */
        void add(Counts other) {
            for (Map.Entry<String, Integer> element : other.counts.entrySet()) {
                counts.merge(element.getKey(), element.getValue(), Integer::sum);
            }
        }

        Map<String, Integer> getMap() {
            return counts;
        }

        @Override
        public String toString() {
            return counts.toString();
        }
    }

    /** Parses the whole top-level token sequence into one tally. */
    private static Counts parseSeq(final Token[] tokens, int start) {
        final Counts counts = new Counts();
        while (start < tokens.length) {
            start = processToken(tokens, start, counts);
        }
        return counts;
    }

    /**
     * Parses a bracketed group that began just before {@code start}, folding its
     * (optionally multiplied) tally into {@code counts}.
     *
     * @param close the CLOSE token that must terminate this group
     * @return the index of the first token after the group (and its multiplier, if any)
     * @throws IllegalArgumentException if the matching close bracket is never found
     *         (this also rejects empty groups such as "()")
     */
    private static int parseGroup(Token[] tokens, int start, Counts counts, Token close) {
        final Counts groupCounts = new Counts();
        while (start < tokens.length && !tokens[start].equals(close)) {
            start = processToken(tokens, start, groupCounts);
            if (start < tokens.length && tokens[start].equals(close)) {
                // Group closed; apply an optional trailing multiplier before merging.
                if (start + 1 < tokens.length && tokens[start + 1].type == TokenType.COUNT) {
                    groupCounts.multiply(tokens[start + 1].toInt());
                    counts.add(groupCounts);
                    return start + 2;
                } else {
                    counts.add(groupCounts);
                    return start + 1;
                }
            }
        }
        throw new IllegalArgumentException("not found " + close);
    }

    /**
     * Consumes one element (with optional count) or one bracketed group at
     * {@code start}, accumulating into {@code counts}.
     *
     * @return the index of the next unconsumed token
     * @throws IllegalArgumentException on an unexpected token (e.g. a stray or
     *         mismatched close bracket)
     */
    private static int processToken(Token[] tokens, int start, Counts counts) {
        switch (tokens[start].type) {
            case ELEMENT:
                if (start + 1 < tokens.length && tokens[start + 1].type == TokenType.COUNT) {
                    counts.addCount(tokens[start].content, tokens[start + 1].toInt());
                    return start + 2;
                } else {
                    // No explicit count means exactly one atom.
                    counts.addCount(tokens[start].content, 1);
                    return start + 1;
                }
            case OPEN:
                return parseGroup(tokens, start + 1, counts, tokens[start].matchingClose());
            default:
                throw new IllegalArgumentException("unexpected token " + tokens[start]);
        }
    }

    /** Lexes the raw formula into a token array; unmatched characters are skipped. */
    private static Token[] tokenize(final String input) {
        final ArrayList<Token> list = new ArrayList<>();
        final Matcher tokens = TOKENIZE.matcher(input);
        while (tokens.find()) {
            final String fragment = tokens.group();
            switch (fragment.charAt(0)) {
                case '(':
                case '[':
                case '{':
                    list.add(new Token(TokenType.OPEN, fragment));
                    break;
                case ')':
                case ']':
                case '}':
                    list.add(new Token(TokenType.CLOSE, fragment));
                    break;
                default:
                    if (Character.isDigit(fragment.charAt(0))) {
                        list.add(new Token(TokenType.COUNT, fragment));
                    } else {
                        list.add(new Token(TokenType.ELEMENT, fragment));
                    }
            }
        }
        // Zero-length seed array: the JVM sizes the result itself.
        return list.toArray(new Token[0]);
    }

    /**
     * Parses a molecular formula such as "K4[ON(FeO3)2]2" into a map of
     * element symbol -> total atom count.
     *
     * @throws IllegalArgumentException if the formula yields no tokens or is malformed
     */
    public static Map<String, Integer> getAtoms(String formula) {
        final Token[] tokens = tokenize(formula);
        if (tokens.length == 0) {
            throw new IllegalArgumentException(formula);
        } else {
            return parseSeq(tokens, 0).getMap();
        }
    }

    /** Ad-hoc manual check: prints the token stream and the resulting tally. */
    static void test(String input) {
        Token[] tokens = tokenize(input);
        System.out.println(Arrays.toString(tokens));
        Counts counts = parseSeq(tokens, 0);
        System.out.println("counts = " + counts);
    }

    public static void main(String[] args) {
        // test("Mg(OH");
        // test("pie");
        test("K4[ON(FeO3)2]2");
        test("O");
        // test("Mg(OH]2");
        test("H2O");
        test("K2(CO2)3");
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import io.netty.util.Timeout;
import lombok.Cleanup;
import org.apache.pulsar.broker.service.Topic;
import org.apache.pulsar.broker.service.persistent.PersistentSubscription;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.MessageRouter;
import org.apache.pulsar.client.api.MessageRoutingMode;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.ProducerConsumerBase;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.client.api.SubscriptionInitialPosition;
import org.apache.pulsar.client.api.SubscriptionType;
import org.apache.pulsar.client.api.TopicMetadata;
import org.apache.pulsar.common.policies.data.ClusterData;
import org.apache.pulsar.common.policies.data.PartitionedTopicStats;
import org.apache.pulsar.common.policies.data.SubscriptionStats;
import org.apache.pulsar.common.policies.data.TenantInfoImpl;
import org.apache.pulsar.common.policies.data.TopicStats;
import org.awaitility.Awaitility;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
@SuppressWarnings({ "unchecked", "rawtypes" })
@Test(groups = "broker-impl")
public class TopicsConsumerImplTest extends ProducerConsumerBase {
// Per-test timeout in ms; generous because these are end-to-end broker tests.
private static final long testTimeout = 90000; // 1.5 min
private static final Logger log = LoggerFactory.getLogger(TopicsConsumerImplTest.class);
// Ack timeout applied to consumers so unacked messages are redelivered quickly in tests.
private final long ackTimeOutMillis = TimeUnit.SECONDS.toMillis(2);
@Override
@BeforeMethod
public void setup() throws Exception {
    // Boots the embedded broker; producerBaseSetup presumably provisions the
    // default tenant/namespace used by these tests — see ProducerConsumerBase.
    super.internalSetup();
    super.producerBaseSetup();
}
@Override
@AfterMethod(alwaysRun = true)
public void cleanup() throws Exception {
    // Tears down the embedded broker even if the test failed (alwaysRun = true).
    super.internalCleanup();
}
// Verify that subscribing to topics from different namespaces should return an error.
@Test(timeOut = testTimeout)
public void testDifferentTopicsNameSubscribe() throws Exception {
    String key = "TopicsFromDifferentNamespace";
    final String subscriptionName = "my-ex-subscription-" + key;
    // Three topics deliberately spread across three different namespaces.
    final String topicName1 = "persistent://prop/use/ns-abc1/topic-1-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc2/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc3/topic-3-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2, topicName3);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // 2. Create consumer
    // NOTE(review): despite the method comment, there is no fail() after
    // subscribe(), so the test also passes silently when no exception is thrown
    // — confirm whether cross-namespace subscribe is expected to succeed, and the
    // successfully-created consumer is never closed (resource leak in the test).
    try {
        Consumer consumer = pulsarClient.newConsumer()
            .topics(topicNames)
            .subscriptionName(subscriptionName)
            .subscriptionType(SubscriptionType.Shared)
            .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
            .subscribe();
        assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    } catch (IllegalArgumentException e) {
        // expected for have different namespace
    }
}
/**
 * Subscribes to topic1 (non-partitioned) + topic2 (2 partitions) via topics(),
 * plus topic3 (3 partitions) via topic(), and checks the multi-topics consumer
 * exposes all 6 partitions with one internal consumer each.
 */
@Test(timeOut = testTimeout)
public void testGetConsumersAndGetTopics() throws Exception {
    String key = "TopicsConsumerGet";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String topicName1 = "persistent://prop/use/ns-abc/topic-1-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc/topic-3-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // 2. Create consumer
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
        .topics(topicNames)
        .topic(topicName3)
        .subscriptionName(subscriptionName)
        .subscriptionType(SubscriptionType.Shared)
        .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
        .receiverQueueSize(4)
        .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    // The consumer-facing topic name is synthetic for multi-topics consumers.
    assertTrue(consumer.getTopic().startsWith(MultiTopicsConsumerImpl.DUMMY_TOPIC_NAME_PREFIX));
    List<String> topics = ((MultiTopicsConsumerImpl<byte[]>) consumer).getPartitions();
    List<ConsumerImpl<byte[]>> consumers = ((MultiTopicsConsumerImpl) consumer).getConsumers();
    topics.forEach(topic -> log.info("topic: {}", topic));
    consumers.forEach(c -> log.info("consumer: {}", c.getTopic()));
    // 1 + 2 + 3 partitions => 6 internal consumers, aligned index-by-index with the partitions.
    IntStream.range(0, 6).forEach(index ->
        assertEquals(consumers.get(index).getTopic(), topics.get(index)));
    // Only topic2 and topic3 are partitioned.
    assertEquals(((MultiTopicsConsumerImpl<byte[]>) consumer).getPartitionedTopics().size(), 2);
    consumer.unsubscribe();
    consumer.close();
}
/**
 * Synchronous end-to-end flow: publish 10 messages to each of three topics
 * (non-partitioned, 2-partition, 3-partition) and receive/ack all 30 through a
 * single multi-topics consumer.
 */
@Test(timeOut = testTimeout)
public void testSyncProducerAndConsumer() throws Exception {
    String key = "TopicsConsumerSyncTest";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String messagePredicate = "my-message-" + key + "-";
    final int totalMessages = 30;
    final String topicName1 = "persistent://prop/use/ns-abc/topic-1-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc/topic-3-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2, topicName3);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // 1. producer connect
    Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName1)
        .enableBatching(false)
        .messageRoutingMode(MessageRoutingMode.SinglePartition)
        .create();
    Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName2)
        .enableBatching(false)
        .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
        .create();
    Producer<byte[]> producer3 = pulsarClient.newProducer().topic(topicName3)
        .enableBatching(false)
        .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
        .create();
    // 2. Create consumer
    // Small receiver queue (4) so the consumer must keep re-fetching across partitions.
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
        .topics(topicNames)
        .subscriptionName(subscriptionName)
        .subscriptionType(SubscriptionType.Shared)
        .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
        .receiverQueueSize(4)
        .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    // 3. producer publish messages
    for (int i = 0; i < totalMessages / 3; i++) {
        producer1.send((messagePredicate + "producer1-" + i).getBytes());
        producer2.send((messagePredicate + "producer2-" + i).getBytes());
        producer3.send((messagePredicate + "producer3-" + i).getBytes());
    }
    // Drain with a 500ms poll; a null return means no more messages are pending.
    int messageSet = 0;
    Message<byte[]> message = consumer.receive();
    do {
        assertTrue(message instanceof TopicMessageImpl);
        messageSet ++;
        consumer.acknowledge(message);
        log.debug("Consumer acknowledged : " + new String(message.getData()));
        message = consumer.receive(500, TimeUnit.MILLISECONDS);
    } while (message != null);
    assertEquals(messageSet, totalMessages);
    consumer.unsubscribe();
    consumer.close();
    producer1.close();
    producer2.close();
    producer3.close();
}
/**
 * Asynchronous end-to-end flow: publish 30 messages via sendAsync across three
 * topics, then consume them with chained receiveAsync callbacks, counting down
 * a latch until all 30 are acknowledged.
 */
@Test(timeOut = testTimeout)
public void testAsyncConsumer() throws Exception {
    String key = "TopicsConsumerAsyncTest";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String messagePredicate = "my-message-" + key + "-";
    final int totalMessages = 30;
    final String topicName1 = "persistent://prop/use/ns-abc/topic-1-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc/topic-3-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2, topicName3);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // 1. producer connect
    Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName1)
        .enableBatching(false)
        .messageRoutingMode(MessageRoutingMode.SinglePartition)
        .create();
    Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName2)
        .enableBatching(false)
        .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
        .create();
    Producer<byte[]> producer3 = pulsarClient.newProducer().topic(topicName3)
        .enableBatching(false)
        .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
        .create();
    // 2. Create consumer
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
        .topics(topicNames)
        .subscriptionName(subscriptionName)
        .subscriptionType(SubscriptionType.Shared)
        .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
        .receiverQueueSize(4)
        .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    // Asynchronously produce messages
    List<Future<MessageId>> futures = Lists.newArrayList();
    for (int i = 0; i < totalMessages / 3; i++) {
        futures.add(producer1.sendAsync((messagePredicate + "producer1-" + i).getBytes()));
        futures.add(producer2.sendAsync((messagePredicate + "producer2-" + i).getBytes()));
        futures.add(producer3.sendAsync((messagePredicate + "producer3-" + i).getBytes()));
    }
    log.info("Waiting for async publish to complete : {}", futures.size());
    // Block until every publish has been persisted before consuming.
    for (Future<MessageId> future : futures) {
        future.get();
    }
    log.info("start async consume");
    CountDownLatch latch = new CountDownLatch(totalMessages);
    // Single-threaded executor issues all receiveAsync calls; @Cleanup shuts it down.
    @Cleanup("shutdownNow")
    ExecutorService executor = Executors.newFixedThreadPool(1);
    executor.execute(() -> IntStream.range(0, totalMessages).forEach(index ->
        consumer.receiveAsync()
            .thenAccept(msg -> {
                assertTrue(msg instanceof TopicMessageImpl);
                try {
                    consumer.acknowledge(msg);
                } catch (PulsarClientException e1) {
                    fail("message acknowledge failed", e1);
                }
                latch.countDown();
                log.info("receive index: {}, latch countDown: {}", index, latch.getCount());
            })
            .exceptionally(ex -> {
                log.warn("receive index: {}, failed receive message {}", index, ex.getMessage());
                ex.printStackTrace();
                return null;
            })));
    // Test relies on the @Test timeout to abort if messages never arrive.
    latch.await();
    log.info("success latch wait");
    consumer.unsubscribe();
    consumer.close();
    producer1.close();
    producer2.close();
    producer3.close();
}
/**
 * Verifies the unacked-message tracker of a multi-topics consumer across three
 * rounds: (a) receive without acking fills the tracker, then ack-timeout
 * redelivery empties it; (b) receive-and-ack keeps it at zero; (c) receive
 * without acking again, wait out the ack timeout, and confirm all 30 messages
 * are redelivered and the tracker drains to zero.
 */
@Test(timeOut = testTimeout)
public void testConsumerUnackedRedelivery() throws Exception {
    String key = "TopicsConsumerRedeliveryTest";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String messagePredicate = "my-message-" + key + "-";
    final int totalMessages = 30;
    final String topicName1 = "persistent://prop/use/ns-abc/topic-1-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc/topic-3-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2, topicName3);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // 1. producer connect
    Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName1)
        .enableBatching(false)
        .messageRoutingMode(MessageRoutingMode.SinglePartition)
        .create();
    Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName2)
        .enableBatching(false)
        .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
        .create();
    Producer<byte[]> producer3 = pulsarClient.newProducer().topic(topicName3)
        .enableBatching(false)
        .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
        .create();
    // 2. Create consumer
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
        .topics(topicNames)
        .subscriptionName(subscriptionName)
        .subscriptionType(SubscriptionType.Shared)
        .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
        .receiverQueueSize(4)
        .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    // 3. producer publish messages
    for (int i = 0; i < totalMessages / 3; i++) {
        producer1.send((messagePredicate + "producer1-" + i).getBytes());
        producer2.send((messagePredicate + "producer2-" + i).getBytes());
        producer3.send((messagePredicate + "producer3-" + i).getBytes());
    }
    // 4. Receiver receives the message, not ack, Unacked Message Tracker size should be totalMessages.
    Message<byte[]> message = consumer.receive();
    while (message != null) {
        assertTrue(message instanceof TopicMessageImpl);
        log.debug("Consumer received : " + new String(message.getData()));
        message = consumer.receive(500, TimeUnit.MILLISECONDS);
    }
    long size = ((MultiTopicsConsumerImpl<byte[]>) consumer).getUnAckedMessageTracker().size();
    log.debug(key + " Unacked Message Tracker size is " + size);
    assertEquals(size, totalMessages);
    // 5. Blocking call, redeliver should kick in, after receive and ack, Unacked Message Tracker size should be 0.
    message = consumer.receive();
    HashSet<String> hSet = new HashSet<>();
    do {
        assertTrue(message instanceof TopicMessageImpl);
        hSet.add(new String(message.getData()));
        consumer.acknowledge(message);
        log.debug("Consumer acknowledged : " + new String(message.getData()));
        message = consumer.receive(500, TimeUnit.MILLISECONDS);
    } while (message != null);
    size = ((MultiTopicsConsumerImpl<byte[]>) consumer).getUnAckedMessageTracker().size();
    log.debug(key + " Unacked Message Tracker size is " + size);
    assertEquals(size, 0);
    // The set de-duplicates, so 30 distinct payloads proves every message was redelivered.
    assertEquals(hSet.size(), totalMessages);
    // 6. producer publish more messages
    for (int i = 0; i < totalMessages / 3; i++) {
        producer1.send((messagePredicate + "producer1-round2" + i).getBytes());
        producer2.send((messagePredicate + "producer2-round2" + i).getBytes());
        producer3.send((messagePredicate + "producer3-round2" + i).getBytes());
    }
    // 7. Receiver receives the message, ack them
    message = consumer.receive();
    int received = 0;
    while (message != null) {
        assertTrue(message instanceof TopicMessageImpl);
        received++;
        String data = new String(message.getData());
        log.debug("Consumer received : " + data);
        consumer.acknowledge(message);
        message = consumer.receive(100, TimeUnit.MILLISECONDS);
    }
    size = ((MultiTopicsConsumerImpl<byte[]>) consumer).getUnAckedMessageTracker().size();
    log.debug(key + " Unacked Message Tracker size is " + size);
    assertEquals(size, 0);
    assertEquals(received, totalMessages);
    // 8. Simulate ackTimeout
    Thread.sleep(ackTimeOutMillis);
    // 9. producer publish more messages
    for (int i = 0; i < totalMessages / 3; i++) {
        producer1.send((messagePredicate + "producer1-round3" + i).getBytes());
        producer2.send((messagePredicate + "producer2-round3" + i).getBytes());
        producer3.send((messagePredicate + "producer3-round3" + i).getBytes());
    }
    // 10. Receiver receives the message, doesn't ack
    message = consumer.receive();
    while (message != null) {
        String data = new String(message.getData());
        log.debug("Consumer received : " + data);
        message = consumer.receive(100, TimeUnit.MILLISECONDS);
    }
    size = ((MultiTopicsConsumerImpl<byte[]>) consumer).getUnAckedMessageTracker().size();
    log.debug(key + " Unacked Message Tracker size is " + size);
    assertEquals(size, 30);
    // Wait past the ack timeout so the tracker triggers redelivery of all 30.
    Thread.sleep(ackTimeOutMillis);
    // 11. Receiver receives redelivered messages
    message = consumer.receive();
    int redelivered = 0;
    while (message != null) {
        assertTrue(message instanceof TopicMessageImpl);
        redelivered++;
        String data = new String(message.getData());
        log.debug("Consumer received : " + data);
        consumer.acknowledge(message);
        message = consumer.receive(2000, TimeUnit.MILLISECONDS);
    }
    assertEquals(redelivered, 30);
    size = ((MultiTopicsConsumerImpl<byte[]>) consumer).getUnAckedMessageTracker().size();
    log.info(key + " Unacked Message Tracker size is " + size);
    assertEquals(size, 0);
    consumer.unsubscribe();
    consumer.close();
    producer1.close();
    producer2.close();
    producer3.close();
}
/**
 * subscribeAsync on an existing multi-topics consumer must fail for an invalid
 * (short-form) topic name and for a topic the consumer is already subscribed to.
 * NOTE(review): the consumer is never closed or unsubscribed in this test.
 */
@Test
public void testTopicNameValid() throws Exception{
    final String topicName = "persistent://prop/use/ns-abc/testTopicNameValid";
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName, 3);
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
        .topic(topicName)
        .subscriptionName("subscriptionName")
        .subscribe();
    // Invalid topic name: the future completes exceptionally; assertions inside
    // handle() propagate through the returned future to get().
    ((MultiTopicsConsumerImpl) consumer).subscribeAsync("ns-abc/testTopicNameValid", 5).handle((res, exception) -> {
        assertTrue(exception instanceof PulsarClientException.AlreadyClosedException);
        assertEquals(((PulsarClientException.AlreadyClosedException) exception).getMessage(), "Topic name not valid");
        return null;
    }).get();
    // Duplicate subscription to the same topic is also rejected.
    ((MultiTopicsConsumerImpl) consumer).subscribeAsync(topicName, 3).handle((res, exception) -> {
        assertTrue(exception instanceof PulsarClientException.AlreadyClosedException);
        assertEquals(((PulsarClientException.AlreadyClosedException) exception).getMessage(), "Already subscribed to " + topicName);
        return null;
    }).get();
}
/**
 * Verifies that a single topic can be dynamically unsubscribed from — and later
 * re-subscribed to — on a multi-topics consumer, and that message flow and the
 * consumer's internal bookkeeping (partitions, per-partition consumers,
 * partitioned-topic count) follow suit at each step.
 */
@Test
public void testSubscribeUnsubscribeSingleTopic() throws Exception {
    String key = "TopicsConsumerSubscribeUnsubscribeSingleTopicTest";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String messagePredicate = "my-message-" + key + "-";
    final int totalMessages = 30;
    final String topicName1 = "persistent://prop/use/ns-abc/topic-1-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc/topic-3-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2, topicName3);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    // topic1 stays non-partitioned; topic2/topic3 get 2 and 3 partitions respectively.
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // 1. producer connect
    Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName1)
            .enableBatching(false)
            .messageRoutingMode(MessageRoutingMode.SinglePartition)
            .create();
    Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName2)
            .enableBatching(false)
            .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
            .create();
    Producer<byte[]> producer3 = pulsarClient.newProducer().topic(topicName3)
            .enableBatching(false)
            .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
            .create();
    // 2. Create consumer
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
            .topics(topicNames)
            .subscriptionName(subscriptionName)
            .subscriptionType(SubscriptionType.Shared)
            .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
            .receiverQueueSize(4)
            .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    // 3. producer publish messages
    for (int i = 0; i < totalMessages / 3; i++) {
        producer1.send((messagePredicate + "producer1-" + i).getBytes());
        producer2.send((messagePredicate + "producer2-" + i).getBytes());
        producer3.send((messagePredicate + "producer3-" + i).getBytes());
    }
    // Drain everything published so far; 500 ms without a message ends the loop.
    int messageSet = 0;
    Message<byte[]> message = consumer.receive();
    do {
        assertTrue(message instanceof TopicMessageImpl);
        messageSet ++;
        consumer.acknowledge(message);
        log.debug("Consumer acknowledged : " + new String(message.getData()));
        message = consumer.receive(500, TimeUnit.MILLISECONDS);
    } while (message != null);
    assertEquals(messageSet, totalMessages);
    // 4, unsubscribe topic3
    CompletableFuture<Void> unsubFuture = ((MultiTopicsConsumerImpl<byte[]>) consumer).unsubscribeAsync(topicName3);
    unsubFuture.get();
    // 5. producer publish messages
    for (int i = 0; i < totalMessages / 3; i++) {
        producer1.send((messagePredicate + "producer1-round2" + i).getBytes());
        producer2.send((messagePredicate + "producer2-round2" + i).getBytes());
        producer3.send((messagePredicate + "producer3-round2" + i).getBytes());
    }
    // 6. should not receive messages from topic3, verify get 2/3 of all messages
    messageSet = 0;
    message = consumer.receive();
    do {
        assertTrue(message instanceof TopicMessageImpl);
        messageSet ++;
        consumer.acknowledge(message);
        log.debug("Consumer acknowledged : " + new String(message.getData()));
        message = consumer.receive(500, TimeUnit.MILLISECONDS);
    } while (message != null);
    assertEquals(messageSet, totalMessages * 2 / 3);
    // 7. use getter to verify internal topics number after un-subscribe topic3
    // Remaining: topic1 (1) + topic2 (2 partitions) = 3 partition consumers,
    // and a single partitioned topic (topic2) in the partitioned-topics map.
    List<String> topics = ((MultiTopicsConsumerImpl<byte[]>) consumer).getPartitions();
    List<ConsumerImpl<byte[]>> consumers = ((MultiTopicsConsumerImpl) consumer).getConsumers();
    assertEquals(topics.size(), 3);
    assertEquals(consumers.size(), 3);
    assertEquals(((MultiTopicsConsumerImpl<byte[]>) consumer).getPartitionedTopics().size(), 1);
    // 8. re-subscribe topic3
    CompletableFuture<Void> subFuture = ((MultiTopicsConsumerImpl<byte[]>)consumer).subscribeAsync(topicName3, true);
    subFuture.get();
    // 9. producer publish messages
    for (int i = 0; i < totalMessages / 3; i++) {
        producer1.send((messagePredicate + "producer1-round3" + i).getBytes());
        producer2.send((messagePredicate + "producer2-round3" + i).getBytes());
        producer3.send((messagePredicate + "producer3-round3" + i).getBytes());
    }
    // 10. should receive messages from all 3 topics
    messageSet = 0;
    message = consumer.receive();
    do {
        assertTrue(message instanceof TopicMessageImpl);
        messageSet ++;
        consumer.acknowledge(message);
        log.debug("Consumer acknowledged : " + new String(message.getData()));
        message = consumer.receive(500, TimeUnit.MILLISECONDS);
    } while (message != null);
    assertEquals(messageSet, totalMessages);
    // 11. use getter to verify internal topics number after subscribe topic3
    // Back to 1 + 2 + 3 = 6 partition consumers and two partitioned topics.
    topics = ((MultiTopicsConsumerImpl<byte[]>) consumer).getPartitions();
    consumers = ((MultiTopicsConsumerImpl) consumer).getConsumers();
    assertEquals(topics.size(), 6);
    assertEquals(consumers.size(), 6);
    assertEquals(((MultiTopicsConsumerImpl<byte[]>) consumer).getPartitionedTopics().size(), 2);
    consumer.unsubscribe();
    consumer.close();
    producer1.close();
    producer2.close();
    producer3.close();
}
/**
 * Verifies that subscribing again to a topic the consumer already covers fails with
 * "Already subscribed", for every alias of the topic: namespace-qualified name,
 * fully-qualified (domain) name, and an individual partition name.
 */
@Test
public void testResubscribeSameTopic() throws Exception {
    final String localTopicName = "TopicsConsumerResubscribeSameTopicTest";
    final String localPartitionName = localTopicName + "-partition-0";
    final String topicNameWithNamespace = "public/default/" + localTopicName;
    final String topicNameWithDomain = "persistent://" + topicNameWithNamespace;
    admin.topics().createPartitionedTopic(localTopicName, 2);
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
            .topic(localTopicName)
            .subscriptionName("SubscriptionName")
            .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    MultiTopicsConsumerImpl<byte[]> multiTopicsConsumer = (MultiTopicsConsumerImpl<byte[]>) consumer;
    // Every alias of the already-subscribed topic must be rejected identically.
    assertResubscribeFails(multiTopicsConsumer, topicNameWithNamespace);
    assertResubscribeFails(multiTopicsConsumer, topicNameWithDomain);
    assertResubscribeFails(multiTopicsConsumer, localPartitionName);
    consumer.unsubscribe();
    consumer.close();
}

/** Asserts that subscribing to {@code topic} fails because the consumer already covers it. */
private static void assertResubscribeFails(MultiTopicsConsumerImpl<byte[]> consumer, String topic) throws Exception {
    consumer.subscribeAsync(topic, false).handle((res, exception) -> {
        assertTrue(exception instanceof PulsarClientException.AlreadyClosedException);
        assertEquals(exception.getMessage(), "Already subscribed to " + topic);
        return null;
    }).get();
}
/**
 * Verifies that the consumer builder rejects every form of "no topics":
 * no topic set at all, an empty varargs call, a null topics list, and an
 * empty topics list. Note the expected exception types differ: omitting the
 * topic entirely surfaces as a PulsarClientException at subscribe time, while
 * the explicit empty/null forms fail fast with IllegalArgumentException.
 */
@Test(timeOut = testTimeout)
public void testTopicsNameSubscribeWithBuilderFail() throws Exception {
    String key = "TopicsNameSubscribeWithBuilder";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc/topic-3-" + key;
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // test failing builder with empty topics
    try {
        // No .topic()/.topics() call at all.
        pulsarClient.newConsumer()
                .subscriptionName(subscriptionName)
                .subscriptionType(SubscriptionType.Shared)
                .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
                .subscribe();
        fail("subscribe1 with no topicName should fail.");
    } catch (PulsarClientException e) {
        // expected
    }
    try {
        // Empty varargs: .topic() with zero arguments.
        pulsarClient.newConsumer()
                .topic()
                .subscriptionName(subscriptionName)
                .subscriptionType(SubscriptionType.Shared)
                .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
                .subscribe();
        fail("subscribe2 with no topicName should fail.");
    } catch (IllegalArgumentException e) {
        // expected
    }
    try {
        // Explicit null topics list.
        pulsarClient.newConsumer()
                .topics(null)
                .subscriptionName(subscriptionName)
                .subscriptionType(SubscriptionType.Shared)
                .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
                .subscribe();
        fail("subscribe3 with no topicName should fail.");
    } catch (IllegalArgumentException e) {
        // expected
    }
    try {
        // Explicit empty topics list.
        pulsarClient.newConsumer()
                .topics(Lists.newArrayList())
                .subscriptionName(subscriptionName)
                .subscriptionType(SubscriptionType.Shared)
                .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
                .subscribe();
        fail("subscribe4 with no topicName should fail.");
    } catch (IllegalArgumentException e) {
        // expected
    }
}
/**
 * Test listener for github issue #2547: messages that are never acknowledged from a
 * multi-topics message listener must be redelivered after the ack timeout. The latch
 * is sized at 3x the message count, so it can only reach zero through redeliveries.
 */
@Test(timeOut = 30000)
public void testMultiTopicsMessageListener() throws Exception {
    String key = "MultiTopicsMessageListenerTest";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String messagePredicate = "my-message-" + key + "-";
    final int totalMessages = 6;
    // set latch larger than totalMessages, so timeout message get resend
    CountDownLatch latch = new CountDownLatch(totalMessages * 3);
    final String topicName1 = "persistent://prop/use/ns-abc/topic-1-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName1, 2);
    // 1. producer connect
    Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName1)
            .enableBatching(false)
            .messageRoutingMode(MessageRoutingMode.SinglePartition)
            .create();
    // 2. Create consumer, set not ack in message listener, so time-out message will resend
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
            .topics(topicNames)
            .subscriptionName(subscriptionName)
            .subscriptionType(SubscriptionType.Shared)
            .ackTimeout(1000, TimeUnit.MILLISECONDS)
            .receiverQueueSize(100)
            .messageListener((c1, msg) -> {
                assertNotNull(msg, "Message cannot be null");
                String receivedMessage = new String(msg.getData());
                latch.countDown();
                log.info("Received message [{}] in the listener, latch: {}",
                        receivedMessage, latch.getCount());
                // since not acked, it should retry another time
                //c1.acknowledgeAsync(msg);
            })
            .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    // 3. producer publish messages
    for (int i = 0; i < totalMessages; i++) {
        producer1.send((messagePredicate + "producer1-" + i).getBytes());
    }
    // verify should not time out, because of message redelivered several times.
    latch.await();
    // Fix: close the producer as well so the test does not leak a producer connection.
    producer1.close();
    consumer.close();
}
/**
 * Test topic partitions auto subscribed.
 *
 * Steps:
 * 1. Create a consumer with 2 topics, and each topic has 2 partitions: xx-partition-0, xx-partition-1.
 * 2. update topics to have 3 partitions.
 * 3. trigger partitionsAutoUpdate. this should be done automatically, this is to save time to manually trigger.
 * 4. produce message to xx-partition-2 again, and verify consumer could receive message.
 */
@Test(timeOut = 30000)
public void testTopicAutoUpdatePartitions() throws Exception {
    String key = "TestTopicAutoUpdatePartitions";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String messagePredicate = "my-message-" + key + "-";
    final int totalMessages = 6;
    final String topicName1 = "persistent://my-property/my-ns/topic-1-" + key;
    final String topicName2 = "persistent://my-property/my-ns/topic-2-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName1, 2);
    admin.topics().createPartitionedTopic(topicName2, 2);
    // 1. Create a consumer with partition auto-update enabled.
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
            .topics(topicNames)
            .subscriptionName(subscriptionName)
            .subscriptionType(SubscriptionType.Shared)
            .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
            .receiverQueueSize(4)
            .autoUpdatePartitions(true)
            .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    MultiTopicsConsumerImpl topicsConsumer = (MultiTopicsConsumerImpl) consumer;
    // 2. update to 3 partitions
    admin.topics().updatePartitionedTopic(topicName1, 3);
    admin.topics().updatePartitionedTopic(topicName2, 3);
    // 3. trigger partitionsAutoUpdate. this should be done automatically in 1 minutes,
    // this is to save time to manually trigger.
    log.info("trigger partitionsAutoUpdateTimerTask");
    Timeout timeout = topicsConsumer.getPartitionsAutoUpdateTimeout();
    timeout.task().run(timeout);
    Thread.sleep(200);
    // 4. produce message to xx-partition-2, and verify consumer could receive message.
    Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName1 + "-partition-2")
            .enableBatching(false)
            .create();
    Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName2 + "-partition-2")
            .enableBatching(false)
            .create();
    for (int i = 0; i < totalMessages; i++) {
        producer1.send((messagePredicate + "topic1-partition-2 index:" + i).getBytes());
        producer2.send((messagePredicate + "topic2-partition-2 index:" + i).getBytes());
        log.info("produce message to partition-2 again. messageindex: {}", i);
    }
    int messageSet = 0;
    Message<byte[]> message = consumer.receive();
    do {
        messageSet ++;
        consumer.acknowledge(message);
        log.info("4 Consumer acknowledged : " + new String(message.getData()));
        message = consumer.receive(200, TimeUnit.MILLISECONDS);
    } while (message != null);
    assertEquals(messageSet, 2 * totalMessages);
    // Fix: close both producers so the test does not leak producer connections.
    producer1.close();
    producer2.close();
    consumer.close();
}
/**
 * Verifies that partitions of a Failover subscription are evenly distributed across
 * consumers: a single consumer owns all partitions, keeps up after the partition
 * count grows from 2 to 4, and when a second consumer joins, each consumer becomes
 * the active consumer on exactly half of the partitions and receives half the traffic.
 */
@Test(timeOut = testTimeout)
public void testConsumerDistributionInFailoverSubscriptionWhenUpdatePartitions() throws Exception {
    final String topicName = "persistent://my-property/my-ns/testConsumerDistributionInFailoverSubscriptionWhenUpdatePartitions";
    final String subName = "failover-test";
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName, 2);
    assertEquals(admin.topics().getPartitionedTopicMetadata(topicName).partitions, 2);
    Consumer<String> consumer_1 = pulsarClient.newConsumer(Schema.STRING)
            .topic(topicName)
            .subscriptionType(SubscriptionType.Failover)
            .subscriptionName(subName)
            .subscribe();
    assertTrue(consumer_1 instanceof MultiTopicsConsumerImpl);
    assertEquals(((MultiTopicsConsumerImpl) consumer_1).allTopicPartitionsNumber.get(), 2);
    // Route by numeric message key so each key deterministically maps to a partition.
    Producer<String> producer = pulsarClient.newProducer(Schema.STRING)
            .topic(topicName)
            .messageRouter(new MessageRouter() {
                @Override
                public int choosePartition(Message<?> msg, TopicMetadata metadata) {
                    return Integer.parseInt(msg.getKey()) % metadata.numPartitions();
                }
            })
            .create();
    final int messages = 20;
    for (int i = 0; i < messages; i++) {
        producer.newMessage().key(String.valueOf(i)).value("message - " + i).send();
    }
    // The lone consumer must receive every message.
    int received = 0;
    Message lastMessage = null;
    for (int i = 0; i < messages; i++) {
        lastMessage = consumer_1.receive();
        received++;
    }
    assertEquals(received, messages);
    consumer_1.acknowledgeCumulative(lastMessage);
    // 1.Update partition and check message consumption
    admin.topics().updatePartitionedTopic(topicName, 4);
    log.info("trigger partitionsAutoUpdateTimerTask");
    // Run the partitions auto-update task by hand instead of waiting for its timer.
    Timeout timeout = ((MultiTopicsConsumerImpl) consumer_1).getPartitionsAutoUpdateTimeout();
    timeout.task().run(timeout);
    Thread.sleep(200);
    assertEquals(((MultiTopicsConsumerImpl) consumer_1).allTopicPartitionsNumber.get(), 4);
    for (int i = 0; i < messages; i++) {
        producer.newMessage().key(String.valueOf(i)).value("message - " + i).send();
    }
    received = 0;
    lastMessage = null;
    for (int i = 0; i < messages; i++) {
        lastMessage = consumer_1.receive();
        received++;
    }
    assertEquals(received, messages);
    consumer_1.acknowledgeCumulative(lastMessage);
    // 2.Create a new consumer and check active consumer changed
    Consumer<String> consumer_2 = pulsarClient.newConsumer(Schema.STRING)
            .topic(topicName)
            .subscriptionType(SubscriptionType.Failover)
            .subscriptionName(subName)
            .subscribe();
    assertTrue(consumer_2 instanceof MultiTopicsConsumerImpl);
    assertEquals(((MultiTopicsConsumerImpl) consumer_1).allTopicPartitionsNumber.get(), 4);
    for (int i = 0; i < messages; i++) {
        producer.newMessage().key(String.valueOf(i)).value("message - " + i).send();
    }
    // Count, per consumer name, how many partitions report it as the active consumer.
    Map<String, AtomicInteger> activeConsumers = new HashMap<>();
    PartitionedTopicStats stats = admin.topics().getPartitionedStats(topicName, true);
    for (TopicStats value : stats.getPartitions().values()) {
        for (SubscriptionStats subscriptionStats : value.getSubscriptions().values()) {
            assertTrue(subscriptionStats.getActiveConsumerName().equals(consumer_1.getConsumerName())
                    || subscriptionStats.getActiveConsumerName().equals(consumer_2.getConsumerName()));
            activeConsumers.putIfAbsent(subscriptionStats.getActiveConsumerName(), new AtomicInteger(0));
            activeConsumers.get(subscriptionStats.getActiveConsumerName()).incrementAndGet();
        }
    }
    // With 4 partitions and 2 failover consumers, each must be active on exactly 2.
    assertEquals(activeConsumers.get(consumer_1.getConsumerName()).get(), 2);
    assertEquals(activeConsumers.get(consumer_2.getConsumerName()).get(), 2);
    // 4.Check new consumer can receive half of total messages
    received = 0;
    lastMessage = null;
    for (int i = 0; i < messages / 2; i++) {
        lastMessage = consumer_1.receive();
        received++;
    }
    assertEquals(received, messages / 2);
    consumer_1.acknowledgeCumulative(lastMessage);
    received = 0;
    lastMessage = null;
    for (int i = 0; i < messages / 2; i++) {
        lastMessage = consumer_2.receive();
        received++;
    }
    assertEquals(received, messages / 2);
    consumer_2.acknowledgeCumulative(lastMessage);
}
/**
 * Verifies the broker-level default backlog TTL: messages younger than the TTL must
 * remain in the subscription backlog after an expiry check, and messages older than
 * the TTL must be removed by the next expiry check.
 */
@Test(timeOut = testTimeout)
public void testDefaultBacklogTTL() throws Exception {
    int defaultTTLSec = 5;
    int totalMessages = 10;
    this.conf.setTtlDurationDefaultInSeconds(defaultTTLSec);
    final String namespace = "prop/use/expiry";
    final String topicName = "persistent://" + namespace + "/expiry";
    final String subName = "expiredSub";
    admin.clusters().createCluster("use", ClusterData.builder().serviceUrl(brokerUrl.toString()).build());
    admin.tenants().createTenant("prop", new TenantInfoImpl(null, Sets.newHashSet("use")));
    admin.namespaces().createNamespace(namespace);
    // Create the subscription first (then disconnect) so published messages accumulate as backlog.
    Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subName)
            .subscriptionType(SubscriptionType.Shared).ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
            .subscribe();
    consumer.close();
    Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).enableBatching(false).create();
    for (int i = 0; i < totalMessages; i++) {
        producer.send(("" + i).getBytes());
    }
    // Fix: close the producer once publishing is done so the test does not leak a connection.
    producer.close();
    Optional<Topic> topic = pulsar.getBrokerService().getTopic(topicName, false).get();
    assertTrue(topic.isPresent());
    PersistentSubscription subscription = (PersistentSubscription) topic.get().getSubscription(subName);
    // Expiry check just before the TTL elapses must not remove anything.
    Thread.sleep((defaultTTLSec - 1) * 1000);
    topic.get().checkMessageExpiry();
    // Wait the message expire task done and make sure the message does not expire early.
    Thread.sleep(1000);
    assertEquals(subscription.getNumberOfEntriesInBacklog(false), 10);
    // Now cross the TTL boundary; the next expiry check must drain the backlog.
    Thread.sleep(2000);
    topic.get().checkMessageExpiry();
    // Wait the message expire task done and make sure the message expired.
    retryStrategically((test) -> subscription.getNumberOfEntriesInBacklog(false) == 0, 5, 200);
    assertEquals(subscription.getNumberOfEntriesInBacklog(false), 0);
}
/**
 * Verifies {@code getLastMessageId()} on a multi-topics consumer: the returned
 * {@link MultiMessageIdImpl} must contain one entry per partition (1 + 2 + 3 = 6),
 * and each partition's entryId must reflect how many messages round-robin routing
 * placed on it (all 30 on the non-partitioned topic, 15 per partition on the
 * 2-partition topic, 10 per partition on the 3-partition topic).
 */
@Test(timeOut = testTimeout)
public void testGetLastMessageId() throws Exception {
    String key = "TopicGetLastMessageId";
    final String subscriptionName = "my-ex-subscription-" + key;
    final String messagePredicate = "my-message-" + key + "-";
    final int totalMessages = 30;
    final String topicName1 = "persistent://prop/use/ns-abc/topic-1-" + key;
    final String topicName2 = "persistent://prop/use/ns-abc/topic-2-" + key;
    final String topicName3 = "persistent://prop/use/ns-abc/topic-3-" + key;
    List<String> topicNames = Lists.newArrayList(topicName1, topicName2, topicName3);
    TenantInfoImpl tenantInfo = createDefaultTenantInfo();
    admin.tenants().createTenant("prop", tenantInfo);
    admin.topics().createPartitionedTopic(topicName2, 2);
    admin.topics().createPartitionedTopic(topicName3, 3);
    // 1. producer connect
    Producer<byte[]> producer1 = pulsarClient.newProducer().topic(topicName1)
            .enableBatching(false)
            .messageRoutingMode(MessageRoutingMode.SinglePartition)
            .create();
    Producer<byte[]> producer2 = pulsarClient.newProducer().topic(topicName2)
            .enableBatching(false)
            .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
            .create();
    Producer<byte[]> producer3 = pulsarClient.newProducer().topic(topicName3)
            .enableBatching(false)
            .messageRoutingMode(org.apache.pulsar.client.api.MessageRoutingMode.RoundRobinPartition)
            .create();
    // 2. Create consumer
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
            .topics(topicNames)
            .subscriptionName(subscriptionName)
            .subscriptionType(SubscriptionType.Shared)
            .ackTimeout(ackTimeOutMillis, TimeUnit.MILLISECONDS)
            .receiverQueueSize(4)
            .subscribe();
    assertTrue(consumer instanceof MultiTopicsConsumerImpl);
    // 3. producer publish messages
    for (int i = 0; i < totalMessages; i++) {
        producer1.send((messagePredicate + "producer1-" + i).getBytes());
        producer2.send((messagePredicate + "producer2-" + i).getBytes());
        producer3.send((messagePredicate + "producer3-" + i).getBytes());
    }
    MessageId messageId = consumer.getLastMessageId();
    assertTrue(messageId instanceof MultiMessageIdImpl);
    MultiMessageIdImpl multiMessageId = (MultiMessageIdImpl) messageId;
    Map<String, MessageId> map = multiMessageId.getMap();
    // One entry per partition: 1 (topic1) + 2 (topic2) + 3 (topic3).
    assertEquals(map.size(), 6);
    map.forEach((k, v) -> {
        log.info("topic: {}, messageId:{} ", k, v.toString());
        assertTrue(v instanceof MessageIdImpl);
        MessageIdImpl messageId1 = (MessageIdImpl) v;
        if (k.contains(topicName1)) {
            // topic1 is non-partitioned: all 30 messages land on it, last entryId = 29.
            assertEquals(messageId1.entryId, totalMessages - 1);
        } else if (k.contains(topicName2)) {
            // 30 messages round-robined over 2 partitions: 15 each, last entryId = 14.
            assertEquals(messageId1.entryId, totalMessages / 2 - 1);
        } else {
            // 30 messages round-robined over 3 partitions: 10 each, last entryId = 9.
            assertEquals(messageId1.entryId, totalMessages / 3 - 1);
        }
    });
    // Publish a second batch and verify the last-message ids advanced accordingly.
    for (int i = 0; i < totalMessages; i++) {
        producer1.send((messagePredicate + "producer1-" + i).getBytes());
        producer2.send((messagePredicate + "producer2-" + i).getBytes());
        producer3.send((messagePredicate + "producer3-" + i).getBytes());
    }
    messageId = consumer.getLastMessageId();
    assertTrue(messageId instanceof MultiMessageIdImpl);
    MultiMessageIdImpl multiMessageId2 = (MultiMessageIdImpl) messageId;
    Map<String, MessageId> map2 = multiMessageId2.getMap();
    assertEquals(map2.size(), 6);
    map2.forEach((k, v) -> {
        log.info("topic: {}, messageId:{} ", k, v.toString());
        assertTrue(v instanceof MessageIdImpl);
        MessageIdImpl messageId1 = (MessageIdImpl) v;
        if (k.contains(topicName1)) {
            assertEquals(messageId1.entryId, totalMessages * 2 - 1);
        } else if (k.contains(topicName2)) {
            assertEquals(messageId1.entryId, totalMessages - 1);
        } else {
            assertEquals(messageId1.entryId, totalMessages * 2 / 3 - 1);
        }
    });
    consumer.unsubscribe();
    consumer.close();
    producer1.close();
    producer2.close();
    producer3.close();
}
/**
 * Verifies that a single multi-topics consumer can subscribe to topics that live in
 * different namespaces and receive one message from each of the three topics.
 */
@Test(timeOut = testTimeout)
public void multiTopicsInDifferentNameSpace() throws PulsarAdminException, PulsarClientException {
    List<String> topics = new ArrayList<>();
    topics.add("persistent://prop/use/ns-abc/topic-1");
    topics.add("persistent://prop/use/ns-abc/topic-2");
    topics.add("persistent://prop/use/ns-abc1/topic-3");
    admin.clusters().createCluster("use", ClusterData.builder().serviceUrl(brokerUrl.toString()).build());
    admin.tenants().createTenant("prop", new TenantInfoImpl(null, Sets.newHashSet("use")));
    admin.namespaces().createNamespace("prop/use/ns-abc");
    admin.namespaces().createNamespace("prop/use/ns-abc1");
    // Fix: use the parameterized Consumer<byte[]> type instead of the raw type, so the
    // receive() below is type-checked rather than relying on an unchecked conversion.
    Consumer<byte[]> consumer = pulsarClient.newConsumer()
            .topics(topics)
            .subscriptionName("multiTopicSubscription")
            .subscriptionType(SubscriptionType.Exclusive)
            .subscribe();
    // create Producer
    Producer<String> producer = pulsarClient.newProducer(Schema.STRING)
            .topic("persistent://prop/use/ns-abc/topic-1")
            .producerName("producer")
            .create();
    Producer<String> producer1 = pulsarClient.newProducer(Schema.STRING)
            .topic("persistent://prop/use/ns-abc/topic-2")
            .producerName("producer1")
            .create();
    Producer<String> producer2 = pulsarClient.newProducer(Schema.STRING)
            .topic("persistent://prop/use/ns-abc1/topic-3")
            .producerName("producer2")
            .create();
    //send message
    producer.send("ns-abc/topic-1-Message1");
    producer1.send("ns-abc/topic-2-Message1");
    producer2.send("ns-abc1/topic-3-Message1");
    // Drain: exactly one message per topic is expected.
    int messageSet = 0;
    Message<byte[]> message = consumer.receive();
    do {
        messageSet ++;
        consumer.acknowledge(message);
        log.info("Consumer acknowledged : " + new String(message.getData()));
        message = consumer.receive(200, TimeUnit.MILLISECONDS);
    } while (message != null);
    assertEquals(messageSet, 3);
    consumer.unsubscribe();
    consumer.close();
    producer.close();
    producer1.close();
    producer2.close();
}
/**
 * With a deliberately tiny (2 ms) operation timeout, repeated multi-topic subscribe
 * attempts must always complete — either successfully or with a TimeoutException —
 * and must never hang the client.
 */
@Test(timeOut = testTimeout)
public void testSubscriptionMustCompleteWhenOperationTimeoutOnMultipleTopics() throws PulsarClientException {
    @Cleanup
    PulsarClient client = PulsarClient.builder()
            .serviceUrl(lookupUrl.toString())
            .ioThreads(2)
            .listenerThreads(3)
            .operationTimeout(2, TimeUnit.MILLISECONDS) // Set this very small so the operation timeout can be triggered
            .build();
    String topic0 = "public/default/topic0";
    String topic1 = "public/default/topic1";
    // Consumers created here are not closed individually; @Cleanup on the client
    // releases them all when the test ends.
    for (int i = 0; i < 10; i++) {
        try {
            client.newConsumer(Schema.STRING)
                    .subscriptionName("subName")
                    .topics(Lists.newArrayList(topic0, topic1))
                    .receiverQueueSize(2)
                    .subscriptionType(SubscriptionType.Shared)
                    .ackTimeout(365, TimeUnit.DAYS)
                    .ackTimeoutTickTime(36, TimeUnit.DAYS)
                    .acknowledgmentGroupTime(0, TimeUnit.MILLISECONDS)
                    .subscriptionInitialPosition(SubscriptionInitialPosition.Earliest)
                    .subscribe();
            Thread.sleep(3000);
        } catch (Exception ex) {
            // NOTE(review): this catch also traps InterruptedException from the sleep,
            // which would then fail the assertion below — confirm that is acceptable.
            Assert.assertTrue(ex instanceof PulsarClientException.TimeoutException);
        }
    }
}
/**
 * Regression test for issue #9585: a pattern consumer must track partition changes of
 * a matching topic — shrinking to zero consumers when the partitioned topic is deleted,
 * and growing back when the topic is recreated with more partitions.
 */
@Test(timeOut = testTimeout)
public void testAutoDiscoverMultiTopicsPartitions() throws Exception {
    final String topicName = "persistent://public/default/issue-9585";
    admin.topics().createPartitionedTopic(topicName, 3);
    PatternMultiTopicsConsumerImpl<String> consumer = (PatternMultiTopicsConsumerImpl<String>) pulsarClient.newConsumer(Schema.STRING)
            .topicsPattern(topicName)
            .subscriptionName("sub-issue-9585")
            .subscribe();
    Assert.assertEquals(consumer.getPartitionsOfTheTopicMap(), 3);
    Assert.assertEquals(consumer.getConsumers().size(), 3);
    // Delete the topic, then run the partitions auto-update task by hand instead of
    // waiting for its timer; the consumer must drop all partition consumers.
    admin.topics().deletePartitionedTopic(topicName, true);
    consumer.getPartitionsAutoUpdateTimeout().task().run(consumer.getPartitionsAutoUpdateTimeout());
    Awaitility.await().untilAsserted(() -> {
        Assert.assertEquals(consumer.getPartitionsOfTheTopicMap(), 0);
        Assert.assertEquals(consumer.getConsumers().size(), 0);
    });
    // Recreate with more partitions; the consumer must pick up all 7.
    admin.topics().createPartitionedTopic(topicName, 7);
    consumer.getPartitionsAutoUpdateTimeout().task().run(consumer.getPartitionsAutoUpdateTimeout());
    Awaitility.await().untilAsserted(() -> {
        Assert.assertEquals(consumer.getPartitionsOfTheTopicMap(), 7);
        Assert.assertEquals(consumer.getConsumers().size(), 7);
    });
    // Fix: close the consumer so it does not leak into subsequent tests.
    consumer.close();
}
/**
 * Verifies that a pattern consumer tracks partition-count growth across multiple
 * matching topics: updates to an existing topic's partitions, discovery of a newly
 * created matching topic, and further partition updates to the new topic.
 */
@Test(timeOut = testTimeout)
public void testPartitionsUpdatesForMultipleTopics() throws Exception {
    final String topicName0 = "persistent://public/default/testPartitionsUpdatesForMultipleTopics-0";
    final String subName = "my-sub";
    admin.topics().createPartitionedTopic(topicName0, 2);
    assertEquals(admin.topics().getPartitionedTopicMetadata(topicName0).partitions, 2);
    PatternMultiTopicsConsumerImpl<String> consumer = (PatternMultiTopicsConsumerImpl<String>) pulsarClient.newConsumer(Schema.STRING)
            .topicsPattern("persistent://public/default/test.*")
            .subscriptionType(SubscriptionType.Failover)
            .subscriptionName(subName)
            .subscribe();
    Assert.assertEquals(consumer.getPartitionsOfTheTopicMap(), 2);
    Assert.assertEquals(consumer.allTopicPartitionsNumber.intValue(), 2);
    // Grow topic0 from 2 to 5 partitions and run the auto-update task by hand.
    admin.topics().updatePartitionedTopic(topicName0, 5);
    consumer.getPartitionsAutoUpdateTimeout().task().run(consumer.getPartitionsAutoUpdateTimeout());
    Awaitility.await().untilAsserted(() -> {
        Assert.assertEquals(consumer.getPartitionsOfTheTopicMap(), 5);
        Assert.assertEquals(consumer.allTopicPartitionsNumber.intValue(), 5);
    });
    // A newly created matching topic is found by the pattern-recheck task: 5 + 3 = 8.
    final String topicName1 = "persistent://public/default/testPartitionsUpdatesForMultipleTopics-1";
    admin.topics().createPartitionedTopic(topicName1, 3);
    assertEquals(admin.topics().getPartitionedTopicMetadata(topicName1).partitions, 3);
    consumer.getRecheckPatternTimeout().task().run(consumer.getRecheckPatternTimeout());
    Awaitility.await().untilAsserted(() -> {
        Assert.assertEquals(consumer.getPartitionsOfTheTopicMap(), 8);
        Assert.assertEquals(consumer.allTopicPartitionsNumber.intValue(), 8);
    });
    // Grow topic1 from 3 to 5 partitions: 5 + 5 = 10 total.
    admin.topics().updatePartitionedTopic(topicName1, 5);
    consumer.getPartitionsAutoUpdateTimeout().task().run(consumer.getPartitionsAutoUpdateTimeout());
    Awaitility.await().untilAsserted(() -> {
        Assert.assertEquals(consumer.getPartitionsOfTheTopicMap(), 10);
        Assert.assertEquals(consumer.allTopicPartitionsNumber.intValue(), 10);
    });
    // Fix: close the consumer so it does not leak into subsequent tests.
    consumer.close();
}
}
| |
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.workbench.screens.scenariosimulation.kogito.client.editor;
import java.util.Arrays;
import com.google.gwtmockito.GwtMockitoTestRunner;
import elemental2.promise.Promise;
import org.drools.scenariosimulation.api.model.ScenarioSimulationModel;
import org.drools.scenariosimulation.api.model.Settings;
import org.drools.workbench.scenariosimulation.kogito.marshaller.js.callbacks.SCESIMMarshallCallback;
import org.drools.workbench.screens.scenariosimulation.client.commands.ScenarioSimulationContext;
import org.drools.workbench.screens.scenariosimulation.client.editor.ScenarioSimulationEditorPresenter;
import org.drools.workbench.screens.scenariosimulation.client.editor.strategies.DataManagementStrategy;
import org.drools.workbench.screens.scenariosimulation.client.enums.GridWidget;
import org.drools.workbench.screens.scenariosimulation.client.resources.i18n.ScenarioSimulationEditorConstants;
import org.drools.workbench.screens.scenariosimulation.client.widgets.ScenarioGridPanel;
import org.drools.workbench.screens.scenariosimulation.client.widgets.ScenarioGridWidget;
import org.drools.workbench.screens.scenariosimulation.kogito.client.dmn.KogitoScenarioSimulationBuilder;
import org.drools.workbench.screens.scenariosimulation.kogito.client.dmo.KogitoAsyncPackageDataModelOracle;
import org.drools.workbench.screens.scenariosimulation.kogito.client.editor.strategies.KogitoDMNDataManagementStrategy;
import org.drools.workbench.screens.scenariosimulation.kogito.client.editor.strategies.KogitoDMODataManagementStrategy;
import org.drools.workbench.screens.scenariosimulation.kogito.client.handlers.ScenarioSimulationKogitoDocksHandler;
import org.drools.workbench.screens.scenariosimulation.kogito.client.popup.ScenarioSimulationKogitoCreationPopupPresenter;
import org.gwtbootstrap3.client.ui.NavTabs;
import org.gwtbootstrap3.client.ui.TabListItem;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.kogito.client.editor.BaseKogitoEditor;
import org.kie.workbench.common.kogito.client.editor.MultiPageEditorContainerView;
import org.kie.workbench.common.kogito.client.resources.i18n.KogitoClientConstants;
import org.kie.workbench.common.widgets.client.docks.AuthoringEditorDock;
import org.kie.workbench.common.widgets.client.menu.FileMenuBuilder;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.internal.util.reflection.Whitebox;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.backend.vfs.PathFactory;
import org.uberfire.client.mvp.PlaceStatus;
import org.uberfire.client.promise.Promises;
import org.uberfire.client.views.pfly.multipage.MultiPageEditorViewImpl;
import org.uberfire.client.workbench.widgets.multipage.MultiPageEditor;
import org.uberfire.client.workbench.widgets.multipage.Page;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.workbench.events.NotificationEvent;
import org.uberfire.workbench.model.menu.MenuItem;
import org.uberfire.workbench.model.menu.Menus;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.willThrow;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isA;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code ScenarioSimulationEditorKogitoWrapper}.
 *
 * The wrapper under test is a Mockito spy of an anonymous subclass: the instance initializer
 * injects the mocked collaborators into the wrapper's fields, and the GWT/JSInterop-only
 * operations (content marshalling, editor page reset) are overridden as no-ops so the
 * surrounding orchestration logic can be verified without a browser environment.
 */
@RunWith(GwtMockitoTestRunner.class)
public class ScenarioSimulationEditorKogitoWrapperTest {

    // JSON the presenter reports for the model; used to verify the original-content hash
    private static final String JSON_MODEL = "jsonModel";

    // --- Collaborator mocks injected into the wrapper under test ---
    @Mock
    private FileMenuBuilder fileMenuBuilderMock;
    @Mock
    private Menus menusMock;
    @Mock
    private Promise.PromiseExecutorCallbackFn.ResolveCallbackFn<String> resolveCallBackMock;
    @Mock
    private Promises promisesMock;
    @Mock
    private ScenarioSimulationEditorPresenter scenarioSimulationEditorPresenterMock;
    @Mock
    private MenuItem menuItemMock;
    @Mock
    private ScenarioSimulationModel scenarioSimulationModelMock;
    @Mock
    private ScenarioSimulationContext scenarioSimulationContextMock;
    @Mock
    private Settings settingsMock;
    @Mock
    private ScenarioGridPanel simulationGridPanelMock;
    @Mock
    private ScenarioGridPanel backgroundGridPanelMock;
    @Mock
    private ScenarioGridWidget scenarioGridWidgetMock;
    @Mock
    private MultiPageEditorContainerView multiPageEditorContainerViewMock;
    @Mock
    private MultiPageEditor multiPageEditorMock;
    @Mock
    private MultiPageEditorViewImpl multiPageEditorViewMock;
    @Mock
    private TranslationService translationServiceMock;
    @Mock
    private NavTabs navBarsMock;
    @Mock
    private TabListItem editorItemMock;
    @Mock
    private TabListItem backgroundItemMock;
    @Mock
    private ScenarioSimulationKogitoCreationPopupPresenter scenarioSimulationKogitoCreationPopupPresenterMock;
    @Mock
    private KogitoScenarioSimulationBuilder kogitoScenarioSimulationBuilderMock;
    @Mock
    private KogitoAsyncPackageDataModelOracle kogitoAsyncPackageDataModelOracleMock;
    @Mock
    private Promise.PromiseExecutorCallbackFn.ResolveCallbackFn<Object> resolveCallbackFnMock;
    @Mock
    private Promise.PromiseExecutorCallbackFn.RejectCallbackFn rejectCallbackFnMock;
    @Mock
    private PlaceRequest placeRequestMock;
    @Mock
    private ScenarioSimulationKogitoDocksHandler scenarioSimulationKogitoDocksHandlerMock;
    @Mock
    private AuthoringEditorDock authoringEditorDockMock;

    // --- Captors used to inspect callbacks and values handed to collaborators ---
    @Captor
    private ArgumentCaptor<DataManagementStrategy> dataManagementStrategyCaptor;
    @Captor
    private ArgumentCaptor<Page> pageCaptor;
    @Captor
    private ArgumentCaptor<RemoteCallback> remoteCallbackArgumentCaptor;
    @Captor
    private ArgumentCaptor<Path> pathArgumentCaptor;
    @Captor
    private ArgumentCaptor<Promise.PromiseExecutorCallbackFn> promiseExecutorCallbackFnArgumentCaptor;

    private ScenarioSimulationEditorKogitoWrapper scenarioSimulationEditorKogitoWrapperSpy;
    // Canonical scesim path reused across tests
    private Path path = PathFactory.newPath("file.scesim", "path/");

    @Before
    public void setup() {
        // Common happy-path stubbing shared by all tests: menu building, model/context access,
        // grid panel lookup, multi-page editor navigation and popup selection defaults.
        when(fileMenuBuilderMock.build()).thenReturn(menusMock);
        when(menusMock.getItems()).thenReturn(Arrays.asList(menuItemMock));
        when(scenarioSimulationEditorPresenterMock.getModel()).thenReturn(scenarioSimulationModelMock);
        when(scenarioSimulationModelMock.getSettings()).thenReturn(settingsMock);
        when(scenarioSimulationEditorPresenterMock.getContext()).thenReturn(scenarioSimulationContextMock);
        when(scenarioSimulationContextMock.getScenarioGridPanelByGridWidget(GridWidget.SIMULATION)).thenReturn(simulationGridPanelMock);
        when(scenarioSimulationContextMock.getScenarioGridPanelByGridWidget(GridWidget.BACKGROUND)).thenReturn(backgroundGridPanelMock);
        when(multiPageEditorContainerViewMock.getMultiPage()).thenReturn(multiPageEditorMock);
        when(multiPageEditorMock.getView()).thenReturn(multiPageEditorViewMock);
        // Tab indexes: 1 = editor ("Edit") tab, 2 = background tab
        when(multiPageEditorViewMock.getPageIndex(KogitoClientConstants.KieEditorWrapperView_EditTabTitle)).thenReturn(1);
        when(multiPageEditorViewMock.getPageIndex(ScenarioSimulationEditorConstants.INSTANCE.backgroundTabTitle())).thenReturn(2);
        when(multiPageEditorViewMock.getTabBar()).thenReturn(navBarsMock);
        when(navBarsMock.getWidget(1)).thenReturn(editorItemMock);
        when(navBarsMock.getWidget(2)).thenReturn(backgroundItemMock);
        when(scenarioSimulationKogitoCreationPopupPresenterMock.getSelectedPath()).thenReturn("selected");
        when(translationServiceMock.getTranslation(KogitoClientConstants.KieEditorWrapperView_EditTabTitle)).thenReturn(KogitoClientConstants.KieEditorWrapperView_EditTabTitle);
        // Anonymous subclass: the instance initializer injects mocks into the wrapper's fields,
        // and GWT/JSInterop-dependent methods are stubbed so they can be verified in isolation.
        scenarioSimulationEditorKogitoWrapperSpy = spy(new ScenarioSimulationEditorKogitoWrapper() {
            {
                this.fileMenuBuilder = fileMenuBuilderMock;
                this.scenarioSimulationEditorPresenter = scenarioSimulationEditorPresenterMock;
                this.promises = promisesMock;
                this.kogitoOracle = kogitoAsyncPackageDataModelOracleMock;
                this.translationService = translationServiceMock;
                this.scenarioSimulationKogitoCreationPopupPresenter = scenarioSimulationKogitoCreationPopupPresenterMock;
                this.scenarioSimulationBuilder = kogitoScenarioSimulationBuilderMock;
                this.authoringWorkbenchDocks = authoringEditorDockMock;
                this.scenarioSimulationKogitoDocksHandler = scenarioSimulationKogitoDocksHandlerMock;
            }

            @Override
            public MultiPageEditorContainerView getWidget() {
                return multiPageEditorContainerViewMock;
            }

            @Override
            protected void marshallContent(ScenarioSimulationModel scenarioSimulationModel, Promise.PromiseExecutorCallbackFn.ResolveCallbackFn<String> resolveCallbackFn) {
                // JSInterops logic, can't be tested
            }

            @Override
            protected void resetEditorPages() {
                //Do nothing
            }
        });
    }

    @Test
    public void buildMenuBar() {
        scenarioSimulationEditorKogitoWrapperSpy.buildMenuBar();
        verify(fileMenuBuilderMock, times(1)).build();
        verify(menuItemMock, times(1)).setEnabled(eq(true));
    }

    @Test
    public void getContent() {
        // getContent wraps prepareContent in a Promise; invoking the captured executor
        // must delegate to prepareContent with the same callbacks
        scenarioSimulationEditorKogitoWrapperSpy.getContent();
        verify(promisesMock, times(1)).create(promiseExecutorCallbackFnArgumentCaptor.capture());
        promiseExecutorCallbackFnArgumentCaptor.getValue().onInvoke(resolveCallBackMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).prepareContent(eq(resolveCallBackMock),
                                                                                  eq(rejectCallbackFnMock));
    }

    @Test
    public void manageContent() {
        // Full path plus content: no creation popup, navigate to the path and unmarshall
        scenarioSimulationEditorKogitoWrapperSpy.manageContent("path/file.scesim", "value", resolveCallbackFnMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).showScenarioSimulationCreationPopup(any());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).gotoPath(pathArgumentCaptor.capture());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).unmarshallContent(eq("value"));
        assertEquals("file.scesim", pathArgumentCaptor.getValue().getFileName());
        assertEquals("path/", pathArgumentCaptor.getValue().toURI());
    }

    @Test
    public void prepareContent() {
        scenarioSimulationEditorKogitoWrapperSpy.prepareContent(resolveCallBackMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).synchronizeColumnsDimension(eq(simulationGridPanelMock), eq(backgroundGridPanelMock));
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).marshallContent(eq(scenarioSimulationModelMock), eq(resolveCallBackMock));
        verify(scenarioSimulationEditorPresenterMock, never()).sendNotification(any(), any());
    }

    @Test
    public void prepareContent_Exception() {
        // A marshalling failure must surface as an ERROR notification
        willThrow(Exception.class).given(scenarioSimulationEditorKogitoWrapperSpy).marshallContent(any(), any());
        scenarioSimulationEditorKogitoWrapperSpy.prepareContent(resolveCallBackMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).synchronizeColumnsDimension(eq(simulationGridPanelMock), eq(backgroundGridPanelMock));
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).marshallContent(eq(scenarioSimulationModelMock), eq(resolveCallBackMock));
        verify(scenarioSimulationEditorPresenterMock, times(1)).sendNotification(anyString(), eq(NotificationEvent.NotificationType.ERROR));
    }

    @Test
    public void manageContentFileWithoutPath() {
        // A bare file name falls back to the root ("/") URI
        scenarioSimulationEditorKogitoWrapperSpy.manageContent("file.scesim", "value", resolveCallbackFnMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).showScenarioSimulationCreationPopup(any());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).gotoPath(pathArgumentCaptor.capture());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).unmarshallContent(eq("value"));
        assertEquals("file.scesim", pathArgumentCaptor.getValue().getFileName());
        assertEquals("/", pathArgumentCaptor.getValue().toURI());
    }

    @Test
    public void manageContentNullPath() {
        // An empty path yields the default "new-file.scesim" name at the root URI
        scenarioSimulationEditorKogitoWrapperSpy.manageContent("", "value", resolveCallbackFnMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).showScenarioSimulationCreationPopup(any());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).gotoPath(pathArgumentCaptor.capture());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).unmarshallContent(eq("value"));
        assertEquals("new-file.scesim", pathArgumentCaptor.getValue().getFileName());
        assertEquals("/", pathArgumentCaptor.getValue().toURI());
    }

    @Test
    public void manageContentNullContentAndPath() {
        // No content at all: the creation popup is shown instead of navigating/unmarshalling
        scenarioSimulationEditorKogitoWrapperSpy.manageContent(null, null, resolveCallbackFnMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).showScenarioSimulationCreationPopup(pathArgumentCaptor.capture());
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).gotoPath(any());
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).unmarshallContent(any());
        assertEquals("new-file.scesim", pathArgumentCaptor.getValue().getFileName());
        assertEquals("/", pathArgumentCaptor.getValue().toURI());
    }

    @Test
    public void manageContentNullContent() {
        scenarioSimulationEditorKogitoWrapperSpy.manageContent("path/file.scesim", null, resolveCallbackFnMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).showScenarioSimulationCreationPopup(pathArgumentCaptor.capture());
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).gotoPath(any());
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).unmarshallContent(any());
        assertEquals("file.scesim", pathArgumentCaptor.getValue().getFileName());
        assertEquals("path/", pathArgumentCaptor.getValue().toURI());
    }

    // NOTE(review): exact duplicate of manageContentNullContent above (same arrange/act/assert);
    // one of the two tests could be removed.
    @Test
    public void manageContentWithPathAndNullContent() {
        scenarioSimulationEditorKogitoWrapperSpy.manageContent("path/file.scesim", null, resolveCallbackFnMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).showScenarioSimulationCreationPopup(pathArgumentCaptor.capture());
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).gotoPath(any());
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).unmarshallContent(any());
        assertEquals("file.scesim", pathArgumentCaptor.getValue().getFileName());
        assertEquals("path/", pathArgumentCaptor.getValue().toURI());
    }

    @Test
    public void manageContentWithException() {
        // An unmarshalling failure must notify the user AND reject the promise
        willThrow(new IllegalStateException("Error message")).given(scenarioSimulationEditorKogitoWrapperSpy).unmarshallContent(any());
        scenarioSimulationEditorKogitoWrapperSpy.manageContent("path/file.scesim", "value", resolveCallbackFnMock, rejectCallbackFnMock);
        verify(scenarioSimulationEditorKogitoWrapperSpy, never()).showScenarioSimulationCreationPopup(any());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).gotoPath(pathArgumentCaptor.capture());
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).unmarshallContent(eq("value"));
        verify(scenarioSimulationEditorPresenterMock, times(1)).sendNotification(eq("Error message"),
                                                                                 eq(NotificationEvent.NotificationType.ERROR));
        verify(rejectCallbackFnMock, times(1)).onInvoke("Error message");
    }

    @Test
    public void onEditTabSelected() {
        scenarioSimulationEditorKogitoWrapperSpy.onEditTabSelected();
        verify(scenarioSimulationEditorPresenterMock, times(1)).onEditTabSelected();
    }

    @Test
    public void makeMenuBar() {
        scenarioSimulationEditorPresenterMock.makeMenuBar(fileMenuBuilderMock);
        verify(scenarioSimulationEditorPresenterMock, times(1)).makeMenuBar(eq(fileMenuBuilderMock));
    }

    @Test
    public void onStartup() {
        // NOTE(review): Whitebox is Mockito internal API — consider exposing a setter or
        // constructor injection for multiPageEditorContainerView instead.
        Whitebox.setInternalState(scenarioSimulationEditorKogitoWrapperSpy, "multiPageEditorContainerView", multiPageEditorContainerViewMock);
        scenarioSimulationEditorKogitoWrapperSpy.onStartup(placeRequestMock);
        verify(authoringEditorDockMock, times(1)).setup(eq("AuthoringPerspective"), eq(placeRequestMock));
        verify(scenarioSimulationEditorPresenterMock, times(1)).setWrapper(eq(scenarioSimulationEditorKogitoWrapperSpy));
    }

    @Test
    public void gotoPath() {
        scenarioSimulationEditorKogitoWrapperSpy.gotoPath(path);
        verify(kogitoAsyncPackageDataModelOracleMock, times(1)).init(eq(path));
        verify(scenarioSimulationEditorPresenterMock, times(1)).setPath(isA(ObservablePath.class));
        assertEquals(path, scenarioSimulationEditorKogitoWrapperSpy.getCurrentPath());
    }

    @Test
    public void getJSInteropMarshallCallback() {
        // The JSInterop callback must forward the marshalled XML to the promise resolver
        SCESIMMarshallCallback callback = scenarioSimulationEditorKogitoWrapperSpy.getJSInteropMarshallCallback(resolveCallBackMock);
        callback.callEvent("xmlString");
        verify(resolveCallBackMock, times(1)).onInvoke(eq("xmlString"));
    }

    @Test
    public void getModelSuccessCallbackMethodRule() {
        // RULE-typed models must be handed a DMO-backed data management strategy
        when(settingsMock.getType()).thenReturn(ScenarioSimulationModel.Type.RULE);
        when(scenarioSimulationEditorPresenterMock.getJsonModel(eq(scenarioSimulationModelMock))).thenReturn(JSON_MODEL);
        scenarioSimulationEditorKogitoWrapperSpy.onModelSuccessCallbackMethod(scenarioSimulationModelMock);
        verify(scenarioSimulationEditorPresenterMock, times(1)).setPackageName(eq(ScenarioSimulationEditorKogitoWrapper.DEFAULT_PACKAGE));
        verify(((BaseKogitoEditor) scenarioSimulationEditorKogitoWrapperSpy), times(1)).setOriginalContentHash(eq(JSON_MODEL.hashCode()));
        verify(scenarioSimulationEditorPresenterMock, times(1)).getModelSuccessCallbackMethod(dataManagementStrategyCaptor.capture(), eq(scenarioSimulationModelMock));
        assertTrue(dataManagementStrategyCaptor.getValue() instanceof KogitoDMODataManagementStrategy);
        verify(scenarioSimulationEditorPresenterMock, times(1)).showDocks(eq(PlaceStatus.CLOSE));
    }

    @Test
    public void getModelSuccessCallbackMethodDMN() {
        // DMN-typed models must be handed a DMN-backed data management strategy
        when(settingsMock.getType()).thenReturn(ScenarioSimulationModel.Type.DMN);
        when(scenarioSimulationEditorPresenterMock.getJsonModel(eq(scenarioSimulationModelMock))).thenReturn(JSON_MODEL);
        scenarioSimulationEditorKogitoWrapperSpy.onModelSuccessCallbackMethod(scenarioSimulationModelMock);
        verify(scenarioSimulationEditorPresenterMock, times(1)).setPackageName(eq(ScenarioSimulationEditorKogitoWrapper.DEFAULT_PACKAGE));
        verify(((BaseKogitoEditor)scenarioSimulationEditorKogitoWrapperSpy), times(1)).setOriginalContentHash(eq(JSON_MODEL.hashCode()));
        verify(scenarioSimulationEditorPresenterMock, times(1)).getModelSuccessCallbackMethod(dataManagementStrategyCaptor.capture(), eq(scenarioSimulationModelMock));
        assertTrue(dataManagementStrategyCaptor.getValue() instanceof KogitoDMNDataManagementStrategy);
        verify(scenarioSimulationEditorPresenterMock, times(1)).showDocks(eq(PlaceStatus.CLOSE));
    }

    @Test
    public void addBackgroundPage() {
        // The background page goes at index 2 and its focus handler selects the background tab
        scenarioSimulationEditorKogitoWrapperSpy.addBackgroundPage(scenarioGridWidgetMock);
        verify(multiPageEditorViewMock, times(1)).addPage(eq(2), pageCaptor.capture());
        pageCaptor.getValue().onFocus();
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).onBackgroundTabSelected();
        assertEquals(ScenarioSimulationEditorConstants.INSTANCE.backgroundTabTitle(), pageCaptor.getValue().getLabel());
    }

    @Test
    public void selectSimulationTab() {
        scenarioSimulationEditorKogitoWrapperSpy.selectSimulationTab();
        verify(editorItemMock, times(1)).showTab(eq(false));
        verify(backgroundItemMock, never()).showTab(anyBoolean());
    }

    @Test
    public void selectBackGroundTab() {
        scenarioSimulationEditorKogitoWrapperSpy.selectBackgroundTab();
        verify(backgroundItemMock, times(1)).showTab(eq(false));
        verify(editorItemMock, never()).showTab(anyBoolean());
    }

    @Test
    public void onBackgroundTabSelected() {
        scenarioSimulationEditorKogitoWrapperSpy.onBackgroundTabSelected();
        verify(scenarioSimulationEditorPresenterMock, times(1)).onBackgroundTabSelected();
    }

    @Test
    public void showScenarioSimulationCreationPopup() {
        scenarioSimulationEditorKogitoWrapperSpy.showScenarioSimulationCreationPopup(path);
        verify(scenarioSimulationKogitoCreationPopupPresenterMock, times(1)).show(eq(ScenarioSimulationEditorConstants.INSTANCE.addScenarioSimulation()),
                                                                                  isA(Command.class));
    }

    @Test
    public void newFileEmptySelectedType() {
        // Without a selected type the command reports an error and builds nothing
        Command command = scenarioSimulationEditorKogitoWrapperSpy.createNewFileCommand(path);
        command.execute();
        verify(scenarioSimulationKogitoCreationPopupPresenterMock, times(1)).getSelectedType();
        verify(scenarioSimulationEditorPresenterMock, times(1)).sendNotification(eq(ScenarioSimulationEditorConstants.INSTANCE.missingSelectedType()),
                                                                                 eq(NotificationEvent.NotificationType.ERROR));
        verify(kogitoScenarioSimulationBuilderMock, never()).populateScenarioSimulationModel(any(), any(), any(), any());
    }

    @Test
    public void newFileEmptySelectedDMNPath() {
        // DMN type without a DMN file selection reports an error and builds nothing
        when(scenarioSimulationKogitoCreationPopupPresenterMock.getSelectedPath()).thenReturn(null);
        when(scenarioSimulationKogitoCreationPopupPresenterMock.getSelectedType()).thenReturn(ScenarioSimulationModel.Type.DMN);
        Command command = scenarioSimulationEditorKogitoWrapperSpy.createNewFileCommand(path);
        command.execute();
        verify(scenarioSimulationKogitoCreationPopupPresenterMock, times(1)).getSelectedType();
        verify(scenarioSimulationEditorPresenterMock, times(1)).sendNotification(eq(ScenarioSimulationEditorConstants.INSTANCE.missingDmnPath()),
                                                                                 eq(NotificationEvent.NotificationType.ERROR));
        verify(kogitoScenarioSimulationBuilderMock, never()).populateScenarioSimulationModel(any(), any(), any(), any());
    }

    @Test
    public void newFileRule() {
        when(scenarioSimulationKogitoCreationPopupPresenterMock.getSelectedType()).thenReturn(ScenarioSimulationModel.Type.RULE);
        Command command = scenarioSimulationEditorKogitoWrapperSpy.createNewFileCommand(path);
        command.execute();
        verify(scenarioSimulationKogitoCreationPopupPresenterMock, times(1)).getSelectedType();
        verify(kogitoScenarioSimulationBuilderMock, times(1)).populateScenarioSimulationModel(isA(ScenarioSimulationModel.class),
                                                                                             eq(ScenarioSimulationModel.Type.RULE),
                                                                                             eq(""),
                                                                                             remoteCallbackArgumentCaptor.capture());
        // Completing the builder callback must navigate to the new path and unmarshall
        remoteCallbackArgumentCaptor.getValue().callback("");
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).gotoPath(eq(path));
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).unmarshallContent(isA(String.class));
    }

    @Test
    public void newFileDMN() {
        when(scenarioSimulationKogitoCreationPopupPresenterMock.getSelectedType()).thenReturn(ScenarioSimulationModel.Type.DMN);
        Command command = scenarioSimulationEditorKogitoWrapperSpy.createNewFileCommand(path);
        command.execute();
        verify(scenarioSimulationKogitoCreationPopupPresenterMock, times(1)).getSelectedType();
        verify(kogitoScenarioSimulationBuilderMock, times(1)).populateScenarioSimulationModel(isA(ScenarioSimulationModel.class),
                                                                                             eq(ScenarioSimulationModel.Type.DMN),
                                                                                             eq("selected"),
                                                                                             remoteCallbackArgumentCaptor.capture());
        remoteCallbackArgumentCaptor.getValue().callback("");
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).gotoPath(eq(path));
        verify(scenarioSimulationEditorKogitoWrapperSpy, times(1)).unmarshallContent(isA(String.class));
    }

    @Test
    public void getScenarioSimulationDocksHandler() {
        assertEquals(scenarioSimulationKogitoDocksHandlerMock,
                     scenarioSimulationEditorKogitoWrapperSpy.getScenarioSimulationDocksHandler());
    }

    @Test
    public void getScenarioSimulationEditorPresenter() {
        assertEquals(scenarioSimulationEditorPresenterMock,
                     scenarioSimulationEditorKogitoWrapperSpy.getScenarioSimulationEditorPresenter());
    }
}
| |
package com.crescentflare.unilayout.containers;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Point;
import android.util.AttributeSet;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import com.crescentflare.unilayout.R;
import com.crescentflare.unilayout.helpers.UniLayoutParams;
import com.crescentflare.unilayout.helpers.UniLayout;
import com.crescentflare.unilayout.views.UniView;
import java.util.ArrayList;
import java.util.List;
/**
* UniLayout container: a vertically or horizontally aligned view container
* Stacks views below or to the right of each other
*/
public class UniLinearContainer extends ViewGroup
{
// ---
// Constants
// ---

// Orientation values mirror LinearLayout's constants so usage stays familiar
public static final int VERTICAL = LinearLayout.VERTICAL;
public static final int HORIZONTAL = LinearLayout.HORIZONTAL;

// ---
// Members
// ---

// Stacking direction of the children; VERTICAL by default
int orientation = VERTICAL;
// ---
// Initialization
// ---
/**
 * Creates the container programmatically (no XML attributes).
 */
public UniLinearContainer(Context context)
{
    this(context, (AttributeSet)null);
}

/**
 * Creates the container from XML; reads the orientation attribute via init().
 */
public UniLinearContainer(Context context, AttributeSet attrs)
{
    super(context, attrs);
    init(attrs);
}

// NOTE(review): defStyleAttr is intentionally not forwarded; styling comes from attrs only
public UniLinearContainer(Context context, AttributeSet attrs, int defStyleAttr)
{
    this(context, attrs);
}

// NOTE(review): defStyleAttr/defStyleRes are intentionally not forwarded; styling comes from attrs only
public UniLinearContainer(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes)
{
    this(context, attrs);
}
/**
 * Reads the styled attributes (currently only the orientation) from the XML attribute set.
 *
 * @param attrs the XML attribute set; may be null when the view is constructed programmatically
 */
private void init(AttributeSet attrs)
{
    if (attrs != null)
    {
        TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.UniLinearContainer);
        try
        {
            orientation = a.getInt(R.styleable.UniLinearContainer_uni_orientation, VERTICAL);
        }
        finally
        {
            // Fix: recycle in finally — the TypedArray must be returned to the pool even if
            // attribute parsing throws, otherwise it leaks
            a.recycle();
        }
    }
}
/**
 * Supplies layout params for children added without explicit ones: wrap content in both axes.
 */
@Override
protected LayoutParams generateDefaultLayoutParams()
{
    final int wrap = LayoutParams.WRAP_CONTENT;
    return new UniLayoutParams(wrap, wrap);
}
/**
 * Inflates UniLayoutParams from XML attributes for children declared in a layout file.
 */
@Override
public LayoutParams generateLayoutParams(AttributeSet attrs)
{
    final Context context = getContext();
    return new UniLayoutParams(context, attrs);
}
// ---
// Adjust properties
// ---
/**
 * @return the stacking direction, either {@link #VERTICAL} or {@link #HORIZONTAL}
 */
public int getOrientation()
{
    return orientation;
}

/**
 * Changes the stacking direction of the container.
 *
 * Fix: schedule a fresh measure/layout pass when the value actually changes. Previously the
 * field was updated silently, so the container kept its stale layout until something else
 * happened to invalidate it (android.widget.LinearLayout's setter requests a layout the same way).
 *
 * @param orientation either {@link #VERTICAL} or {@link #HORIZONTAL}
 */
public void setOrientation(int orientation)
{
    if (this.orientation != orientation)
    {
        this.orientation = orientation;
        requestLayout();
    }
}
// ---
// Custom layout
// ---
// Scratch point reused by onMeasure to avoid allocating a new Point on every measure pass
private Point measuredSize = new Point();

/**
 * Shared measure/layout routine for the VERTICAL orientation.
 *
 * Works in three passes: (1) measure all children without a weight, tracking the height left
 * over; (2) distribute the remaining height over the weighted children proportionally to their
 * weight; (3) position every child top-to-bottom (only when adjustLayout is true) while
 * accumulating the container's measured size into measuredSize.
 *
 * @param measuredSize out-parameter receiving the resulting measured width (x) and height (y)
 * @param widthSize    available width from the parent's measure spec
 * @param heightSize   available height from the parent's measure spec
 * @param widthSpec    width MeasureSpec mode (EXACTLY / AT_MOST / UNSPECIFIED)
 * @param heightSpec   height MeasureSpec mode (EXACTLY / AT_MOST / UNSPECIFIED)
 * @param adjustLayout true to actually position children (layout pass), false to only measure
 */
private void performVerticalLayout(Point measuredSize, int widthSize, int heightSize, int widthSpec, int heightSpec, boolean adjustLayout)
{
    // Determine available size without padding
    int paddedWidthSize = widthSize - getPaddingLeft() - getPaddingRight();
    int paddedHeightSize = heightSize - getPaddingTop() - getPaddingBottom();
    measuredSize.x = getPaddingLeft();
    measuredSize.y = getPaddingTop();
    // 0xFFFFFF acts as an "effectively unbounded" sentinel when the spec is UNSPECIFIED
    if (widthSpec == MeasureSpec.UNSPECIFIED)
    {
        paddedWidthSize = 0xFFFFFF;
    }
    if (heightSpec == MeasureSpec.UNSPECIFIED)
    {
        paddedHeightSize = 0xFFFFFF;
    }
    // Determine first sizable view for spacing margin support
    // (spacingMargin only applies between views, so the first visible child is exempt)
    int firstSizableView = 0xFFFFFF;
    int childCount = getChildCount();
    for (int i = 0; i < childCount; i++)
    {
        View view = getChildAt(i);
        if (view.getVisibility() != GONE)
        {
            firstSizableView = i;
            break;
        }
    }
    // Measure the views without any weight
    float totalWeight = 0;
    int remainingHeight = paddedHeightSize;
    float totalMinHeightForWeight = 0;
    for (int i = 0; i < childCount; i++)
    {
        // Skip hidden views if they are not part of the layout
        View view = getChildAt(i);
        if (view.getVisibility() == GONE)
        {
            continue;
        }
        // Skip views with weight, they will go in the second phase
        // (but reserve their margins and minHeight from the remaining space already)
        ViewGroup.LayoutParams viewLayoutParams = view.getLayoutParams();
        if (viewLayoutParams instanceof UniLayoutParams)
        {
            UniLayoutParams uniLayoutParams = (UniLayoutParams)viewLayoutParams;
            if (uniLayoutParams.weight > 0)
            {
                totalWeight += uniLayoutParams.weight;
                remainingHeight -= uniLayoutParams.topMargin + uniLayoutParams.bottomMargin + uniLayoutParams.minHeight;
                if (i > firstSizableView)
                {
                    remainingHeight -= uniLayoutParams.spacingMargin;
                }
                totalMinHeightForWeight += uniLayoutParams.minHeight;
                continue;
            }
        }
        // Perform measure and update remaining height
        int limitWidth = paddedWidthSize;
        if (viewLayoutParams instanceof MarginLayoutParams)
        {
            limitWidth -= ((MarginLayoutParams)viewLayoutParams).leftMargin + ((MarginLayoutParams)viewLayoutParams).rightMargin;
            remainingHeight -= ((MarginLayoutParams)viewLayoutParams).topMargin + ((MarginLayoutParams)viewLayoutParams).bottomMargin;
        }
        if (i > firstSizableView && viewLayoutParams instanceof UniLayoutParams)
        {
            remainingHeight -= ((UniLayoutParams)viewLayoutParams).spacingMargin;
        }
        UniLayout.measure(view, limitWidth, remainingHeight, widthSpec, heightSpec, MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED);
        remainingHeight = Math.max(0, remainingHeight - view.getMeasuredHeight());
    }
    // Measure the remaining views with weight
    // (minHeight was reserved above, so add it back before dividing by weight)
    remainingHeight += totalMinHeightForWeight;
    for (int i = 0; i < childCount; i++)
    {
        // Skip hidden views if they are not part of the layout
        View view = getChildAt(i);
        if (view.getVisibility() == GONE)
        {
            continue;
        }
        // Continue measuring views with weight
        if (view.getLayoutParams() instanceof UniLayoutParams)
        {
            UniLayoutParams uniLayoutParams = (UniLayoutParams)view.getLayoutParams();
            if (uniLayoutParams.weight > 0)
            {
                int forceViewHeightSpec = heightSpec == MeasureSpec.EXACTLY ? MeasureSpec.EXACTLY : MeasureSpec.UNSPECIFIED;
                UniLayout.measure(view, paddedWidthSize - uniLayoutParams.leftMargin - uniLayoutParams.rightMargin, (int)(remainingHeight * uniLayoutParams.weight / totalWeight), widthSpec, heightSpec, MeasureSpec.UNSPECIFIED, forceViewHeightSpec);
                remainingHeight = Math.max(0, remainingHeight - view.getMeasuredHeight());
                totalWeight -= uniLayoutParams.weight;
            }
        }
    }
    // Start doing layout
    int y = getPaddingTop();
    for (int i = 0; i < childCount; i++)
    {
        // Skip hidden views if they are not part of the layout
        View view = getChildAt(i);
        if (view.getVisibility() == GONE)
        {
            continue;
        }
        // Continue with the others
        int width = view.getMeasuredWidth();
        int height = view.getMeasuredHeight();
        int x = getPaddingLeft();
        int nextY = y;
        if (view.getLayoutParams() instanceof UniLayoutParams)
        {
            UniLayoutParams uniLayoutParams = (UniLayoutParams)view.getLayoutParams();
            x += uniLayoutParams.leftMargin;
            y += uniLayoutParams.topMargin;
            if (i > firstSizableView)
            {
                y += uniLayoutParams.spacingMargin;
            }
            if (adjustLayout)
            {
                // horizontalGravity is a 0..1 factor distributing the slack horizontally
                x += (paddedWidthSize - uniLayoutParams.leftMargin - uniLayoutParams.rightMargin - width) * uniLayoutParams.horizontalGravity;
            }
            measuredSize.x = Math.max(measuredSize.x, x + width + uniLayoutParams.rightMargin);
            nextY = y + height + uniLayoutParams.bottomMargin;
        }
        else
        {
            measuredSize.x = Math.max(measuredSize.x, x + width);
            nextY = y + height;
        }
        if (adjustLayout)
        {
            view.layout(x, y, x + width, y + height);
        }
        measuredSize.y = Math.max(measuredSize.y, nextY);
        y = nextY;
    }
    // Adjust final measure with padding and limitations
    measuredSize.x += getPaddingRight();
    measuredSize.y += getPaddingBottom();
    if (widthSpec == MeasureSpec.EXACTLY)
    {
        measuredSize.x = widthSize;
    }
    else if (widthSpec == MeasureSpec.AT_MOST)
    {
        measuredSize.x = Math.min(measuredSize.x, widthSize);
    }
    if (heightSpec == MeasureSpec.EXACTLY)
    {
        measuredSize.y = heightSize;
    }
    else if (heightSpec == MeasureSpec.AT_MOST)
    {
        measuredSize.y = Math.min(measuredSize.y, heightSize);
    }
}
/**
 * Shared measure/layout routine for the HORIZONTAL orientation; mirror image of
 * performVerticalLayout with the axes swapped.
 *
 * Works in three passes: (1) measure all children without a weight, tracking the width left
 * over; (2) distribute the remaining width over the weighted children proportionally to their
 * weight; (3) position every child left-to-right (only when adjustLayout is true) while
 * accumulating the container's measured size into measuredSize.
 *
 * @param measuredSize out-parameter receiving the resulting measured width (x) and height (y)
 * @param widthSize    available width from the parent's measure spec
 * @param heightSize   available height from the parent's measure spec
 * @param widthSpec    width MeasureSpec mode (EXACTLY / AT_MOST / UNSPECIFIED)
 * @param heightSpec   height MeasureSpec mode (EXACTLY / AT_MOST / UNSPECIFIED)
 * @param adjustLayout true to actually position children (layout pass), false to only measure
 */
private void performHorizontalLayout(Point measuredSize, int widthSize, int heightSize, int widthSpec, int heightSpec, boolean adjustLayout)
{
    // Determine available size without padding
    int paddedWidthSize = widthSize - getPaddingLeft() - getPaddingRight();
    int paddedHeightSize = heightSize - getPaddingTop() - getPaddingBottom();
    measuredSize.x = getPaddingLeft();
    measuredSize.y = getPaddingTop();
    // 0xFFFFFF acts as an "effectively unbounded" sentinel when the spec is UNSPECIFIED
    if (widthSpec == MeasureSpec.UNSPECIFIED)
    {
        paddedWidthSize = 0xFFFFFF;
    }
    if (heightSpec == MeasureSpec.UNSPECIFIED)
    {
        paddedHeightSize = 0xFFFFFF;
    }
    // Determine first sizable view for spacing margin support
    // (spacingMargin only applies between views, so the first visible child is exempt)
    int firstSizableView = 0xFFFFFF;
    int childCount = getChildCount();
    for (int i = 0; i < childCount; i++)
    {
        View view = getChildAt(i);
        if (view.getVisibility() != GONE)
        {
            firstSizableView = i;
            break;
        }
    }
    // Measure the views without any weight
    float totalWeight = 0;
    int remainingWidth = paddedWidthSize;
    float totalMinWidthForWeight = 0;
    for (int i = 0; i < childCount; i++)
    {
        // Skip hidden views if they are not part of the layout
        View view = getChildAt(i);
        if (view.getVisibility() == GONE)
        {
            continue;
        }
        // Skip views with weight, they will go in the second phase
        // (but reserve their margins and minWidth from the remaining space already)
        ViewGroup.LayoutParams viewLayoutParams = view.getLayoutParams();
        if (viewLayoutParams instanceof UniLayoutParams)
        {
            UniLayoutParams uniLayoutParams = (UniLayoutParams)viewLayoutParams;
            if (uniLayoutParams.weight > 0)
            {
                totalWeight += uniLayoutParams.weight;
                remainingWidth -= uniLayoutParams.leftMargin + uniLayoutParams.rightMargin + uniLayoutParams.minWidth;
                if (i > firstSizableView)
                {
                    remainingWidth -= uniLayoutParams.spacingMargin;
                }
                totalMinWidthForWeight += uniLayoutParams.minWidth;
                continue;
            }
        }
        // Perform measure and update remaining width
        int limitHeight = paddedHeightSize;
        if (viewLayoutParams instanceof MarginLayoutParams)
        {
            remainingWidth -= ((MarginLayoutParams)viewLayoutParams).leftMargin + ((MarginLayoutParams)viewLayoutParams).rightMargin;
            limitHeight -= ((MarginLayoutParams)viewLayoutParams).topMargin + ((MarginLayoutParams)viewLayoutParams).bottomMargin;
        }
        if (i > firstSizableView && viewLayoutParams instanceof UniLayoutParams)
        {
            remainingWidth -= ((UniLayoutParams)viewLayoutParams).spacingMargin;
        }
        UniLayout.measure(view, remainingWidth, limitHeight, widthSpec, heightSpec, MeasureSpec.UNSPECIFIED, MeasureSpec.UNSPECIFIED);
        remainingWidth = Math.max(0, remainingWidth - view.getMeasuredWidth());
    }
    // Measure the remaining views with weight
    // (minWidth was reserved above, so add it back before dividing by weight)
    remainingWidth += totalMinWidthForWeight;
    for (int i = 0; i < childCount; i++)
    {
        // Skip hidden views if they are not part of the layout
        View view = getChildAt(i);
        if (view.getVisibility() == GONE)
        {
            continue;
        }
        // Continue with views with weight
        if (view.getLayoutParams() instanceof UniLayoutParams)
        {
            UniLayoutParams uniLayoutParams = (UniLayoutParams)view.getLayoutParams();
            if (uniLayoutParams.weight > 0)
            {
                int forceViewWidthSpec = widthSpec == MeasureSpec.EXACTLY ? MeasureSpec.EXACTLY : MeasureSpec.UNSPECIFIED;
                UniLayout.measure(view, (int)(remainingWidth * uniLayoutParams.weight / totalWeight), paddedHeightSize - uniLayoutParams.topMargin - uniLayoutParams.bottomMargin, widthSpec, heightSpec, forceViewWidthSpec, MeasureSpec.UNSPECIFIED);
                remainingWidth = Math.max(0, remainingWidth - view.getMeasuredWidth());
                totalWeight -= uniLayoutParams.weight;
            }
        }
    }
    // Start doing layout
    int x = getPaddingLeft();
    for (int i = 0; i < childCount; i++)
    {
        // Skip hidden views if they are not part of the layout
        View view = getChildAt(i);
        if (view.getVisibility() == GONE)
        {
            continue;
        }
        // Continue with the others
        int width = view.getMeasuredWidth();
        int height = view.getMeasuredHeight();
        int y = getPaddingTop();
        int nextX = x;
        if (view.getLayoutParams() instanceof UniLayoutParams)
        {
            UniLayoutParams uniLayoutParams = (UniLayoutParams)view.getLayoutParams();
            x += uniLayoutParams.leftMargin;
            y += uniLayoutParams.topMargin;
            if (i > firstSizableView)
            {
                x += uniLayoutParams.spacingMargin;
            }
            if (adjustLayout)
            {
                // verticalGravity is a 0..1 factor distributing the slack vertically
                y += (paddedHeightSize - uniLayoutParams.topMargin - uniLayoutParams.bottomMargin - height) * uniLayoutParams.verticalGravity;
            }
            measuredSize.y = Math.max(measuredSize.y, y + height + uniLayoutParams.bottomMargin);
            nextX = x + width + uniLayoutParams.rightMargin;
        }
        else
        {
            measuredSize.y = Math.max(measuredSize.y, y + height);
            nextX = x + width;
        }
        if (adjustLayout)
        {
            view.layout(x, y, x + width, y + height);
        }
        measuredSize.x = Math.max(measuredSize.x, nextX);
        x = nextX;
    }
    // Adjust final measure with padding and limitations
    measuredSize.x += getPaddingRight();
    measuredSize.y += getPaddingBottom();
    if (widthSpec == MeasureSpec.EXACTLY)
    {
        measuredSize.x = widthSize;
    }
    else if (widthSpec == MeasureSpec.AT_MOST)
    {
        measuredSize.x = Math.min(measuredSize.x, widthSize);
    }
    if (heightSpec == MeasureSpec.EXACTLY)
    {
        measuredSize.y = heightSize;
    }
    else if (heightSpec == MeasureSpec.AT_MOST)
    {
        measuredSize.y = Math.min(measuredSize.y, heightSize);
    }
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
{
if (getOrientation() == VERTICAL)
{
performVerticalLayout(measuredSize, MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec), MeasureSpec.getMode(widthMeasureSpec), MeasureSpec.getMode(heightMeasureSpec), false);
setMeasuredDimension(measuredSize.x, measuredSize.y);
}
else
{
performHorizontalLayout(measuredSize, MeasureSpec.getSize(widthMeasureSpec), MeasureSpec.getSize(heightMeasureSpec), MeasureSpec.getMode(widthMeasureSpec), MeasureSpec.getMode(heightMeasureSpec), false);
setMeasuredDimension(measuredSize.x, measuredSize.y);
}
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom)
{
if (getOrientation() == VERTICAL)
{
performVerticalLayout(measuredSize, right - left, bottom - top, MeasureSpec.EXACTLY, MeasureSpec.EXACTLY, true);
}
else
{
performHorizontalLayout(measuredSize, right - left, bottom - top, MeasureSpec.EXACTLY, MeasureSpec.EXACTLY, true);
}
}
}
// NOTE(review): removed trailing non-code extraction artifact (web-page residue:
// "Subsets and Splits / No community queries yet / ...") that followed the final
// closing brace and made the file invalid Java.