signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PojoDataParser { /** * { @ inheritDoc } */ @ NonNull @ Override public BaseCell parseSingleComponent ( @ Nullable JSONObject data , Card parent , ServiceManager serviceManager ) { } }
if ( data == null ) { return BaseCell . NaN ; } final CardResolver cardResolver = serviceManager . getService ( CardResolver . class ) ; Preconditions . checkState ( cardResolver != null , "Must register CardResolver into ServiceManager first" ) ; final MVHelper cellResolver = serviceManager . getService ( MVHelper . class ) ; Preconditions . checkState ( cellResolver != null , "Must register CellResolver into ServiceManager first" ) ; BaseCell cell = Card . createCell ( parent , cellResolver , data , serviceManager , true ) ; if ( cellResolver . isValid ( cell , serviceManager ) ) { return cell ; } else { return BaseCell . NaN ; }
public class KeyVaultClientCustomImpl { /** * Verifies a signature using the specified key . * @ param keyIdentifier * The full key identifier * @ param algorithm * The signing / verification algorithm . For more information on * possible algorithm types , see JsonWebKeySignatureAlgorithm . * @ param digest * The digest used for signing * @ param signature * The signature to be verified * @ param serviceCallback * the async ServiceCallback to handle successful and failed * responses . * @ return the { @ link ServiceFuture } object */ public ServiceFuture < KeyVerifyResult > verifyAsync ( String keyIdentifier , JsonWebKeySignatureAlgorithm algorithm , byte [ ] digest , byte [ ] signature , final ServiceCallback < KeyVerifyResult > serviceCallback ) { } }
KeyIdentifier id = new KeyIdentifier ( keyIdentifier ) ; return verifyAsync ( id . vault ( ) , id . name ( ) , id . version ( ) == null ? "" : id . version ( ) , algorithm , digest , signature , serviceCallback ) ;
public class InfinispanSubsystemXMLWriter { /** * { @ inheritDoc } * @ see org . jboss . staxmapper . XMLElementWriter # writeContent ( org . jboss . staxmapper . XMLExtendedStreamWriter , java . lang . Object ) */ @ Override public void writeContent ( XMLExtendedStreamWriter writer , SubsystemMarshallingContext context ) throws XMLStreamException { } }
context . startSubsystemElement ( InfinispanSchema . CURRENT . getNamespaceUri ( ) , false ) ; ModelNode model = context . getModelNode ( ) ; if ( model . isDefined ( ) ) { for ( Property entry : model . get ( ModelKeys . CACHE_CONTAINER ) . asPropertyList ( ) ) { String containerName = entry . getName ( ) ; ModelNode container = entry . getValue ( ) ; writer . writeStartElement ( Element . CACHE_CONTAINER . getLocalName ( ) ) ; writer . writeAttribute ( Attribute . NAME . getLocalName ( ) , containerName ) ; // AS7-3488 make default - cache a non required attribute // this . writeRequired ( writer , Attribute . DEFAULT _ CACHE , container , ModelKeys . DEFAULT _ CACHE ) ; this . writeListAsAttribute ( writer , Attribute . ALIASES , container , ModelKeys . ALIASES ) ; this . writeOptional ( writer , Attribute . DEFAULT_CACHE , container , ModelKeys . DEFAULT_CACHE ) ; this . writeOptional ( writer , Attribute . JNDI_NAME , container , ModelKeys . JNDI_NAME ) ; this . writeOptional ( writer , Attribute . START , container , ModelKeys . START ) ; this . writeOptional ( writer , Attribute . MODULE , container , ModelKeys . MODULE ) ; this . writeOptional ( writer , Attribute . STATISTICS , container , ModelKeys . STATISTICS ) ; if ( container . hasDefined ( ModelKeys . TRANSPORT ) ) { writer . writeStartElement ( Element . TRANSPORT . getLocalName ( ) ) ; ModelNode transport = container . get ( ModelKeys . TRANSPORT , ModelKeys . TRANSPORT_NAME ) ; this . writeOptional ( writer , Attribute . CHANNEL , transport , ModelKeys . CHANNEL ) ; this . writeOptional ( writer , Attribute . LOCK_TIMEOUT , transport , ModelKeys . LOCK_TIMEOUT ) ; this . writeOptional ( writer , Attribute . STRICT_PEER_TO_PEER , transport , ModelKeys . STRICT_PEER_TO_PEER ) ; this . writeOptional ( writer , Attribute . INITIAL_CLUSTER_SIZE , transport , ModelKeys . INITIAL_CLUSTER_SIZE ) ; this . writeOptional ( writer , Attribute . INITIAL_CLUSTER_TIMEOUT , transport , ModelKeys . 
INITIAL_CLUSTER_TIMEOUT ) ; writer . writeEndElement ( ) ; } if ( container . hasDefined ( ModelKeys . SECURITY ) ) { writer . writeStartElement ( Element . SECURITY . getLocalName ( ) ) ; ModelNode security = container . get ( ModelKeys . SECURITY , ModelKeys . SECURITY_NAME ) ; if ( security . hasDefined ( ModelKeys . AUTHORIZATION ) ) { writer . writeStartElement ( Element . AUTHORIZATION . getLocalName ( ) ) ; ModelNode authorization = security . get ( ModelKeys . AUTHORIZATION , ModelKeys . AUTHORIZATION_NAME ) ; if ( authorization . hasDefined ( ModelKeys . MAPPER ) ) { String mapper = authorization . get ( ModelKeys . MAPPER ) . asString ( ) ; if ( CommonNameRoleMapper . class . getName ( ) . equals ( mapper ) ) { writer . writeEmptyElement ( Element . COMMON_NAME_ROLE_MAPPER . getLocalName ( ) ) ; } else if ( ClusterRoleMapper . class . getName ( ) . equals ( mapper ) ) { writer . writeEmptyElement ( Element . CLUSTER_ROLE_MAPPER . getLocalName ( ) ) ; } else if ( IdentityRoleMapper . class . getName ( ) . equals ( mapper ) ) { writer . writeEmptyElement ( Element . IDENTITY_ROLE_MAPPER . getLocalName ( ) ) ; } else { writer . writeStartElement ( Element . CUSTOM_ROLE_MAPPER . getLocalName ( ) ) ; writer . writeAttribute ( Attribute . CLASS . getLocalName ( ) , mapper ) ; writer . writeEndElement ( ) ; } } ModelNode roles = authorization . get ( ModelKeys . ROLE ) ; if ( roles . isDefined ( ) ) { for ( ModelNode roleNode : roles . asList ( ) ) { ModelNode role = roleNode . get ( 0 ) ; writer . writeStartElement ( Element . ROLE . getLocalName ( ) ) ; AuthorizationRoleResource . NAME . marshallAsAttribute ( role , writer ) ; this . writeListAsAttribute ( writer , Attribute . PERMISSIONS , role , ModelKeys . PERMISSIONS ) ; writer . writeEndElement ( ) ; } } writer . writeEndElement ( ) ; } writer . writeEndElement ( ) ; } if ( container . hasDefined ( ModelKeys . GLOBAL_STATE ) ) { writer . writeStartElement ( Element . GLOBAL_STATE . 
getLocalName ( ) ) ; ModelNode globalState = container . get ( ModelKeys . GLOBAL_STATE , ModelKeys . GLOBAL_STATE_NAME ) ; writeStatePathElement ( Element . PERSISTENT_LOCATION , ModelKeys . PERSISTENT_LOCATION , writer , globalState ) ; writeStatePathElement ( Element . SHARED_PERSISTENT_LOCATION , ModelKeys . SHARED_PERSISTENT_LOCATION , writer , globalState ) ; writeStatePathElement ( Element . TEMPORARY_LOCATION , ModelKeys . TEMPORARY_LOCATION , writer , globalState ) ; if ( globalState . hasDefined ( ModelKeys . CONFIGURATION_STORAGE ) ) { ConfigurationStorage configurationStorage = ConfigurationStorage . valueOf ( globalState . get ( ModelKeys . CONFIGURATION_STORAGE ) . asString ( ) ) ; switch ( configurationStorage ) { case IMMUTABLE : writer . writeEmptyElement ( Element . IMMUTABLE_CONFIGURATION_STORAGE . getLocalName ( ) ) ; break ; case VOLATILE : writer . writeEmptyElement ( Element . VOLATILE_CONFIGURATION_STORAGE . getLocalName ( ) ) ; break ; case OVERLAY : writer . writeEmptyElement ( Element . OVERLAY_CONFIGURATION_STORAGE . getLocalName ( ) ) ; break ; case MANAGED : writer . writeEmptyElement ( Element . MANAGED_CONFIGURATION_STORAGE . getLocalName ( ) ) ; break ; case CUSTOM : writer . writeStartElement ( Element . CUSTOM_CONFIGURATION_STORAGE . getLocalName ( ) ) ; writer . writeAttribute ( Attribute . CLASS . getLocalName ( ) , globalState . get ( ModelKeys . CONFIGURATION_STORAGE_CLASS ) . asString ( ) ) ; writer . writeEndElement ( ) ; break ; } } writer . writeEndElement ( ) ; } // write any configured thread pools if ( container . hasDefined ( ThreadPoolResource . WILDCARD_PATH . getKey ( ) ) ) { writeThreadPoolElements ( Element . ASYNC_OPERATIONS_THREAD_POOL , ThreadPoolResource . ASYNC_OPERATIONS , writer , container ) ; writeScheduledThreadPoolElements ( Element . EXPIRATION_THREAD_POOL , ScheduledThreadPoolResource . EXPIRATION , writer , container ) ; writeThreadPoolElements ( Element . LISTENER_THREAD_POOL , ThreadPoolResource . 
LISTENER , writer , container ) ; writeScheduledThreadPoolElements ( Element . PERSISTENCE_THREAD_POOL , ScheduledThreadPoolResource . PERSISTENCE , writer , container ) ; writeThreadPoolElements ( Element . REMOTE_COMMAND_THREAD_POOL , ThreadPoolResource . REMOTE_COMMAND , writer , container ) ; writeScheduledThreadPoolElements ( Element . REPLICATION_QUEUE_THREAD_POOL , ScheduledThreadPoolResource . REPLICATION_QUEUE , writer , container ) ; writeThreadPoolElements ( Element . STATE_TRANSFER_THREAD_POOL , ThreadPoolResource . STATE_TRANSFER , writer , container ) ; writeThreadPoolElements ( Element . TRANSPORT_THREAD_POOL , ThreadPoolResource . TRANSPORT , writer , container ) ; } // write modules if ( container . hasDefined ( ModelKeys . MODULES ) ) { writer . writeStartElement ( Element . MODULES . getLocalName ( ) ) ; ModelNode modules = container . get ( ModelKeys . MODULES , ModelKeys . MODULES_NAME , ModelKeys . MODULE ) ; for ( ModelNode moduleNode : modules . asList ( ) ) { if ( moduleNode . isDefined ( ) ) { ModelNode modelNode = moduleNode . get ( 0 ) ; writer . writeStartElement ( Element . MODULE . getLocalName ( ) ) ; writeAttribute ( writer , modelNode , CacheContainerModuleResource . NAME ) ; if ( modelNode . hasDefined ( ModelKeys . SLOT ) ) { writeAttribute ( writer , modelNode , CacheContainerModuleResource . SLOT ) ; } writer . writeEndElement ( ) ; } } writer . writeEndElement ( ) ; } ModelNode configurations = container . get ( ModelKeys . CONFIGURATIONS , ModelKeys . CONFIGURATIONS_NAME ) ; // write any existent cache types processCacheConfiguration ( writer , container , configurations , ModelKeys . LOCAL_CACHE ) ; processCacheConfiguration ( writer , container , configurations , ModelKeys . INVALIDATION_CACHE ) ; processCacheConfiguration ( writer , container , configurations , ModelKeys . REPLICATED_CACHE ) ; processCacheConfiguration ( writer , container , configurations , ModelKeys . 
DISTRIBUTED_CACHE ) ; // counters processCounterConfigurations ( writer , container ) ; writer . writeEndElement ( ) ; } } writer . writeEndElement ( ) ;
public class BootstrapConfig { /** * For Java 8 and newer JVMs , the PermGen command line parameter is no * longer supported . This method checks the Java level and if it is * less than Java 8 , it simply returns OK . If it is Java 8 or higher , * this method will attempt to create a server . env file with * @ param bootProps * @ return */ protected ReturnCode generateServerEnv ( boolean generatePassword ) { } }
double jvmLevel ; String s = null ; try { s = AccessController . doPrivileged ( new java . security . PrivilegedExceptionAction < String > ( ) { @ Override public String run ( ) throws Exception { String javaSpecVersion = System . getProperty ( "java.specification.version" ) ; return javaSpecVersion ; } } ) ; jvmLevel = Double . parseDouble ( s ) ; } catch ( Exception ex ) { // If we get here , it is most likely because the java . specification . version property // is not a valid double . Return bad java version throw new LaunchException ( "Invalid java.specification.version, " + s , MessageFormat . format ( BootstrapConstants . messages . getString ( "error.create.unknownJavaLevel" ) , s ) , ex , ReturnCode . ERROR_BAD_JAVA_VERSION ) ; } BufferedWriter bw = null ; File serverEnv = getConfigFile ( "server.env" ) ; try { char [ ] keystorePass = PasswordGenerator . generateRandom ( ) ; String serverEnvContents = FileUtils . readFile ( serverEnv ) ; String toWrite = "" ; if ( generatePassword && ( serverEnvContents == null || ! serverEnvContents . contains ( "keystore_password=" ) ) ) { if ( serverEnvContents != null ) toWrite += System . getProperty ( "line.separator" ) ; toWrite += "keystore_password=" + new String ( keystorePass ) ; } if ( jvmLevel >= 1.8 && ( serverEnvContents == null || ! serverEnvContents . contains ( "WLP_SKIP_MAXPERMSIZE=" ) ) ) { if ( serverEnvContents != null || ! toWrite . isEmpty ( ) ) toWrite += System . getProperty ( "line.separator" ) ; toWrite += "WLP_SKIP_MAXPERMSIZE=true" ; } if ( serverEnvContents == null ) FileUtils . createFile ( serverEnv , new ByteArrayInputStream ( toWrite . getBytes ( "UTF-8" ) ) ) ; else FileUtils . appendFile ( serverEnv , new ByteArrayInputStream ( toWrite . getBytes ( "UTF-8" ) ) ) ; } catch ( IOException ex ) { throw new LaunchException ( "Failed to create/update the server.env file for this server" , MessageFormat . format ( BootstrapConstants . messages . 
getString ( "error.create.java8serverenv" ) , serverEnv . getAbsolutePath ( ) ) , ex , ReturnCode . LAUNCH_EXCEPTION ) ; } finally { if ( bw != null ) { try { bw . close ( ) ; } catch ( IOException ex ) { } } } return ReturnCode . OK ;
public class AttackRepository { /** * Save { @ link Attack } to DB * @ param attack { @ link Attack } to save */ @ Transactional public void save ( Attack attack ) { } }
Attack merged = em . merge ( attack ) ; em . flush ( ) ; attack . setId ( merged . getId ( ) ) ;
public class GrpcServiceBuilder { /** * Constructs a new { @ link GrpcService } that can be bound to * { @ link ServerBuilder } . It is recommended to bind the service to a server * using { @ link ServerBuilder # service ( ServiceWithPathMappings ) } to mount all * service paths without interfering with other services . */ public ServiceWithPathMappings < HttpRequest , HttpResponse > build ( ) { } }
final HandlerRegistry handlerRegistry = registryBuilder . build ( ) ; final GrpcService grpcService = new GrpcService ( handlerRegistry , handlerRegistry . methods ( ) . keySet ( ) . stream ( ) . map ( path -> PathMapping . ofExact ( '/' + path ) ) . collect ( ImmutableSet . toImmutableSet ( ) ) , firstNonNull ( decompressorRegistry , DecompressorRegistry . getDefaultInstance ( ) ) , firstNonNull ( compressorRegistry , CompressorRegistry . getDefaultInstance ( ) ) , supportedSerializationFormats , maxOutboundMessageSizeBytes , useBlockingTaskExecutor , unsafeWrapRequestBuffers , protoReflectionService , maxInboundMessageSizeBytes ) ; return enableUnframedRequests ? grpcService . decorate ( UnframedGrpcService :: new ) : grpcService ;
public class CoronaJobHistory { /** * Log a number of keys and values with record . the array length of * keys and values should be same . * @ param writers the writers to send the data to * @ param recordType type of log event * @ param keys type of log event * @ param values type of log event */ private void log ( ArrayList < PrintWriter > writers , RecordTypes recordType , Keys [ ] keys , String [ ] values ) { } }
log ( writers , recordType , keys , values , false ) ;
public class VertxJdbcClientImpl { /** * Translate our abstracted { @ link JdbcOptionsBean } into a Vert . x - specific config . * We are assuming that the user is using HikariCP . */ @ SuppressWarnings ( "nls" ) protected JsonObject parseConfig ( JdbcOptionsBean config ) { } }
JsonObject jsonConfig = new JsonObject ( ) ; nullSafePut ( jsonConfig , "provider_class" , HikariCPDataSourceProvider . class . getCanonicalName ( ) ) ; // Vert . x thing nullSafePut ( jsonConfig , "jdbcUrl" , config . getJdbcUrl ( ) ) ; nullSafePut ( jsonConfig , "username" , config . getUsername ( ) ) ; nullSafePut ( jsonConfig , "password" , config . getPassword ( ) ) ; nullSafePut ( jsonConfig , "autoCommit" , config . isAutoCommit ( ) ) ; nullSafePut ( jsonConfig , "connectionTimeout" , config . getConnectionTimeout ( ) ) ; nullSafePut ( jsonConfig , "idleTimeout" , config . getIdleTimeout ( ) ) ; nullSafePut ( jsonConfig , "maxLifetime" , config . getMaxLifetime ( ) ) ; nullSafePut ( jsonConfig , "minimumIdle" , config . getMinimumIdle ( ) ) ; nullSafePut ( jsonConfig , "maximumPoolSize" , config . getMaximumPoolSize ( ) ) ; nullSafePut ( jsonConfig , "poolName" , config . getPoolName ( ) ) ; JsonObject dsProperties = new JsonObject ( ) ; for ( Entry < String , Object > entry : config . getDsProperties ( ) . entrySet ( ) ) { dsProperties . put ( entry . getKey ( ) , entry . getValue ( ) ) ; } jsonConfig . put ( "properties" , dsProperties ) ; return jsonConfig ;
public class AbstractNotification { /** * - - - - - NotificationInterface - - - - - */ @ Override public < T > void send ( NotificationType type , List < T > messages ) throws NotificationException { } }
String [ ] strMessages = new String [ messages . size ( ) ] ; for ( int index = 0 ; index < messages . size ( ) ; index ++ ) { strMessages [ index ] = getMessageJson ( messages . get ( index ) ) ; } sendInternal ( type , strMessages ) ;
public class GVRAssetLoader { /** * Loads atlas information file placed in the assets folder . * Atlas information file contains in UV space the information of offset and * scale for each mesh mapped in some atlas texture . * The content of the file is at json format like : * [ { name : SUN , offset . x : 0.9 , offset . y : 0.9 , scale . x : 0.5 , scale . y : 0.5 } , * { name : EARTH , offset . x : 0.5 , offset . y : 0.9 , scale . x : 0.5 , scale . y : 0.5 } ] * @ param resource * A stream containing a text file on JSON format . * @ since 3.3 * @ return List of atlas information load . */ public List < GVRAtlasInformation > loadTextureAtlasInformation ( GVRAndroidResource resource ) throws IOException { } }
List < GVRAtlasInformation > atlasInformation = GVRAsynchronousResourceLoader . loadAtlasInformation ( resource . getStream ( ) ) ; resource . closeStream ( ) ; return atlasInformation ;
public class FeedItemTargetServiceLocator { /** * For the given interface , get the stub implementation . * If this service has no port for the given interface , * then ServiceException is thrown . */ public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } }
try { if ( com . google . api . ads . adwords . axis . v201809 . cm . FeedItemTargetServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . adwords . axis . v201809 . cm . FeedItemTargetServiceSoapBindingStub _stub = new com . google . api . ads . adwords . axis . v201809 . cm . FeedItemTargetServiceSoapBindingStub ( new java . net . URL ( FeedItemTargetServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getFeedItemTargetServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ;
public class BeanBuilder { /** * Overrides property setting in the scope of the BeanBuilder to set * properties on the current BeanConfiguration . */ @ Override public void setProperty ( String name , Object value ) { } }
if ( currentBeanConfig != null ) { setPropertyOnBeanConfig ( name , value ) ; }
public class SqlUtils { /** * 通过接口获取sql */ public static String getMapperSql ( Object mapper , String methodName , Object ... args ) { } }
MetaObject metaObject = SystemMetaObject . forObject ( mapper ) ; SqlSession session = ( SqlSession ) metaObject . getValue ( "h.sqlSession" ) ; Class < ? > mapperInterface = ( Class < ? > ) metaObject . getValue ( "h.mapperInterface" ) ; String fullMethodName = mapperInterface . getCanonicalName ( ) + "." + methodName ; if ( args == null || args . length == 0 ) { return getNamespaceSql ( session , fullMethodName , null ) ; } else { return getMapperSql ( session , mapperInterface , methodName , args ) ; }
public class DualInputOperator { /** * Gets the information about the operators input / output types . */ @ Override @ SuppressWarnings ( "unchecked" ) public BinaryOperatorInformation < IN1 , IN2 , OUT > getOperatorInfo ( ) { } }
return ( BinaryOperatorInformation < IN1 , IN2 , OUT > ) this . operatorInfo ;
public class ParameterUtility { /** * Build a customizable class . * @ param parameter The parameter class to load * @ param defaultObject the default object class to use as fallback * @ param interfaceClass the interface that the wanted type shall implement ( for log purpose ) * @ param < D > the type wanted * @ return a new instance of the generic type */ public static < D extends Object > Object buildCustomizableClass ( final ParameterItem < Class < ? > > parameter , final Class < D > defaultObject , final Class < ? > interfaceClass ) { } }
Object object = null ; try { object = parameter . get ( ) . newInstance ( ) ; } catch ( InstantiationException | IllegalAccessException e ) { LOGGER . error ( CUSTOM_CLASS_LOADING_ERROR , e , interfaceClass . getSimpleName ( ) ) ; try { object = defaultObject . newInstance ( ) ; } catch ( InstantiationException | IllegalAccessException e2 ) { throw new CoreRuntimeException ( "Impossible to build Default " + interfaceClass . getSimpleName ( ) , e2 ) ; } } return object ;
public class CodeGenerator { /** * Write the generated methods by the fields defined the in specified class to the source file . * < br / > * Add below comments to specified the section where the generated methods should be written to * < pre > * < / pre > * @ param srcDir * @ param cls * @ param constructor generate constructor * @ param copyMethod generate the copy method . * @ param fluentSetter * @ param ignoreFieldNames * @ param fieldName2MethodName * @ param utilClassForHashEqualsToString is < code > Objects . class < / code > by default . It can also be < code > N . class < / code > or any classes else which provide the { @ code hashCode / equals / toString } method . * Or specify < code > CodeGenerator . _ N < / code > or your own utility class to generate entity classes which not dependent on AbacusUtil . jar for Methods { @ code hashCode / equals / toString } . */ public static void writeClassMethod ( final File srcDir , final Class < ? > cls , final boolean constructor , final boolean copyMethod , final boolean fluentSetter , Set < String > ignoreFieldNames , final Map < String , String > fieldName2MethodName , final Class < ? > utilClassForHashEqualsToString ) { } }
writeClassMethod ( srcDir , cls , constructor , copyMethod , fluentSetter , ignoreFieldNames , fieldName2MethodName , ParentPropertyMode . FIRST , ParentPropertyMode . FIRST , utilClassForHashEqualsToString ) ;
public class RMItoIDL { /** * Returns the OMG IDL Exception name for the specified exception * class name . < p > * The name mangling performed by this method follows the OMG IDL * specification , which basically is as follows : * < ul > * < li > replace all ' . ' separators with ' / ' . * < li > prepend " IDL : " * < li > replace the ending " Exception " with " Ex " or add " Ex " . * < li > ( optionally ) prepend ' J ' if the name starts with ' _ ' , * or prepend ' _ ' if the component name is an IDL keyword * < li > append " : 1.0 " . * < / ul > * For example , " javax . ejb . RemoveException " would be mapped to * " IDL : javax / ejb / RemoveEx : 1.0 " . * @ param exClassName fully qualified name of an exception class * @ param mangleComponents true if component names should be mangled */ static String getIdlExceptionName ( String exClassName , boolean mangleComponents ) { } }
StringBuilder idlName = new StringBuilder ( 256 ) ; idlName . append ( "IDL:" ) ; idlName . append ( exClassName . replace ( '.' , '/' ) ) ; if ( exClassName . endsWith ( "Exception" ) ) { idlName . setLength ( idlName . length ( ) - 7 ) ; } else { idlName . append ( "Ex" ) ; } if ( mangleComponents ) // PM94096 { for ( int begin = 0 ; begin < idlName . length ( ) ; ) { int end = idlName . indexOf ( "/" , begin ) ; if ( end == - 1 ) { end = idlName . length ( ) ; } if ( idlName . charAt ( begin ) == '_' ) { idlName . insert ( begin , 'J' ) ; end ++ ; } else { String comp = idlName . substring ( begin , end ) ; if ( isIDLKeyword ( comp ) ) { idlName . insert ( begin , '_' ) ; end ++ ; } } begin = end + 1 ; } } idlName . append ( ":1.0" ) ; return idlName . toString ( ) ;
public class ConfigLoader { /** * Loads a config file . * @ param resources classpath resources to from which to load javascript * @ return Config . */ public static Config load ( final String ... resources ) { } }
final ScriptEngine engine = new ScriptEngineManager ( ) . getEngineByName ( "nashorn" ) ; try ( final InputStream resourceAsStream = findResource ( "jjs-config-utils.js" ) ) { engine . eval ( new InputStreamReader ( resourceAsStream ) ) ; loadResources ( engine , resources ) ; } catch ( final ScriptException se ) { throw new IllegalArgumentException ( "unable to execute main javascript." , se ) ; } catch ( final Exception ex ) { if ( ex instanceof IllegalArgumentException ) { throw ( IllegalArgumentException ) ex ; } throw new IllegalArgumentException ( "unable to load main resource " , ex ) ; } return loadFromObject ( engine . get ( "config" ) ) ;
public class DescribeTagsRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < DescribeTagsRequest > getDryRunRequest ( ) { } }
Request < DescribeTagsRequest > request = new DescribeTagsRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class ConcurrentReferenceHashMap { /** * Return a { @ link Reference } to the { @ link Entry } for the specified { @ code key } , * or { @ code null } if not found . * @ param key the key ( can be { @ code null } ) * @ param restructure types of restructure allowed during this call * @ return the reference , or { @ code null } if not found */ protected final Reference < K , V > getReference ( Object key , Restructure restructure ) { } }
int hash = getHash ( key ) ; return getSegmentForHash ( hash ) . getReference ( key , hash , restructure ) ;
public class VarBindingBuilder { /** * Starts building a new binding . */ public VarBindingBuilder start ( VarBinding binding ) { } }
varBinding_ = binding == null ? new VarBinding ( "V" , IVarDef . ARG , "?" ) : binding ; return this ;
public class GISShapeFileReader { /** * Replies the type of map element which is corresponding to the given ESRI type . * @ param type the type of element . * @ return the type of map element . * @ throws IllegalArgumentException if the given type is not supported by the I / O API . * @ since 4.0 */ @ Pure @ SuppressWarnings ( "checkstyle:cyclomaticcomplexity" ) public static Class < ? extends MapElement > fromESRI ( ShapeElementType type ) { } }
switch ( type ) { case MULTIPOINT : case MULTIPOINT_M : case MULTIPOINT_Z : return MapMultiPoint . class ; case POINT : case POINT_M : case POINT_Z : return MapPoint . class ; case POLYGON : case POLYGON_M : case POLYGON_Z : return MapPolygon . class ; case POLYLINE : case POLYLINE_M : case POLYLINE_Z : return MapPolyline . class ; // $ CASES - OMITTED $ default : } throw new IllegalArgumentException ( ) ;
public class UserCredentialSecurityTokenProvider { /** * Add serialized token to teh credentials . * @ param tokens ByteBuffer containing token . */ @ Override public void addTokens ( final byte [ ] tokens ) { } }
try ( final DataInputBuffer buf = new DataInputBuffer ( ) ) { buf . reset ( tokens , tokens . length ) ; final Credentials credentials = new Credentials ( ) ; credentials . readTokenStorageStream ( buf ) ; final UserGroupInformation ugi = UserGroupInformation . getCurrentUser ( ) ; ugi . addCredentials ( credentials ) ; LOG . log ( Level . FINEST , "Added {0} tokens for user {1}" , new Object [ ] { credentials . numberOfTokens ( ) , ugi } ) ; } catch ( final IOException ex ) { LOG . log ( Level . SEVERE , "Could not access tokens in user credentials." , ex ) ; throw new RuntimeException ( ex ) ; }
public class ConfigUtil { /** * Gets a property from system , environment or an external map . This method supports also passing an alternative * name . * The reason for supporting multiple names , is to support multiple keys for the same property ( e . g . adding a new and * deprecating the old ) . * The lookup order is system [ name ] > env [ name ] > map [ name ] > system [ alternativeName ] > env [ alternativeName ] > * map [ alternativeName ] > defaultValue . * @ param name * The name of the property . * @ param alternativeName * An alternate name to use . * @ param map * The external map . * @ param defaultValue * The value that should be used if property is not found . */ public static String getStringProperty ( String name , String alternativeName , Map < String , String > map , String defaultValue ) { } }
return getStringProperty ( name , map , getStringProperty ( alternativeName , map , defaultValue ) ) ;
public class BusinessProcess { /** * Associates the task with the provided taskId with the current conversation . * @ param taskId * the id of the task * @ return the resumed task * @ throws ProcessEngineCdiException * if no such task is found */ public Task startTask ( String taskId ) { } }
Task currentTask = associationManager . getTask ( ) ; if ( currentTask != null && currentTask . getId ( ) . equals ( taskId ) ) { return currentTask ; } Task task = processEngine . getTaskService ( ) . createTaskQuery ( ) . taskId ( taskId ) . singleResult ( ) ; if ( task == null ) { throw new ProcessEngineCdiException ( "Cannot resume task with id '" + taskId + "', no such task." ) ; } associationManager . setTask ( task ) ; associateExecutionById ( task . getExecutionId ( ) ) ; return task ;
public class LogbackHelper { /** * Make logback configuration for a process to push all its logs to a log file . * < ul > * < li > the file ' s name will use the prefix defined in { @ link RootLoggerConfig # getProcessId ( ) # getLogFilenamePrefix ( ) } . < / li > * < li > the file will follow the rotation policy defined in property { @ link # ROLLING _ POLICY _ PROPERTY } and * the max number of files defined in property { @ link # MAX _ FILES _ PROPERTY } < / li > * < li > the logs will follow the specified log pattern < / li > * < / ul > * @ see # buildLogPattern ( RootLoggerConfig ) */ public FileAppender < ILoggingEvent > configureGlobalFileLog ( Props props , RootLoggerConfig config , String logPattern ) { } }
LoggerContext ctx = getRootContext ( ) ; Logger rootLogger = ctx . getLogger ( ROOT_LOGGER_NAME ) ; FileAppender < ILoggingEvent > fileAppender = newFileAppender ( ctx , props , config , logPattern ) ; rootLogger . addAppender ( fileAppender ) ; return fileAppender ;
public class CommandLine { /** * Process command line arguments . * @ param < T > a type * @ param args the command line arguments to be parsed * @ param argClasses the target named classes to be set * @ return Selfie if the command line parsing succeeded , null ( or exception ) otherwise . * @ throws IOException if parsing fails * @ throws BindException if a binding of short - named parameter fails */ @ SafeVarargs public final < T > CommandLine processCommandLine ( final String [ ] args , final Class < ? extends Name < ? > > ... argClasses ) throws IOException , BindException { } }
for ( final Class < ? extends Name < ? > > c : argClasses ) { registerShortNameOfClass ( c ) ; } final Options o = getCommandLineOptions ( ) ; o . addOption ( new Option ( "?" , "help" ) ) ; final Parser g = new GnuParser ( ) ; final org . apache . commons . cli . CommandLine cl ; try { cl = g . parse ( o , args ) ; } catch ( final ParseException e ) { throw new IOException ( "Could not parse config file" , e ) ; } if ( cl . hasOption ( "?" ) ) { new HelpFormatter ( ) . printHelp ( "reef" , o ) ; return null ; } for ( final Option option : cl . getOptions ( ) ) { final String shortName = option . getOpt ( ) ; final String value = option . getValue ( ) ; if ( applicationOptions . containsKey ( option ) ) { applicationOptions . get ( option ) . process ( option ) ; } else { try { conf . bind ( shortNames . get ( shortName ) , value ) ; } catch ( final BindException e ) { throw new BindException ( "Could not bind shortName " + shortName + " to value " + value , e ) ; } } } return this ;
public class DefaultGroovyMethods { /** * Counts the number of occurrences of the given value inside this Iterable . * Comparison is done using Groovy ' s = = operator ( using * < code > compareTo ( value ) = = 0 < / code > or < code > equals ( value ) < / code > ) . * Example usage : * < pre class = " groovyTestCase " > assert [ 2,4,2,1,3,5,2,4,3 ] . count ( 4 ) = = 2 < / pre > * @ param self the Iterable within which we count the number of occurrences * @ param value the value being searched for * @ return the number of occurrences * @ since 2.2.0 */ public static Number count ( Iterable self , Object value ) { } }
// Delegates to the Iterator-based overload of count().
return count ( self . iterator ( ) , value ) ;
public class CpoClassSourceGenerator { /** * Resets all of the buffers . * This is needed if you intend to reuse the visitor . This is always called by visit ( CpoClass ) */ protected void reset ( ) { } }
// Discards any previously generated output by reallocating every section buffer,
// allowing the visitor instance to be reused for another class.
attributeStatics = new StringBuilder ( ) ; functionGroupStatics = new StringBuilder ( ) ; properties = new StringBuilder ( ) ; constructor = new StringBuilder ( ) ; gettersSetters = new StringBuilder ( ) ; equals = new StringBuilder ( ) ; hashCode = new StringBuilder ( ) ; toString = new StringBuilder ( ) ; footer = new StringBuilder ( ) ;
public class CmsWorkplace { /** * Returns all present request parameters as String . < p > * The String is formatted as a parameter String ( < code > param1 = val1 & amp ; param2 = val2 < / code > ) with UTF - 8 encoded values . < p > * @ return all present request parameters as String */ public String allParamsAsRequest ( ) { } }
// Joins each request parameter as name=encodedValue, separated by '&'; values are
// encoded with the current request context encoding.
// NOTE(review): getParameter() returns only the FIRST value of a multi-valued
// parameter, so additional values are dropped — presumably intentional; confirm.
StringBuffer retValue = new StringBuffer ( 512 ) ; HttpServletRequest request = getJsp ( ) . getRequest ( ) ; Iterator < String > paramNames = request . getParameterMap ( ) . keySet ( ) . iterator ( ) ; while ( paramNames . hasNext ( ) ) { String paramName = paramNames . next ( ) ; String paramValue = request . getParameter ( paramName ) ; retValue . append ( paramName + "=" + CmsEncoder . encode ( paramValue , getCms ( ) . getRequestContext ( ) . getEncoding ( ) ) ) ; if ( paramNames . hasNext ( ) ) { retValue . append ( "&" ) ; } } return retValue . toString ( ) ;
public class BaseSerializer { /** * Deserialize a Condition List serialized using { @ link # serializeConditionList ( List ) } , or * an array serialized using { @ link # serialize ( Condition [ ] ) } * @ param str String representation ( YAML / JSON ) of the Condition list * @ return { @ code List < Condition > } */ public List < Condition > deserializeConditionList ( String str ) { } }
// Loads into the ConditionList wrapper type and unwraps the contained list.
return load ( str , ListWrappers . ConditionList . class ) . getList ( ) ;
public class VoiceApi { /** * Clear all the parties in the call * End the conference call for all parties . This can be performed by any agent participating in the conference . * @ param id The connection ID of the call to clear . ( required ) * @ param clearData ( required ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse clear ( String id , ClearData clearData ) throws ApiException { } }
// Delegates to the HTTP-info variant and unwraps the response payload.
ApiResponse < ApiSuccessResponse > resp = clearWithHttpInfo ( id , clearData ) ; return resp . getData ( ) ;
public class IonizationPotentialTool { /** * Method which is predict the Ionization Potential from given atom . * @ param container The IAtomContainer where is contained the IAtom * @ param atom The IAtom to prediction the IP * @ return The value in eV */ public static double predictIP ( IAtomContainer container , IAtom atom ) throws CDKException { } }
// Returns 0 when the atom has no lone pairs (nothing to ionize); otherwise
// dispatches on the atom family. Atoms outside the halogen/oxygen/nitrogen
// families also fall through and return 0.
double value = 0 ; // at least one lone pair orbital is necessary to ionize if ( container . getConnectedLonePairsCount ( atom ) == 0 ) return value ; // control if the IAtom belongs in some family if ( familyHalogen ( atom ) ) value = getDTHalogenF ( getQSARs ( container , atom ) ) ; else if ( familyOxygen ( atom ) ) value = getDTOxygenF ( getQSARs ( container , atom ) ) ; else if ( familyNitrogen ( atom ) ) value = getDTNitrogenF ( getQSARs ( container , atom ) ) ; return value ;
public class AmazonSQSClient { /** * Gets attributes for the specified queue . * < note > * To determine whether a queue is < a * href = " http : / / docs . aws . amazon . com / AWSSimpleQueueService / latest / SQSDeveloperGuide / FIFO - queues . html " > FIFO < / a > , you * can check whether < code > QueueName < / code > ends with the < code > . fifo < / code > suffix . * < / note > * Some actions take lists of parameters . These lists are specified using the < code > param . n < / code > notation . Values * of < code > n < / code > are integers starting from 1 . For example , a parameter list with two elements looks like this : * < code > & amp ; Attribute . 1 = first < / code > * < code > & amp ; Attribute . 2 = second < / code > * @ param getQueueAttributesRequest * @ return Result of the GetQueueAttributes operation returned by the service . * @ throws InvalidAttributeNameException * The specified attribute doesn ' t exist . * @ sample AmazonSQS . GetQueueAttributes * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / sqs - 2012-11-05 / GetQueueAttributes " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetQueueAttributesResult getQueueAttributes ( GetQueueAttributesRequest request ) { } }
// Applies the pre-execution hook to the request, then delegates to the executor.
request = beforeClientExecution ( request ) ; return executeGetQueueAttributes ( request ) ;
public class LocalFilterManagerServiceImpl { /** * / * ( non - Javadoc ) * @ see org . apache . pluto . container . FilterManagerService # getFilterManager ( org . apache . pluto . container . PortletWindow , java . lang . String ) */ @ Override public FilterManager getFilterManager ( PortletWindow window , String lifeCycle ) { } }
// Creates a fresh FilterManagerImpl on every call; no caching is performed here.
FilterManagerImpl filterManager = new FilterManagerImpl ( window , lifeCycle ) ; return filterManager ;
public class BatchClientFactoryImpl { /** * Given a segment , fetch its SegmentRange . * - If segment is part of startStreamCut / endStreamCut update startOffset and endOffset accordingly . * - If segment is not part of the streamCuts fetch the data using SegmentMetadataClient . */ private SegmentRange getSegmentRange ( final Segment segment , final StreamCut startStreamCut , final StreamCut endStreamCut ) { } }
// Fast path: when BOTH stream cuts already contain the segment, their recorded
// offsets are used directly. Otherwise a metadata lookup supplies defaults:
// segment start offset for the lower bound, current write offset for the upper.
SegmentRangeImpl . SegmentRangeImplBuilder segmentRangeBuilder = SegmentRangeImpl . builder ( ) . segment ( segment ) ; if ( startStreamCut . asImpl ( ) . getPositions ( ) . containsKey ( segment ) && endStreamCut . asImpl ( ) . getPositions ( ) . containsKey ( segment ) ) { // use the meta data present in startStreamCut and endStreamCuts . segmentRangeBuilder . startOffset ( startStreamCut . asImpl ( ) . getPositions ( ) . get ( segment ) ) . endOffset ( endStreamCut . asImpl ( ) . getPositions ( ) . get ( segment ) ) ; } else { // use segment meta data client to fetch the segment offsets . SegmentInfo r = segmentToInfo ( segment ) ; segmentRangeBuilder . startOffset ( startStreamCut . asImpl ( ) . getPositions ( ) . getOrDefault ( segment , r . getStartingOffset ( ) ) ) . endOffset ( endStreamCut . asImpl ( ) . getPositions ( ) . getOrDefault ( segment , r . getWriteOffset ( ) ) ) ; } return segmentRangeBuilder . build ( ) ;
public class CmsFormatterBeanParser { /** * Parses the matching criteria ( container types or widths ) for the formatter . < p > * @ param linkFormatterLoc the formatter value location * @ param strict if we should throw an error for incomplete match * @ throws ParseException if parsing goes wrong */ private void parseMatch ( I_CmsXmlContentLocation linkFormatterLoc , boolean strict ) throws ParseException { } }
// Container types and widths are mutually exclusive criteria; types win when both
// are present. An unparseable width is fatal, an unparseable max-width silently
// defaults to Integer.MAX_VALUE. When neither is defined: error in strict mode,
// otherwise fall back to width -1 / unbounded max-width.
Set < String > containerTypes = new HashSet < String > ( ) ; I_CmsXmlContentValueLocation typesLoc = linkFormatterLoc . getSubValue ( path ( N_MATCH , N_TYPES ) ) ; I_CmsXmlContentValueLocation widthLoc = linkFormatterLoc . getSubValue ( path ( N_MATCH , N_WIDTH ) ) ; if ( typesLoc != null ) { List < I_CmsXmlContentValueLocation > singleTypeLocs = typesLoc . getSubValues ( N_CONTAINER_TYPE ) ; for ( I_CmsXmlContentValueLocation singleTypeLoc : singleTypeLocs ) { String containerType = singleTypeLoc . getValue ( ) . getStringValue ( m_cms ) . trim ( ) ; containerTypes . add ( containerType ) ; } m_containerTypes = containerTypes ; } else if ( widthLoc != null ) { String widthStr = getString ( widthLoc , N_WIDTH , null ) ; String maxWidthStr = getString ( widthLoc , N_MAX_WIDTH , null ) ; try { m_width = Integer . parseInt ( widthStr ) ; } catch ( Exception e ) { throw new ParseException ( "Invalid container width: [" + widthStr + "]" , e ) ; } try { m_maxWidth = Integer . parseInt ( maxWidthStr ) ; } catch ( Exception e ) { m_maxWidth = Integer . MAX_VALUE ; LOG . debug ( maxWidthStr , e ) ; } } else { if ( strict ) { throw new ParseException ( "Neither container types nor container widths defined!" ) ; } else { m_width = - 1 ; m_maxWidth = Integer . MAX_VALUE ; } }
public class GeoPackageCoreImpl { /** * { @ inheritDoc } */ @ Override public boolean createGriddedTileTable ( ) { } }
// Idempotent: creates the gridded-tile table only when it does not already exist,
// returning true only if it was actually created. SQLException is wrapped in a
// GeoPackageException.
verifyWritable ( ) ; boolean created = false ; GriddedTileDao dao = getGriddedTileDao ( ) ; try { if ( ! dao . isTableExists ( ) ) { created = tableCreator . createGriddedTile ( ) > 0 ; } } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to check if " + GriddedTile . class . getSimpleName ( ) + " table exists and create it" , e ) ; } return created ;
public class WingsService { /** * Acquires a wake lock . * @ param context the { @ link Context } . */ private synchronized static void acquireWakeLock ( Context context ) { } }
// Lazily creates a shared, reference-counted PARTIAL_WAKE_LOCK, then acquires it.
// NOTE(review): acquire() has no timeout — every call must be balanced by a
// release elsewhere or the lock leaks; confirm against the release path.
// Setup wake lock . if ( sWakeLock == null ) { PowerManager powerManager = ( PowerManager ) context . getApplicationContext ( ) . getSystemService ( Context . POWER_SERVICE ) ; sWakeLock = powerManager . newWakeLock ( PowerManager . PARTIAL_WAKE_LOCK , NAME ) ; sWakeLock . setReferenceCounted ( true ) ; } // Acquire lock . sWakeLock . acquire ( ) ; sLogger . log ( WingsService . class , "acquireWakeLock" , "sWakeLock=" + sWakeLock ) ;
public class UResourceBundle { /** * < strong > [ icu ] < / strong > Returns a resource in a given resource that has a given key . * @ param aKey a key associated with the wanted resource * @ return a resource bundle object representing the resource * @ throws MissingResourceException If resource bundle is missing . */ public UResourceBundle get ( String aKey ) { } }
// Looks the key up at the top level; when absent, throws MissingResourceException
// carrying the fully qualified bundle name for diagnostics.
UResourceBundle obj = findTopLevel ( aKey ) ; if ( obj == null ) { String fullName = ICUResourceBundleReader . getFullName ( getBaseName ( ) , getLocaleID ( ) ) ; throw new MissingResourceException ( "Can't find resource for bundle " + fullName + ", key " + aKey , this . getClass ( ) . getName ( ) , aKey ) ; } return obj ;
public class SequenceQuality { /** * Factory method for the SequenceQualityPhred object . It performs all necessary range checks if required . * @ param format format of encoded quality values * @ param data byte with encoded quality values * @ param from starting position in { @ code data } * @ param length number of bytes to parse * @ param check determines whether range check is required * @ return quality line object * @ throws WrongQualityFormat if encoded value are out of range and checking is enabled */ public static SequenceQuality create ( QualityFormat format , byte [ ] data , int from , int length , boolean check ) { } }
if ( from + length >= data . length || from < 0 || length < 0 ) throw new IllegalArgumentException ( ) ; // For performance final byte valueOffset = format . getOffset ( ) , minValue = format . getMinValue ( ) , maxValue = format . getMaxValue ( ) ; byte [ ] res = new byte [ length ] ; int pointer = from ; for ( int i = 0 ; i < length ; i ++ ) { res [ i ] = ( byte ) ( data [ pointer ++ ] - valueOffset ) ; if ( check && ( res [ i ] < minValue || res [ i ] > maxValue ) ) throw new WrongQualityFormat ( ( ( char ) ( data [ i ] ) ) + " [" + res [ i ] + "]" ) ; } return new SequenceQuality ( res , true ) ;
public class VimGenerator2 { /** * Append a Vim region . * @ param it the receiver of the generated elements . * @ param name the name of the pattern . * @ param start the start pattern . * @ param end the end pattern . * @ param contains the contained elements . * @ return { @ code it } . */ protected IStyleAppendable appendRegion ( IStyleAppendable it , String name , String start , String end , String ... contains ) { } }
// Delegates to the overload with the boolean flag fixed to true
// (presumably "contained"/default behavior — confirm against the overload).
return appendRegion ( it , true , name , start , end , contains ) ;
public class AssetUtil { /** * Helper to extract a ClassResources full path . < br / > * < br / > * ie : package . MyClass = package / MyClass . class * @ param clazz * @ return */ public static ArchivePath getFullPathForClassResource ( Class < ? > clazz ) { } }
// Converts the class name delimiter to the resource path delimiter and appends the
// class-file extension.
// NOTE(review): replaceAll() interprets DELIMITER_CLASS_NAME_PATH as a REGEX —
// assumed to be a properly escaped pattern (e.g. "\\."); confirm, otherwise
// String.replace() would be the safer call.
String classResourceDelimiter = clazz . getName ( ) . replaceAll ( DELIMITER_CLASS_NAME_PATH , DELIMITER_RESOURCE_PATH ) ; String classFullPath = classResourceDelimiter + EXTENSION_CLASS ; return new BasicPath ( classFullPath ) ;
public class ConfigClient { /** * batch process for { @ link # getConfig ( String ) } method * @ param configKeyStrs * @ return * @ throws ConfigStoreFactoryDoesNotExistsException * @ throws ConfigStoreCreationException * @ throws VersionDoesNotExistException * @ throws URISyntaxException */ public Map < URI , Config > getConfigsFromStrings ( Collection < String > configKeyStrs ) throws ConfigStoreFactoryDoesNotExistsException , ConfigStoreCreationException , VersionDoesNotExistException , URISyntaxException { } }
// Null/empty input short-circuits to an empty map; otherwise each string is parsed
// to a URI (URISyntaxException propagates) and the URI-based batch overload is used.
if ( configKeyStrs == null || configKeyStrs . size ( ) == 0 ) return Collections . emptyMap ( ) ; Collection < URI > configKeyUris = new ArrayList < > ( ) ; for ( String s : configKeyStrs ) { configKeyUris . add ( new URI ( s ) ) ; } return getConfigs ( configKeyUris ) ;
public class SecurityServletConfiguratorHelper { /** * Creates a map of url patterns to serlvet names that represents the url - pattern elements inside the servlet - mapping elements * in the web . xml and / or web - fragment . xml * @ param servletMappings the servlet mappings */ private void processURLPatterns ( List < ServletMapping > servletMappings ) { } }
// Populates urlPatternToServletName from each mapping; a pattern appearing in more
// than one mapping keeps the LAST servlet name seen (Map.put overwrites).
for ( ServletMapping servletMapping : servletMappings ) { String servletName = servletMapping . getServletName ( ) ; List < String > urlPatterns = servletMapping . getURLPatterns ( ) ; if ( urlPatterns != null ) { for ( String pattern : urlPatterns ) { urlPatternToServletName . put ( pattern , servletName ) ; } } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "urlPatternToServletName: " + urlPatternToServletName ) ; }
public class StreamUtility { /** * Gets a byte array for the given input stream . */ public static byte [ ] getBytes ( InputStream in ) throws IOException { } }
ByteArrayOutputStream out = new ByteArrayOutputStream ( ) ; pipeStream ( in , out , 4096 ) ; return out . toByteArray ( ) ;
public class ObjectUtils { /** * Checks if all values in the array are not { @ code nulls } . * If any value is { @ code null } or the array is { @ code null } then * { @ code false } is returned . If all elements in array are not * { @ code null } or the array is empty ( contains no elements ) { @ code true } * is returned . * < pre > * ObjectUtils . allNotNull ( * ) = true * ObjectUtils . allNotNull ( * , * ) = true * ObjectUtils . allNotNull ( null ) = false * ObjectUtils . allNotNull ( null , null ) = false * ObjectUtils . allNotNull ( null , * ) = false * ObjectUtils . allNotNull ( * , null ) = false * ObjectUtils . allNotNull ( * , * , null , * ) = false * < / pre > * @ param values the values to test , may be { @ code null } or empty * @ return { @ code false } if there is at least one { @ code null } value in the array or the array is { @ code null } , * { @ code true } if all values in the array are not { @ code null } s or array contains no elements . * @ since 3.5 */ public static boolean allNotNull ( final Object ... values ) { } }
if ( values == null ) { return false ; } for ( final Object val : values ) { if ( val == null ) { return false ; } } return true ;
public class AltsTsiHandshaker { /** * Gets bytes that need to be sent to the peer . * @ param bytes The buffer to put handshake bytes . */ @ Override public void getBytesToSendToPeer ( ByteBuffer bytes ) throws GeneralSecurityException { } }
// Client side starts the handshake lazily on first call; server side has nothing to
// send until it has received peer bytes. A duplicate of the output frame is used so
// its limit can be clamped to the destination's remaining capacity without
// disturbing the source; the source position is then advanced to match what was
// actually copied.
if ( outputFrame == null ) { // A null outputFrame indicates we haven ' t started the handshake . if ( isClient ) { outputFrame = handshaker . startClientHandshake ( ) ; } else { // The server needs bytes to process before it can start the handshake . return ; } } // Write as many bytes as we are able . ByteBuffer outputFrameAlias = outputFrame ; if ( outputFrame . remaining ( ) > bytes . remaining ( ) ) { outputFrameAlias = outputFrame . duplicate ( ) ; outputFrameAlias . limit ( outputFrameAlias . position ( ) + bytes . remaining ( ) ) ; } bytes . put ( outputFrameAlias ) ; outputFrame . position ( outputFrameAlias . position ( ) ) ;
public class HostInfoStoreImpl { /** * Deprecated : Use topology functions instead . See { @ link org . jboss . as . console . client . domain . topology . TopologyFunctions } * @ param serverGroup * @ param callback */ @ Override @ Deprecated public void loadServerInstances ( final String serverGroup , final AsyncCallback < List < ServerInstance > > callback ) { } }
final List < ServerInstance > instancesOfGroup = new LinkedList < ServerInstance > ( ) ; loadHostsAndServerInstances ( new SimpleCallback < List < HostInfo > > ( ) { @ Override public void onSuccess ( final List < HostInfo > result ) { for ( HostInfo host : result ) { List < ServerInstance > instances = host . getServerInstances ( ) ; for ( ServerInstance instance : instances ) { if ( serverGroup == null ) { instancesOfGroup . add ( instance ) ; } else if ( instance . getGroup ( ) . equals ( instance . getGroup ( ) ) ) { instancesOfGroup . add ( instance ) ; } } } callback . onSuccess ( instancesOfGroup ) ; } } ) ;
public class ConversionQueryBuilder { /** * Set the providers to be considered . If not set explicitly the < i > default < / i > ISO currencies as * returned by { @ link java . util . Currency } is used . * @ param rateTypes the rate types to use , not null . * @ return the query for chaining . */ public ConversionQueryBuilder setRateTypes ( RateType ... rateTypes ) { } }
// Copies the varargs into a HashSet (deduplicating) and stores it under the
// rate-types key; returns the builder for chaining.
return set ( ConversionQuery . KEY_RATE_TYPES , new HashSet < > ( Arrays . asList ( rateTypes ) ) ) ;
public class AbstractJobStatus { /** * Deprecated */ @ Override @ Deprecated public List < LogEvent > getLog ( LogLevel level ) { } }
// Deprecated shim: delegates to the no-arg getLog() accessor and filters by level.
return getLog ( ) . getLogs ( level ) ;
public class ResourceProcessor { /** * F743-22218.3 */ private void collectEjb10Properties ( String jndiName , Object injectionObject ) throws InjectionException { } }
// EJB 1.0 compatibility: names under "ejb10-properties/" are stored in the
// environment properties with the prefix stripped; all other names are ignored.
// For EJB 1.0 compatibility only // According to spec remove the first element // of the name ( ejb10 - properties ) final String prefix = "ejb10-properties/" ; if ( jndiName . startsWith ( prefix ) ) // d710771.1 { Properties envProperties = ivNameSpaceConfig . getEnvProperties ( ) ; if ( envProperties != null ) { envProperties . put ( jndiName . substring ( prefix . length ( ) ) , injectionObject ) ; } }
public class DCSplashPanel { /** * Creates a label for the title of the screen * @ param text * @ param includeBackButton * @ param window * @ return */ public static JComponent createTitleLabel ( final String text , final ActionListener backButtonActionListener ) { } }
// NOTE(review): the javadoc params (includeBackButton, window) are stale — the
// actual parameters are text and backButtonActionListener.
// With a listener: the title becomes clickable (hand cursor, click fires the
// listener with a null event) and is wrapped in a flow panel next to a
// back-to-welcome button. Without one: the bare, bordered label is returned.
final DCLabel titleLabel = new DCLabel ( false , text , WidgetUtils . BG_COLOR_BLUE_DARK , null ) ; titleLabel . setFont ( WidgetUtils . FONT_BANNER ) ; final EmptyBorder border = new EmptyBorder ( adjuster . adjust ( 20 ) , MARGIN_LEFT , adjuster . adjust ( 10 ) , 0 ) ; if ( backButtonActionListener != null ) { titleLabel . addMouseListener ( new MouseAdapter ( ) { @ Override public void mouseClicked ( final MouseEvent e ) { backButtonActionListener . actionPerformed ( null ) ; } } ) ; titleLabel . setCursor ( Cursor . getPredefinedCursor ( Cursor . HAND_CURSOR ) ) ; final DCPanel panel = DCPanel . flow ( Alignment . LEFT , MARGIN_LEFT , 0 , createBackToWelcomeScreenButton ( backButtonActionListener ) , titleLabel ) ; panel . setBorder ( border ) ; return panel ; } else { titleLabel . setBorder ( border ) ; return titleLabel ; }
public class ManageTagsDialog { /** * This method initializes btnStart * @ return javax . swing . JButton */ private JButton getBtnAdd ( ) { } }
// Lazily builds the Add button; clicking it adds the combo box's selected text as a
// tag and resets the selection to index 0.
// NOTE(review): getSelectedItem() can return null when the combo is empty, which
// would NPE on toString() — presumably the combo always has an entry; confirm.
if ( btnAdd == null ) { btnAdd = new JButton ( ) ; btnAdd . setText ( Constant . messages . getString ( "history.managetags.button.add" ) ) ; btnAdd . setMinimumSize ( new java . awt . Dimension ( 75 , 30 ) ) ; btnAdd . setPreferredSize ( new java . awt . Dimension ( 75 , 30 ) ) ; btnAdd . setMaximumSize ( new java . awt . Dimension ( 100 , 40 ) ) ; btnAdd . addActionListener ( new java . awt . event . ActionListener ( ) { @ Override public void actionPerformed ( java . awt . event . ActionEvent e ) { addTag ( getTxtTagAdd ( ) . getSelectedItem ( ) . toString ( ) ) ; getTxtTagAdd ( ) . setSelectedIndex ( 0 ) ; } } ) ; } return btnAdd ;
public class StreamsUtils { /** * < p > Generates a stream that is computed from a provided double stream by first rolling it in the same * way as the < code > roll ( ) < / code > method does . The average is then computed on each substream , to * form the final double stream . No boxing / unboxing is conducted in the process . * < p > The resulting stream has the same number of elements as the provided stream , * minus the size of the window width , to preserve consistency of each collection . < / p > * < p > A < code > NullPointerException < / code > will be thrown if the provided stream is null . < / p > * @ param doubleStream the processed stream * @ param rollingFactor the size of the window to apply the collector on * @ return a stream in which each value is the collection of the provided stream */ public static DoubleStream shiftingWindowAveragingDouble ( DoubleStream doubleStream , int rollingFactor ) { } }
// Wraps the source spliterator in a rolling-window spliterator, preserving
// parallelism and forwarding close(), then averages each window.
// NOTE(review): average().getAsDouble() throws NoSuchElementException for an empty
// window — presumably RollingOfDoubleSpliterator never yields one; confirm.
Objects . requireNonNull ( doubleStream ) ; RollingOfDoubleSpliterator ofDoubleSpliterator = RollingOfDoubleSpliterator . of ( doubleStream . spliterator ( ) , rollingFactor ) ; return StreamSupport . stream ( ofDoubleSpliterator , doubleStream . isParallel ( ) ) . onClose ( doubleStream :: close ) . mapToDouble ( subStream -> subStream . average ( ) . getAsDouble ( ) ) ;
public class ConfigurationPropertyRegistryModule { /** * Create fake bindings for all the properties in { @ link com . peterphi . std . guice . apploader . GuiceProperties } * @ param registry * @ param injector */ private static void bindAllGuiceProperties ( ConfigurationPropertyRegistry registry , AtomicReference < Injector > injector ) { } }
// Registers every public static field of GuiceProperties with the registry,
// treating each field's value as the property name (String type is assumed, per
// the inline comment); any reflection failure is rethrown with the field name.
for ( Field field : GuiceProperties . class . getFields ( ) ) { if ( Modifier . isStatic ( field . getModifiers ( ) ) && Modifier . isPublic ( field . getModifiers ( ) ) ) { try { // We are just assuming these properties have a string type final String propertyName = String . valueOf ( field . get ( null ) ) ; registry . register ( GuiceProperties . class , injector , propertyName , String . class , field ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Error trying to process GuiceProperties." + field . getName ( ) , e ) ; } } }
public class AbstractFramedStreamSinkConduit { /** * Queues a frame for sending . * @ param callback * @ param data */ protected void queueFrame ( FrameCallBack callback , ByteBuffer ... data ) { } }
// Updates the byte/buffer accounting counters, then enqueues the frame covering the
// full data array. NOTE(review): no synchronization visible here — assumed the
// conduit's caller guarantees single-threaded access; confirm.
queuedData += Buffers . remaining ( data ) ; bufferCount += data . length ; frameQueue . add ( new Frame ( callback , data , 0 , data . length ) ) ;
public class RippleMarketDataService { /** * If the base currency is not XRP then the returned orders ' additional data map contains a value * for { @ link RippleExchange . DATA _ BASE _ COUNTERPARTY } , similarly if the counter currency is not XRP * then { @ link RippleExchange . DATA _ COUNTER _ COUNTERPARTY } is populated . * @ param currencyPair the base / counter currency pair * @ param args a RippleMarketDataParams object needs to be supplied */ @ Override public OrderBook getOrderBook ( final CurrencyPair currencyPair , final Object ... args ) throws IOException { } }
// Requires a RippleMarketDataParams as the first vararg; fetches the raw Ripple
// order book and adapts it, otherwise fails fast with an ExchangeException.
if ( ( args != null && args . length > 0 ) && ( args [ 0 ] instanceof RippleMarketDataParams ) ) { final RippleMarketDataParams params = ( RippleMarketDataParams ) args [ 0 ] ; final RippleOrderBook orderBook = getRippleOrderBook ( currencyPair , params ) ; return RippleAdapters . adaptOrderBook ( orderBook , params , currencyPair ) ; } else { throw new ExchangeException ( "RippleMarketDataParams is missing" ) ; }
public class IntervalST { /** * look in subtree rooted at x */ private boolean searchAll ( Node < K , V > x , Interval1D < K > interval , List < Node < K , V > > toAppend ) { } }
// Interval-tree search with max-endpoint pruning: collect x itself if it
// intersects; recurse left only when the left subtree's max can still reach the
// query's low bound; recurse right when the left subtree was skipped or found
// something (the standard condition ensuring no intersecting node is missed).
// Returns whether anything was appended in this subtree.
boolean found1 = false ; boolean found2 = false ; boolean found3 = false ; if ( x == null ) { return false ; } if ( interval . intersects ( x . interval ) ) { toAppend . add ( x ) ; found1 = true ; } if ( x . left != null && x . left . max . compareTo ( interval . lo ) >= 0 ) { found2 = searchAll ( x . left , interval , toAppend ) ; } if ( found2 || x . left == null || x . left . max . compareTo ( interval . lo ) < 0 ) { found3 = searchAll ( x . right , interval , toAppend ) ; } return found1 || found2 || found3 ;
public class NodeTypeManagerImpl { /** * { @ inheritDoc } * @ return */ public NodeTypeIterator registerNodeTypes ( InputStream xml , int alreadyExistsBehaviour , String contentType ) throws RepositoryException { } }
// Registers the node types from the stream, then wraps each resulting
// NodeTypeData in a NodeTypeImpl and returns them as an EntityCollection.
Collection < NodeTypeData > nts = typesManager . registerNodeTypes ( xml , alreadyExistsBehaviour , contentType ) ; EntityCollection types = new EntityCollection ( ) ; for ( NodeTypeData ntdata : nts ) { types . add ( new NodeTypeImpl ( ntdata , typesManager , this , locationFactory , valueFactory , dataManager ) ) ; } return types ;
public class DropboxFilePickerFragment { /** * Once loading has finished , show the list and hide the progress bar . */ @ Override public void onLoaderReset ( Loader < SortedList < Metadata > > loader ) { } }
// Hides the progress indicator, reveals the result list, then lets the superclass
// perform its own reset handling.
progressBar . setVisibility ( View . INVISIBLE ) ; recyclerView . setVisibility ( View . VISIBLE ) ; super . onLoaderReset ( loader ) ;
public class IfAxis { /** * { @ inheritDoc } */ @ Override public boolean hasNext ( ) { } }
// On the first call, evaluates the if-condition's effective boolean value once and
// latches either the then- or else-axis as the result axis. Subsequent calls simply
// advance that axis; when exhausted, the axis resets to its start key and reports
// false. A TTXPathException during evaluation is rethrown as RuntimeException.
resetToLastKey ( ) ; if ( mFirst ) { mFirst = false ; try { mResult = ( Function . ebv ( mIf , mRtx ) ) ? mThen : mElse ; } catch ( TTXPathException e ) { throw new RuntimeException ( e ) ; } } if ( mResult . hasNext ( ) ) { return true ; } else { resetToStartKey ( ) ; return false ; }
public class DbManager { /** * lazy init */ @ SuppressWarnings ( "deprecation" ) private synchronized void init ( ) { } }
// Synchronized lazy initialization of the Mongo connection. Three configurations:
// no URL -> default host/port with options; two URLs -> paired addresses
// (left/right); one URL -> single address. The deprecated Mongo/DBAddress
// constructors are intentional, per @SuppressWarnings("deprecation"). Any failure
// is rethrown as RuntimeException and 'initialized' stays false.
if ( initialized ) { return ; } if ( dbname == null ) { throw new IllegalStateException ( "MongoDB dbname not defined" ) ; } try { if ( dbUrl1 == null || dbUrl1 . equals ( "" ) ) { // default host / port , but with options mongo = new Mongo ( new ServerAddress ( ) , options ) ; } else if ( dbUrl2 != null && ! dbUrl2 . equals ( "" ) ) { DBAddress left = new DBAddress ( urlWithDbname ( dbUrl1 ) ) ; DBAddress right = new DBAddress ( urlWithDbname ( dbUrl2 ) ) ; mongo = new Mongo ( left , right , options ) ; } else { DBAddress left = new DBAddress ( urlWithDbname ( dbUrl1 ) ) ; mongo = new Mongo ( left , options ) ; } db = mongo . getDB ( dbname ) ; initialized = true ; } catch ( Exception e ) { throw new RuntimeException ( e . getMessage ( ) , e ) ; }
public class Initializer { /** * Initializes an Object by calling it ' s init method with a Map of parameters if the Object is an instance of the * ParameterizedInitable interface . Calls the init ( : Object ) method with no arguments / parameters if the Object is not * an instance of the ParameterizedInitable interface . * @ param initableObj the Object to be initialized . * @ param parameters a Map of parameters used to initialize the Object . * @ return a boolean value indicating whether the Object has been initialized . * @ see java . util . Map * @ see # init ( Object ) */ public static boolean init ( Object initableObj , Map < ? , ? > parameters ) { } }
// ParameterizedInitable objects receive the parameter map and are reported as
// initialized; everything else falls back to the no-arg init(Object) overload.
if ( initableObj instanceof ParameterizedInitable ) { ( ( ParameterizedInitable ) initableObj ) . init ( parameters ) ; return true ; } return init ( initableObj ) ;
public class DataReader { /** * Returns the global DataReader instance , populating it if necessary . */ public static DataReader get ( ) throws IOException { } }
// Lazily initializes the singleton and prints the load time to stdout.
// NOTE(review): this check-then-init is not synchronized — concurrent first calls
// could initialize twice; assumed single-threaded startup, confirm.
if ( ! instance . initialized ) { long start = System . currentTimeMillis ( ) ; instance . init ( ) ; long elapsed = System . currentTimeMillis ( ) - start ; System . out . printf ( "Loaded CLDR data in %d ms.\n" , elapsed ) ; } return instance ;
public class CentralDogmaBeanFactory { /** * Returns a newly - created bean instance with the settings specified by { @ link CentralDogmaBean } annotation . * @ param defaultValue a Java bean annotated with { @ link CentralDogmaBean } . The default value is used before * initialization . * @ param beanType the type of { @ code bean } * @ param changeListener the { @ link Consumer } of { @ code beanType } , invoked when { @ code bean } is updated . * Will consume the new value of the bean . * @ return a new Java bean whose getters return the latest known values mirrored from Central Dogma */ public < T > T get ( T defaultValue , Class < T > beanType , Consumer < T > changeListener ) { } }
// Delegates to the four-argument overload using the empty bean configuration.
return get ( defaultValue , beanType , changeListener , CentralDogmaBeanConfig . EMPTY ) ;
public class SegmentSlic { /** * Computes how far away each cluster is from each pixel . Expectation step . */ protected void computeClusterDistance ( ) { } }
// SLIC expectation step: reset every pixel's cluster assignments, then for each
// cluster scan a (2*gridInterval+1)-square search window around its (rounded)
// center, clipped to the image bounds. Each pixel in the window accumulates the
// combined distance: color distance plus spatially weighted squared pixel
// distance (adjustSpacial * (dx^2 + dy^2)). Honors the stopRequested flag between
// clusters so a long segmentation can be aborted.
for ( int i = 0 ; i < pixels . size ; i ++ ) { pixels . data [ i ] . reset ( ) ; } for ( int i = 0 ; i < clusters . size && ! stopRequested ; i ++ ) { Cluster c = clusters . data [ i ] ; // compute search bounds int centerX = ( int ) ( c . x + 0.5f ) ; int centerY = ( int ) ( c . y + 0.5f ) ; int x0 = centerX - gridInterval ; int x1 = centerX + gridInterval + 1 ; int y0 = centerY - gridInterval ; int y1 = centerY + gridInterval + 1 ; if ( x0 < 0 ) x0 = 0 ; if ( y0 < 0 ) y0 = 0 ; if ( x1 > input . width ) x1 = input . width ; if ( y1 > input . height ) y1 = input . height ; for ( int y = y0 ; y < y1 ; y ++ ) { int indexPixel = y * input . width + x0 ; int indexInput = input . startIndex + y * input . stride + x0 ; int dy = y - centerY ; for ( int x = x0 ; x < x1 ; x ++ ) { int dx = x - centerX ; float distanceColor = colorDistance ( c . color , indexInput ++ ) ; float distanceSpacial = dx * dx + dy * dy ; pixels . data [ indexPixel ++ ] . add ( c , distanceColor + adjustSpacial * distanceSpacial ) ; } } }
public class ArtifactManagingServiceImpl { /** * ( non - Javadoc ) * @ see org . exoplatform . services . jcr . ext . maven . ArtifactManagingService # searchArtifacts * ( org . exoplatform . services . jcr . ext . common . SessionProvider , * org . exoplatform . services . jcr . ext . maven . SearchCriteria ) */ public List < Descriptor > searchArtifacts ( SessionProvider sp , SearchCriteria criteria ) throws RepositoryException { } }
// Refreshes/saves the session, builds a jcr:path LIKE constraint from the criteria
// expression (rooted at rootNodePath, or the workspace root for a trivial root
// path), runs a JCR-SQL query over nt:folder and wraps each result path in a
// FolderDescriptor.
// NOTE(review): the query is built with String.format from criteria input —
// assumed trusted/internal; confirm, otherwise this permits query injection.
Session session = currentSession ( sp ) ; session . refresh ( true ) ; session . save ( ) ; String param = criteria . getContainsExpr ( ) ; String pathConstraint = "" ; if ( rootNodePath . length ( ) > 1 ) { // artifact root is some real node if ( rootNodePath . endsWith ( "/" ) ) pathConstraint = rootNodePath + "%/" + param + "[%]" ; else pathConstraint = rootNodePath + "/%/" + param + "[%]" ; } else { pathConstraint = "/%/" + param + "[%]" ; // artifact root is workspace root } // node ! ! String sqlQuery = String . format ( "SELECT * FROM nt:folder WHERE jcr:path LIKE '%s' " , pathConstraint ) ; LOG . info ( sqlQuery ) ; QueryManager manager = session . getWorkspace ( ) . getQueryManager ( ) ; Query query = manager . createQuery ( sqlQuery , Query . SQL ) ; QueryResult queryResult = query . execute ( ) ; NodeIterator iterator = queryResult . getNodes ( ) ; List < Descriptor > preciseNode = new ArrayList < Descriptor > ( ) ; while ( iterator . hasNext ( ) ) { Node candidate = iterator . nextNode ( ) ; Descriptor descriptor = new FolderDescriptor ( candidate . getPath ( ) ) ; preciseNode . add ( descriptor ) ; } return preciseNode ;
public class MPP14Reader { /** * Create an index of base font numbers and their associated base * font instances . * @ param data property data */ private void processBaseFonts ( byte [ ] data ) { } }
int offset = 0 ; int blockCount = MPPUtility . getShort ( data , 0 ) ; offset += 2 ; int size ; String name ; for ( int loop = 0 ; loop < blockCount ; loop ++ ) { /* unknownAttribute = MPPUtility . getShort ( data , offset ) ; */ offset += 2 ; size = MPPUtility . getShort ( data , offset ) ; offset += 2 ; name = MPPUtility . getUnicodeString ( data , offset ) ; offset += 64 ; if ( name . length ( ) != 0 ) { FontBase fontBase = new FontBase ( Integer . valueOf ( loop ) , name , size ) ; m_fontBases . put ( fontBase . getIndex ( ) , fontBase ) ; } }
public class Primitive { public static Object [ ] wrap ( Object [ ] args , Class < ? > [ ] paramTypes ) { } }
if ( args == null ) return null ; Object [ ] oa = new Object [ args . length ] ; for ( int i = 0 ; i < args . length ; i ++ ) oa [ i ] = wrap ( args [ i ] , paramTypes [ i ] ) ; return oa ;
public class DateUtil { /** * 获取某周的开始时间 , 周一定为一周的开始时间 * @ param calendar 日期 { @ link Calendar } * @ param isMondayAsFirstDay 是否周一做为一周的第一天 ( false表示周日做为第一天 ) * @ return { @ link Calendar } * @ since 3.1.2 */ public static Calendar beginOfWeek ( Calendar calendar , boolean isMondayAsFirstDay ) { } }
if ( isMondayAsFirstDay ) { calendar . setFirstDayOfWeek ( Calendar . MONDAY ) ; } return truncate ( calendar , DateField . WEEK_OF_MONTH ) ;
public class ScanRequest { /** * This is a legacy parameter . Use < code > ProjectionExpression < / code > instead . For more information , see < a href = * " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / LegacyConditionalParameters . AttributesToGet . html " * > AttributesToGet < / a > in the < i > Amazon DynamoDB Developer Guide < / i > . * @ param attributesToGet * This is a legacy parameter . Use < code > ProjectionExpression < / code > instead . For more information , see < a * href = * " https : / / docs . aws . amazon . com / amazondynamodb / latest / developerguide / LegacyConditionalParameters . AttributesToGet . html " * > AttributesToGet < / a > in the < i > Amazon DynamoDB Developer Guide < / i > . */ public void setAttributesToGet ( java . util . Collection < String > attributesToGet ) { } }
if ( attributesToGet == null ) { this . attributesToGet = null ; return ; } this . attributesToGet = new java . util . ArrayList < String > ( attributesToGet ) ;
public class TileBoundingBoxUtils { /** * Get the X pixel for where the longitude fits into the bounding box * @ param width * width * @ param boundingBox * bounding box * @ param longitude * longitude * @ return x pixel */ public static float getXPixel ( long width , BoundingBox boundingBox , double longitude ) { } }
double boxWidth = boundingBox . getMaxLongitude ( ) - boundingBox . getMinLongitude ( ) ; double offset = longitude - boundingBox . getMinLongitude ( ) ; double percentage = offset / boxWidth ; float pixel = ( float ) ( percentage * width ) ; return pixel ;
public class ParameterConfig { /** * 自定义的key是否合法 * @ param paramkey 参数key * @ return 是否合法 */ public static boolean isValidParamKey ( String paramkey ) { } }
char c = paramkey . charAt ( 0 ) ; return c != RpcConstants . HIDE_KEY_PREFIX && c != RpcConstants . INTERNAL_KEY_PREFIX ;
public class PerCaseBuilderWithProduct { /** * Sets the function to evaluate _ eventually _ when input data do not match any case . */ public final PCB eventuallyProduce ( @ Nonnull R directValue ) { } }
eventually = directToFunction . apply ( directValue ) ; return self ( ) ;
public class Constraints { /** * Apply a inclusive " range " constraint to a bean property . * @ param propertyName the property with the range constraint . * @ param min the low edge of the range * @ param max the high edge of the range * @ param comparator the comparator to use while comparing the values * @ return The range constraint constraint * @ since 0.3.0 */ public PropertyConstraint inRange ( String propertyName , Object min , Object max , Comparator comparator ) { } }
return value ( propertyName , range ( min , max , comparator ) ) ;
public class ColVals { /** * < p > Evaluate column val for SQL statement INSERT or UPDATE . < / p > * @ param pNm column name * @ return String column val * @ throws ExceptionWithCode if column not found */ public final String evSqlVl ( final String pNm ) throws ExceptionWithCode { } }
Object val = evObjVl ( pNm ) ; if ( val == null ) { return "null" ; } else { return val . toString ( ) ; }
public class Tools { /** * 判断指定数组dests中是否包含指定数据src * @ param src 数据 * @ param dests 数组 , 不能为空 * @ return 返回true表示dests中包含src */ public static boolean contains ( Object src , Object ... dests ) { } }
if ( dests == null ) { throw new NullPointerException ( "dest must not be null" ) ; } if ( src == null ) { return false ; } for ( Object dest : dests ) { if ( src == dest || src . equals ( dest ) ) { return true ; } } return false ;
public class LongTupleDistanceFunctions { /** * Computes the squared Euclidean distance between the given tuples * @ param t0 The first tuple * @ param t1 The second tuple * @ return The distance * @ throws IllegalArgumentException If the given tuples do not * have the same { @ link Tuple # getSize ( ) size } */ static double computeEuclideanSquared ( LongTuple t0 , LongTuple t1 ) { } }
Utils . checkForEqualSize ( t0 , t1 ) ; long sum = 0 ; for ( int i = 0 ; i < t0 . getSize ( ) ; i ++ ) { long d = t0 . get ( i ) - t1 . get ( i ) ; sum += d * d ; } return sum ;
public class CoverageDataPng { /** * { @ inheritDoc } */ @ Override public Double getValue ( GriddedTile griddedTile , byte [ ] imageBytes , int x , int y ) { } }
BufferedImage image ; try { image = ImageUtils . getImage ( imageBytes ) ; } catch ( IOException e ) { throw new GeoPackageException ( "Failed to create an image from image bytes" , e ) ; } Double value = getValue ( griddedTile , image , x , y ) ; return value ;
// Updates rows selected by the given WHERE clauses with the supplied column values,
// honoring the table schema's update-access rules. Flow: (1) check update privilege,
// (2) merge caller clauses with access-imposed clauses, (3) validate that every supplied
// column is writable and not auto-increment, (4) sort columns by name so the generated
// UPDATE statement text is stable (better PreparedStatement reuse), (5) build and execute
// the parameterized UPDATE, (6) re-query the affected rows and return them as JSON.
// NOTE(review): column/table names are concatenated into the SQL text but come from the
// schema (getPhysicalName/getColumnName), not user input; values go through wildcards.
// NOTE(review): the final query uses the caller's whereClauses, not effectiveWhereClauses —
// presumably intentional so the caller sees all rows they selected; confirm.
public class DbTableAccess { /** * Updates a set of records ( rows ) in a table based on the selectors and * new values . This method respect the table schema and the user ' s capabilities * while performing the operation . * @ param whereClauses Listof WhereExpression instances to select proper records * @ param params Map of column names to values used to modify the records . * @ return An array of JSON objects , representing the updated objects . * @ throws Exception */ public JSONArray update ( List < RecordSelector > whereClauses , Map < String , String > params ) throws Exception { } }
OperationAccess operationAccess = tableSchema . getUpdateAccess ( ) ; if ( false == operationAccess . isAllowed ( ) ) { throw new Exception ( "Attempting to update a table while the privilege is not allowed: " + tableSchema . getLogicalName ( ) + " (" + tableSchema . getPhysicalName ( ) + ")" ) ; } // Figure out all WHERE clauses List < RecordSelector > effectiveWhereClauses = computeEffectiveWhereClauses ( whereClauses , operationAccess ) ; // Create a list of all writable columns where a value is specified in // the parameters List < ColumnData > columnsWithParam = new Vector < ColumnData > ( ) ; for ( String columnName : params . keySet ( ) ) { ColumnData columnData = tableSchema . getColumnFromName ( columnName ) ; if ( null != columnData && false == columnData . isWriteable ( ) ) { columnData = null ; } if ( null != columnData && columnData . isAutoIncrementInteger ( ) ) { columnData = null ; } if ( null == columnData ) { throw new Exception ( "No write access to column " + columnName + " in table " + tableSchema . getLogicalName ( ) + " (" + tableSchema . getPhysicalName ( ) + ")" ) ; } else { columnsWithParam . add ( columnData ) ; } } // Sort according to column name . This offers greater reusability // of the prepared statement . Collections . sort ( columnsWithParam , new Comparator < ColumnData > ( ) { public int compare ( ColumnData left , ColumnData right ) { return left . getColumnName ( ) . compareTo ( right . getColumnName ( ) ) ; } } ) ; // No point in running an update if there is nothing to set if ( columnsWithParam . size ( ) < 1 ) { throw new Exception ( "Attempting to update without providing any values to set" ) ; } // Create SQL command String sqlQuery = null ; PreparedStatement pstmt = null ; { StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; pw . print ( "UPDATE " ) ; pw . print ( tableSchema . getPhysicalName ( ) ) ; pw . 
print ( " SET " ) ; { boolean first = true ; for ( ColumnData columnData : columnsWithParam ) { if ( first ) { first = false ; } else { pw . print ( "," ) ; } pw . print ( columnData . getColumnName ( ) ) ; pw . print ( " = " ) ; pw . print ( columnData . getInsertWildcard ( ) ) ; } } { boolean first = true ; for ( RecordSelector exp : effectiveWhereClauses ) { if ( first ) { pw . print ( " WHERE " ) ; first = false ; } else { pw . print ( " AND " ) ; } pw . print ( exp . getQueryString ( tableSchema , SqlElement . Phase . WHERE ) ) ; } } pw . flush ( ) ; sqlQuery = sw . toString ( ) ; pstmt = connection . prepareStatement ( sqlQuery ) ; // Populate prepared statement int index = 1 ; for ( ColumnData columnData : columnsWithParam ) { // Compute value String value = params . get ( columnData . getColumnName ( ) ) ; ColumnDataUtils . writeToPreparedStatement ( pstmt , index , value , columnData . getColumnType ( ) ) ; ++ index ; } for ( RecordSelector exp : effectiveWhereClauses ) { for ( TypedValue value : exp . getQueryValues ( tableSchema , variables ) ) { ColumnDataUtils . writeToPreparedStatement ( pstmt , index , value ) ; ++ index ; } } } // Execute insert pstmt . execute ( ) ; // Now , we need to retrieve the objects JSONArray array = query ( whereClauses , null , null , null , null , null ) ; return array ;
public class UpdateVirtualInterfaceAttributesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateVirtualInterfaceAttributesRequest updateVirtualInterfaceAttributesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateVirtualInterfaceAttributesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateVirtualInterfaceAttributesRequest . getVirtualInterfaceId ( ) , VIRTUALINTERFACEID_BINDING ) ; protocolMarshaller . marshall ( updateVirtualInterfaceAttributesRequest . getMtu ( ) , MTU_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class VpcPeeringConnectionVpcInfo { /** * Information about the IPv4 CIDR blocks for the VPC . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCidrBlockSet ( java . util . Collection ) } or { @ link # withCidrBlockSet ( java . util . Collection ) } if you want to * override the existing values . * @ param cidrBlockSet * Information about the IPv4 CIDR blocks for the VPC . * @ return Returns a reference to this object so that method calls can be chained together . */ public VpcPeeringConnectionVpcInfo withCidrBlockSet ( CidrBlock ... cidrBlockSet ) { } }
if ( this . cidrBlockSet == null ) { setCidrBlockSet ( new com . amazonaws . internal . SdkInternalList < CidrBlock > ( cidrBlockSet . length ) ) ; } for ( CidrBlock ele : cidrBlockSet ) { this . cidrBlockSet . add ( ele ) ; } return this ;
// Reconstructs an entity instance from the raw column-name -> value map returned by Redis.
// Flow per column: skip the discriminator column; if the column maps to a declared attribute,
// either record it as a relation key (when listed in relationNames) or set it directly on the
// entity; otherwise, a column name containing ':' is treated as "<embeddedField>:<embeddedColumn>"
// and the value is set on a lazily-created embeddable instance. Finally the row key is coerced
// from String to the id attribute's Java type when needed. Returns an EnhanceEntity wrapper when
// relations were found, otherwise the bare entity (possibly null for an empty result map).
// NOTE(review): the id attribute itself is never set on the entity — the
// PropertyAccessorHelper.set(...) call for the key is commented out; confirm upstream intent.
// NOTE(review): the embeddable loop assumes attribute search always finds a match; if no
// attribute's JPA column equals embeddedColumnName, `attrib` keeps the last iterated value.
public class RedisClient { /** * Unwraps redis results into entity . * @ param entityMetadata * the entity metadata * @ param results * the results * @ param key * the key * @ return the object * @ throws InstantiationException * the instantiation exception * @ throws IllegalAccessException * the illegal access exception */ private Object unwrap ( EntityMetadata entityMetadata , Map < byte [ ] , byte [ ] > results , Object key ) throws InstantiationException , IllegalAccessException { } }
MetamodelImpl metaModel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( entityMetadata . getPersistenceUnit ( ) ) ; List < String > relationNames = entityMetadata . getRelationNames ( ) ; EntityType entityType = metaModel . entity ( entityMetadata . getEntityClazz ( ) ) ; Map < String , Object > relations = new HashMap < String , Object > ( ) ; Object entity = null ; // Set < Attribute > attributes = entityType . getAttributes ( ) ; Set < byte [ ] > columnNames = results . keySet ( ) ; for ( byte [ ] nameInByte : columnNames ) { if ( entity == null ) { entity = KunderaCoreUtils . createNewInstance ( entityMetadata . getEntityClazz ( ) ) ; } String columnName = PropertyAccessorFactory . STRING . fromBytes ( String . class , nameInByte ) ; byte [ ] value = results . get ( nameInByte ) ; String discriminatorColumn = ( ( AbstractManagedType ) entityType ) . getDiscriminatorColumn ( ) ; if ( columnName != null && ! columnName . equals ( discriminatorColumn ) ) { String fieldName = entityMetadata . getFieldName ( columnName ) ; if ( fieldName != null ) { Attribute attribute = entityType . getAttribute ( fieldName ) ; if ( relationNames != null && relationNames . contains ( columnName ) ) { Field field = ( Field ) attribute . getJavaMember ( ) ; EntityMetadata associationMetadata = KunderaMetadataManager . getEntityMetadata ( kunderaMetadata , ( ( AbstractAttribute ) attribute ) . getBindableJavaType ( ) ) ; relations . put ( columnName , PropertyAccessorHelper . getObject ( associationMetadata . getIdAttribute ( ) . getBindableJavaType ( ) , value ) ) ; } else { PropertyAccessorHelper . set ( entity , ( Field ) attribute . getJavaMember ( ) , value ) ; } } else { // means it might be an embeddable field , if not simply omit // this field . if ( StringUtils . contains ( columnName , ":" ) ) { StringTokenizer tokenizer = new StringTokenizer ( columnName , ":" ) ; while ( tokenizer . 
hasMoreTokens ( ) ) { String embeddedFieldName = tokenizer . nextToken ( ) ; String embeddedColumnName = tokenizer . nextToken ( ) ; Map < String , EmbeddableType > embeddables = metaModel . getEmbeddables ( entityMetadata . getEntityClazz ( ) ) ; EmbeddableType embeddableAttribute = embeddables . get ( embeddedFieldName ) ; AbstractAttribute attrib = null ; Iterator itr = embeddableAttribute . getAttributes ( ) . iterator ( ) ; while ( itr . hasNext ( ) ) { attrib = ( AbstractAttribute ) itr . next ( ) ; if ( attrib . getJPAColumnName ( ) . equals ( embeddedColumnName ) ) { break ; } } Object embeddedObject = PropertyAccessorHelper . getObject ( entity , ( Field ) entityType . getAttribute ( embeddedFieldName ) . getJavaMember ( ) ) ; if ( embeddedObject == null ) { embeddedObject = KunderaCoreUtils . createNewInstance ( ( ( AbstractAttribute ) entityType . getAttribute ( embeddedFieldName ) ) . getBindableJavaType ( ) ) ; PropertyAccessorHelper . set ( entity , ( Field ) entityType . getAttribute ( embeddedFieldName ) . getJavaMember ( ) , embeddedObject ) ; } PropertyAccessorHelper . set ( embeddedObject , ( Field ) attrib . getJavaMember ( ) , value ) ; // PropertyAccessorHelper . } } // It might be a case of embeddable attribute . } } } if ( entity != null ) { Class javaType = entityMetadata . getIdAttribute ( ) . getBindableJavaType ( ) ; if ( ! metaModel . isEmbeddable ( entityMetadata . getIdAttribute ( ) . getBindableJavaType ( ) ) && key . getClass ( ) . isAssignableFrom ( String . class ) && ! key . getClass ( ) . equals ( javaType ) ) { key = PropertyAccessorFactory . getPropertyAccessor ( javaType ) . fromString ( javaType , key . toString ( ) ) ; } // PropertyAccessorHelper . set ( entity , ( Field ) entityMetadata . getIdAttribute ( ) . getJavaMember ( ) , key ) ; } if ( ! relations . isEmpty ( ) ) { return new EnhanceEntity ( entity , key , relations ) ; } return entity ;
public class Graph { /** * Reverse the direction of the edges in the graph . * @ return a new graph with all edges reversed * @ throws UnsupportedOperationException */ public Graph < K , VV , EV > reverse ( ) throws UnsupportedOperationException { } }
DataSet < Edge < K , EV > > reversedEdges = edges . map ( new ReverseEdgesMap < > ( ) ) . name ( "Reverse edges" ) ; return new Graph < > ( vertices , reversedEdges , this . context ) ;
// Resolves a view by its string resource id: first tries the application package's
// "id" namespace, then falls back to the "android" framework namespace, each with a timeout.
// NOTE(review): when both lookups fail, the final fallback calls the (int, int) overload with
// the *application* viewId — which is 0 if the id was never found in the app package.
// Presumably that overload reports the failure; confirm this is the intended error path
// rather than a bug that should use androidViewId or throw directly.
public class Getter { /** * Returns a { @ code View } with a given id . * @ param id the id of the { @ link View } to return * @ param index the index of the { @ link View } . { @ code 0 } if only one is available * @ return a { @ code View } with a given id */ public View getView ( String id , int index ) { } }
View viewToReturn = null ; Context targetContext = instrumentation . getTargetContext ( ) ; String packageName = targetContext . getPackageName ( ) ; int viewId = targetContext . getResources ( ) . getIdentifier ( id , "id" , packageName ) ; if ( viewId != 0 ) { viewToReturn = getView ( viewId , index , TIMEOUT ) ; } if ( viewToReturn == null ) { int androidViewId = targetContext . getResources ( ) . getIdentifier ( id , "id" , "android" ) ; if ( androidViewId != 0 ) { viewToReturn = getView ( androidViewId , index , TIMEOUT ) ; } } if ( viewToReturn != null ) { return viewToReturn ; } return getView ( viewId , index ) ;
public class XMLAssert { /** * Assert that the result of an XML comparison is or is not identical * @ param msg Message to display if assertion fails * @ param diff the result of an XML comparison * @ param assertion true if asserting that result is identical */ public static void assertXMLIdentical ( String msg , Diff diff , boolean assertion ) { } }
if ( assertion != diff . identical ( ) ) { fail ( getFailMessage ( msg , diff ) ) ; }
public class FieldDescriptorConstraints { /** * Checks that sequence - name is only used with autoincrement = ' ojb ' * @ param fieldDef The field descriptor * @ param checkLevel The current check level ( this constraint is checked in basic and strict ) * @ exception ConstraintException If the constraint has been violated */ private void checkSequenceName ( FieldDescriptorDef fieldDef , String checkLevel ) throws ConstraintException { } }
if ( CHECKLEVEL_NONE . equals ( checkLevel ) ) { return ; } String autoIncr = fieldDef . getProperty ( PropertyHelper . OJB_PROPERTY_AUTOINCREMENT ) ; String seqName = fieldDef . getProperty ( PropertyHelper . OJB_PROPERTY_SEQUENCE_NAME ) ; if ( ( seqName != null ) && ( seqName . length ( ) > 0 ) ) { if ( ! "ojb" . equals ( autoIncr ) && ! "database" . equals ( autoIncr ) ) { throw new ConstraintException ( "The field " + fieldDef . getName ( ) + " in class " + fieldDef . getOwner ( ) . getName ( ) + " has sequence-name set though it's autoincrement value is not set to 'ojb'" ) ; } }
public class RpcConfigs { /** * Gets int value . * @ param primaryKey the primary key * @ return the int value */ public static int getIntValue ( String primaryKey ) { } }
Object val = CFG . get ( primaryKey ) ; if ( val == null ) { throw new SofaRpcRuntimeException ( "Not found key: " + primaryKey ) ; } else { return Integer . parseInt ( val . toString ( ) ) ; }
public class LumberjackClient { /** * Checks whether or not this connection has been used recently enough * to still be considered usable */ public boolean isConnectionStale ( ) { } }
if ( lastUsedTime == 0 ) return false ; long currentTime = System . currentTimeMillis ( ) ; long timeSinceLastUse = currentTime - lastUsedTime ; return ( timeSinceLastUse > MAX_KEEPALIVE ) ;
public class ZipUtil { /** * Copies an existing ZIP file and replaces a given entry in it . * @ param zip * an existing ZIP file ( only read ) . * @ param entry * new ZIP entry . * @ param destZip * new ZIP file created . * @ return < code > true < / code > if the entry was replaced . */ public static boolean replaceEntry ( File zip , ZipEntrySource entry , File destZip ) { } }
return replaceEntries ( zip , new ZipEntrySource [ ] { entry } , destZip ) ;
// Bookkeeping after a read: on EOF (consumed == -1), if the remaining-byte count bits
// (MASK_COUNT) are still set, the stream ended before the declared content length was
// delivered — fire the finish listener, clear the count, and report the protocol error.
// Otherwise decrement the remaining count by the bytes consumed.
// NOTE(review): this is a plain read-modify-write of `state` with no atomics or locking —
// presumably the conduit is confined to a single I/O thread; confirm before relying on it
// from multiple threads.
public class FixedLengthStreamSourceConduit { /** * Exit a read method . * @ param consumed the number of bytes consumed by this call ( may be 0) */ private void exitRead ( long consumed ) throws IOException { } }
long oldVal = state ; if ( consumed == - 1 ) { if ( anyAreSet ( oldVal , MASK_COUNT ) ) { invokeFinishListener ( ) ; state &= ~ MASK_COUNT ; throw UndertowMessages . MESSAGES . couldNotReadContentLengthData ( ) ; } return ; } long newVal = oldVal - consumed ; state = newVal ;
public class DFACacheOracle { /** * Creates a prefix - closed cache oracle for a DFA learning setup , using a tree for internal cache organization . * @ param alphabet * the alphabet containing the symbols of possible queries * @ param delegate * the oracle to delegate queries to , in case of a cache - miss . * @ param < I > * input symbol type * @ return the cached { @ link DFACacheOracle } . * @ see IncrementalPCDFATreeBuilder */ public static < I > DFACacheOracle < I > createTreePCCacheOracle ( Alphabet < I > alphabet , MembershipOracle < I , Boolean > delegate ) { } }
return new DFACacheOracle < > ( new IncrementalPCDFATreeBuilder < > ( alphabet ) , delegate ) ;
public class Shell { /** * Older versions of bash have a bug where non - ASCII on the first line * makes the shell think the file is a binary file and not a script . Adding * a leading line feed works around this problem . */ private static String addLineFeedForNonASCII ( String s ) { } }
if ( ! s . startsWith ( "#!" ) ) { if ( s . indexOf ( '\n' ) != 0 ) { return "\n" + s ; } } return s ;
public class Methods { /** * Delegates to { @ link Method # invoke ( Object , Object . . . ) } . < br > * < br > * This call is wrapping all possible checked exceptions and * SecurityExceptions into a { @ link IllegalArgumentException } * @ param method The method * @ param object The object * @ param arguments The arguments * @ return The method call result * @ throws IllegalArgumentException if the call did not succeed */ static Object invokeUnchecked ( Method method , Object object , Object ... arguments ) { } }
try { return method . invoke ( object , arguments ) ; } catch ( IllegalArgumentException e ) { throw new IllegalArgumentException ( e ) ; } catch ( IllegalAccessException e ) { throw new IllegalArgumentException ( e ) ; } catch ( InvocationTargetException e ) { throw new IllegalArgumentException ( e ) ; }
public class MatchExptScript { /** * Compute learners . */ public void compute ( ) { } }
if ( ! computable ) { throw new RuntimeException ( "can't re-'compute' experiment results after a 'restore'" ) ; } expt = new MatchExpt [ blockers . size ( ) ] [ learners . size ( ) ] [ datasets . size ( ) ] ; for ( int i = 0 ; i < blockers . size ( ) ; i ++ ) { Blocker blocker = ( Blocker ) blockers . get ( i ) ; for ( int j = 0 ; j < learners . size ( ) ; j ++ ) { StringDistanceLearner distance = ( StringDistanceLearner ) learners . get ( j ) ; for ( int k = 0 ; k < datasets . size ( ) ; k ++ ) { MatchData dataset = ( MatchData ) datasets . get ( k ) ; expt [ i ] [ j ] [ k ] = new MatchExpt ( dataset , distance , blocker ) ; } } }
public class Client { /** * Calls { @ link # connect ( int , InetAddress , int , int ) connect } with the specified timeout and the other values last passed to * connect . * @ throws IllegalStateException if connect has never been called . */ public void reconnect ( int timeout ) throws IOException { } }
if ( connectHost == null ) throw new IllegalStateException ( "This client has never been connected." ) ; connect ( timeout , connectHost , connectTcpPort , connectUdpPort ) ;
public class DeleteClusterRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteClusterRequest deleteClusterRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteClusterRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteClusterRequest . getClusterArn ( ) , CLUSTERARN_BINDING ) ; protocolMarshaller . marshall ( deleteClusterRequest . getCurrentVersion ( ) , CURRENTVERSION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DistributedSocketFactory { /** * The provided index must be positive , but it can be out of the factory * list bounds . */ private SocketFactory getFactory ( int index ) throws ConnectException { } }
synchronized ( mFactories ) { int size = mFactories . size ( ) ; if ( size <= 0 ) { throw new ConnectException ( "No SocketFactories available" ) ; } return ( SocketFactory ) mFactories . get ( index % size ) ; }
// Serializes this NTP message into the 48-byte wire format (RFC 1305/4330 layout):
// byte 0 packs LI (2 bits) | version (3 bits) | mode (3 bits); bytes 1-3 are stratum,
// poll interval and precision; bytes 4-7 the root delay as signed 16.16 fixed point;
// bytes 8-11 the root dispersion as unsigned 16.16 fixed point (held in a long because
// Java has no unsigned int); bytes 12-15 the reference identifier; and the four 8-byte
// timestamps (reference, originate, receive, transmit) fill bytes 16-47 via encodeTimestamp.
public class NtpMessage { /** * This method constructs the data bytes of a raw NTP packet . * @ return */ public byte [ ] toByteArray ( ) { } }
// All bytes are automatically set to 0 byte [ ] p = new byte [ 48 ] ; p [ 0 ] = ( byte ) ( leapIndicator << 6 | version << 3 | mode ) ; p [ 1 ] = ( byte ) stratum ; p [ 2 ] = pollInterval ; p [ 3 ] = precision ; // root delay is a signed 16.16 - bit FP , in Java an int is 32 - bits int l = ( int ) ( rootDelay * 65536.0 ) ; p [ 4 ] = ( byte ) ( ( l >> 24 ) & 0xFF ) ; p [ 5 ] = ( byte ) ( ( l >> 16 ) & 0xFF ) ; p [ 6 ] = ( byte ) ( ( l >> 8 ) & 0xFF ) ; p [ 7 ] = ( byte ) ( l & 0xFF ) ; // root dispersion is an unsigned 16.16 - bit FP , in Java there are no // unsigned primitive types , so we use a long which is 64 - bits long ul = ( long ) ( rootDispersion * 65536.0 ) ; p [ 8 ] = ( byte ) ( ( ul >> 24 ) & 0xFF ) ; p [ 9 ] = ( byte ) ( ( ul >> 16 ) & 0xFF ) ; p [ 10 ] = ( byte ) ( ( ul >> 8 ) & 0xFF ) ; p [ 11 ] = ( byte ) ( ul & 0xFF ) ; p [ 12 ] = referenceIdentifier [ 0 ] ; p [ 13 ] = referenceIdentifier [ 1 ] ; p [ 14 ] = referenceIdentifier [ 2 ] ; p [ 15 ] = referenceIdentifier [ 3 ] ; encodeTimestamp ( p , 16 , referenceTimestamp ) ; encodeTimestamp ( p , 24 , originateTimestamp ) ; encodeTimestamp ( p , 32 , receiveTimestamp ) ; encodeTimestamp ( p , 40 , transmitTimestamp ) ; return p ;