Dataset columns: `signature` (string lengths 43 – 39.1k), `implementation` (string lengths 0 – 450k).
public class ApiOvhTelephony { /** * Get this object properties * REST : GET / telephony / { billingAccount } / line / { serviceName } / automaticCall / { identifier } * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] * @ param identifier [ required ] Generated call identifier */ public OvhCallsGenerated billingAccount_line_serviceName_automaticCall_identifier_GET ( String billingAccount , String serviceName , String identifier ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/line/{serviceName}/automaticCall/{identifier}" ; StringBuilder sb = path ( qPath , billingAccount , serviceName , identifier ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhCallsGenerated . class ) ;
public class CmsLogFileApp { /** * Returns the file name or < code > null < / code > associated with the given appender . < p > * @ param app the appender * @ return the file name */ protected static String getFileName ( Appender app ) { } }
String result = null ; Method getFileName ; try { getFileName = app . getClass ( ) . getDeclaredMethod ( "getFileName" , ( Class < ? > [ ] ) null ) ; result = ( String ) getFileName . invoke ( app , ( Object [ ] ) null ) ; } catch ( Exception e ) { LOG . warn ( e . getLocalizedMessage ( ) , e ) ; } return result ;
public class WebService { /** * method to generate a SMILES representation for a whole HELM2 input * @ param notation * given helm notation * @ return generated canonical smiles for the given notation * @ throws BuilderMoleculeException * if the molecule can ' t be built * @ throws CTKException * general ChemToolKit exception passed to HELMToolKit * @ throws ChemistryException * if chemistry engine can not be initialized * @ throws ValidationException * if notation is not valid * @ throws MonomerLoadingException * if monomers can not be loaded * @ throws NotationException * if notation is not valid */ public String generateCanSMILESForHELM2 ( String notation ) throws BuilderMoleculeException , CTKException , ChemistryException , ValidationException , MonomerLoadingException , NotationException { } }
String result = SMILES . getCanonicalSMILESForAll ( ( validate ( notation ) ) ) ; setMonomerFactoryToDefault ( notation ) ; return result ;
public class SigningImageFormat { /** * The supported formats of an AWS Signer signing image . * @ param supportedFormats * The supported formats of an AWS Signer signing image . * @ return Returns a reference to this object so that method calls can be chained together . * @ see ImageFormat */ public SigningImageFormat withSupportedFormats ( ImageFormat ... supportedFormats ) { } }
java . util . ArrayList < String > supportedFormatsCopy = new java . util . ArrayList < String > ( supportedFormats . length ) ; for ( ImageFormat value : supportedFormats ) { supportedFormatsCopy . add ( value . toString ( ) ) ; } if ( getSupportedFormats ( ) == null ) { setSupportedFormats ( supportedFormatsCopy ) ; } else { getSupportedFormats ( ) . addAll ( supportedFormatsCopy ) ; } return this ;
public class Transport { /** * Use this method if you need direct access to Json results . * < pre > * Params params = new Params ( ) * . add ( . . . ) * getJsonResponseFromGet ( Issue . class , params ) ; * < / pre > */ public < T > JSONObject getJsonResponseFromGet ( Class < T > objectClass , Collection < ? extends NameValuePair > params ) throws RedmineException , JSONException { } }
final List < NameValuePair > newParams = new ArrayList < > ( params ) ; List < NameValuePair > paramsList = new ArrayList < > ( newParams ) ; final URI uri = getURIConfigurator ( ) . getObjectsURI ( objectClass , paramsList ) ; final HttpGet http = new HttpGet ( uri ) ; final String response = send ( http ) ; final JSONObject responseObject = RedmineJSONParser . getResponse ( response ) ; return responseObject ;
public class LeaderCache { /** * Create or update a new rootNode child */ @ Override public void put ( int partitionId , String HSIdStr ) throws KeeperException , InterruptedException { } }
try { m_zk . create ( ZKUtil . joinZKPath ( m_rootNode , Integer . toString ( partitionId ) ) , HSIdStr . getBytes ( Charsets . UTF_8 ) , Ids . OPEN_ACL_UNSAFE , CreateMode . PERSISTENT ) ; } catch ( KeeperException . NodeExistsException e ) { m_zk . setData ( ZKUtil . joinZKPath ( m_rootNode , Integer . toString ( partitionId ) ) , HSIdStr . getBytes ( Charsets . UTF_8 ) , - 1 ) ; }
public class Model { /** * Generate implementation for super class . */ protected SB toJavaSuper ( SB sb ) { } }
sb . nl ( ) ; sb . ii ( 1 ) ; sb . i ( ) . p ( "public String[] getNames() { return NAMES; } " ) . nl ( ) ; sb . i ( ) . p ( "public String[][] getDomainValues() { return DOMAINS; }" ) . nl ( ) ; String uuid = this . uniqueId != null ? this . uniqueId . getId ( ) : this . _key . toString ( ) ; sb . i ( ) . p ( "public String getUUID() { return " ) . ps ( uuid ) . p ( "; }" ) . nl ( ) ; return sb ;
public class KerasModelUtils { /** * Helper function to import weights from nested Map into existing model . Depends critically * on matched layer and parameter names . In general this seems to be straightforward for most * Keras models and layersOrdered , but there may be edge cases . * @ param model DL4J Model interface * @ return DL4J Model interface * @ throws InvalidKerasConfigurationException Invalid Keras config */ public static Model copyWeightsToModel ( Model model , Map < String , KerasLayer > kerasLayers ) throws InvalidKerasConfigurationException { } }
/* Get list if layers from model . */ Layer [ ] layersFromModel ; if ( model instanceof MultiLayerNetwork ) layersFromModel = ( ( MultiLayerNetwork ) model ) . getLayers ( ) ; else layersFromModel = ( ( ComputationGraph ) model ) . getLayers ( ) ; /* Iterate over layers in model , setting weights when relevant . */ Set < String > layerNames = new HashSet < > ( kerasLayers . keySet ( ) ) ; for ( org . deeplearning4j . nn . api . Layer layer : layersFromModel ) { String layerName = layer . conf ( ) . getLayer ( ) . getLayerName ( ) ; if ( ! kerasLayers . containsKey ( layerName ) ) throw new InvalidKerasConfigurationException ( "No weights found for layer in model (named " + layerName + ")" ) ; kerasLayers . get ( layerName ) . copyWeightsToLayer ( layer ) ; layerNames . remove ( layerName ) ; } for ( String layerName : layerNames ) { if ( kerasLayers . get ( layerName ) . getNumParams ( ) > 0 ) throw new InvalidKerasConfigurationException ( "Attemping to copy weights for layer not in model (named " + layerName + ")" ) ; } return model ;
public class DistCp { /** * Check whether the contents of src and dst are the same . * Return false if dstpath does not exist * If the files have different sizes , return false . * If the files have the same sizes , the file checksums will be compared . * When file checksum is not supported in any of file systems , * two files are considered as the same if they have the same size . */ static private boolean sameFile ( FileSystem srcfs , FileStatus srcstatus , FileSystem dstfs , Path dstpath , boolean skipCRCCheck ) throws IOException { } }
FileStatus dststatus ; try { dststatus = dstfs . getFileStatus ( dstpath ) ; } catch ( FileNotFoundException fnfe ) { return false ; } // same length ? if ( srcstatus . getLen ( ) != dststatus . getLen ( ) ) { return false ; } if ( skipCRCCheck ) { LOG . debug ( "Skipping CRC Check" ) ; return true ; } // get src checksum final FileChecksum srccs ; try { srccs = srcfs . getFileChecksum ( srcstatus . getPath ( ) ) ; } catch ( FileNotFoundException fnfe ) { /* * Two possible cases : * ( 1 ) src existed once but was deleted between the time period that * srcstatus was obtained and the try block above . * ( 2 ) srcfs does not support file checksum and ( incorrectly ) throws * FNFE , e . g . some previous versions of HftpFileSystem . * For case ( 1 ) , it is okay to return true since src was already deleted . * For case ( 2 ) , true should be returned . */ return true ; } // compare checksums try { final FileChecksum dstcs = dstfs . getFileChecksum ( dststatus . getPath ( ) ) ; // return true if checksum is not supported // ( i . e . some of the checksums is null ) return srccs == null || dstcs == null || srccs . equals ( dstcs ) ; } catch ( FileNotFoundException fnfe ) { return false ; }
public class StatusTransition { /** * Changing status * @ param args - - will be used in the status changing callback */ public < T > void transitionLock ( String topologyId , boolean errorOnNoTransition , StatusType changeStatus , T ... args ) throws Exception { } }
// get ZK ' s topology node ' s data , which is StormBase StormBase stormbase = data . getStormClusterState ( ) . storm_base ( topologyId , null ) ; if ( stormbase == null ) { LOG . error ( "Cannot apply event: changing status " + topologyId + " -> " + changeStatus . getStatus ( ) + ", cause: failed to get StormBase from ZK" ) ; return ; } StormStatus currentStatus = stormbase . getStatus ( ) ; if ( currentStatus == null ) { LOG . error ( "Cannot apply event: changing status " + topologyId + " -> " + changeStatus . getStatus ( ) + ", cause: topologyStatus is null in ZK" ) ; return ; } // < currentStatus , Map < changingStatus , callback > > Map < StatusType , Map < StatusType , Callback > > callbackMap = stateTransitions ( topologyId , currentStatus ) ; // get current changingCallbacks Map < StatusType , Callback > changingCallbacks = callbackMap . get ( currentStatus . getStatusType ( ) ) ; if ( changingCallbacks == null || ! changingCallbacks . containsKey ( changeStatus ) || changingCallbacks . get ( changeStatus ) == null ) { String msg = "No transition for event: changing status:" + changeStatus . getStatus ( ) + ", current status: " + currentStatus . getStatusType ( ) + ", topology-id: " + topologyId ; LOG . info ( msg ) ; if ( errorOnNoTransition ) { throw new RuntimeException ( msg ) ; } return ; } Callback callback = changingCallbacks . get ( changeStatus ) ; Object obj = callback . execute ( args ) ; if ( obj != null && obj instanceof StormStatus ) { StormStatus newStatus = ( StormStatus ) obj ; // update status to ZK data . getStormClusterState ( ) . update_storm ( topologyId , newStatus ) ; LOG . info ( "Successfully updated " + topologyId + " to status " + newStatus ) ; } LOG . info ( "Successfully apply event: changing status " + topologyId + " -> " + changeStatus . getStatus ( ) ) ;
public class AbstractCompact { /** * Return a copy of the provided array with updated memory layout . * @ param oldStorage * the current array * @ param defaultValue * default value for newly allocated array positions * @ param payload * the payload object * @ return a copy of the provided array with updated memory layout . * @ see # updateTransitionStorage ( int [ ] , int , Payload ) * @ see # updateTransitionStorage ( Object [ ] , Object , Payload ) */ protected final < T > T [ ] updateTransitionStorage ( T [ ] oldStorage , IntFunction < T [ ] > arrayConstructor , @ Nullable T defaultValue , Payload payload ) { } }
return payload . type . updateStorage ( oldStorage , payload , arrayConstructor , ( arr , idx ) -> arr [ idx ] = defaultValue ) ;
public class KinesisConfigUtil { /** * Validate configuration properties related to Amazon AWS service . */ public static void validateAwsConfiguration ( Properties config ) { } }
if ( config . containsKey ( AWSConfigConstants . AWS_CREDENTIALS_PROVIDER ) ) { String credentialsProviderType = config . getProperty ( AWSConfigConstants . AWS_CREDENTIALS_PROVIDER ) ; // value specified for AWSConfigConstants . AWS _ CREDENTIALS _ PROVIDER needs to be recognizable CredentialProvider providerType ; try { providerType = CredentialProvider . valueOf ( credentialsProviderType ) ; } catch ( IllegalArgumentException e ) { StringBuilder sb = new StringBuilder ( ) ; for ( CredentialProvider type : CredentialProvider . values ( ) ) { sb . append ( type . toString ( ) ) . append ( ", " ) ; } throw new IllegalArgumentException ( "Invalid AWS Credential Provider Type set in config. Valid values are: " + sb . toString ( ) ) ; } // if BASIC type is used , also check that the Access Key ID and Secret Key is supplied if ( providerType == CredentialProvider . BASIC ) { if ( ! config . containsKey ( AWSConfigConstants . AWS_ACCESS_KEY_ID ) || ! config . containsKey ( AWSConfigConstants . AWS_SECRET_ACCESS_KEY ) ) { throw new IllegalArgumentException ( "Please set values for AWS Access Key ID ('" + AWSConfigConstants . AWS_ACCESS_KEY_ID + "') " + "and Secret Key ('" + AWSConfigConstants . AWS_SECRET_ACCESS_KEY + "') when using the BASIC AWS credential provider type." ) ; } } } if ( config . containsKey ( AWSConfigConstants . AWS_REGION ) ) { // specified AWS Region name must be recognizable if ( ! AWSUtil . isValidRegion ( config . getProperty ( AWSConfigConstants . AWS_REGION ) ) ) { StringBuilder sb = new StringBuilder ( ) ; for ( Regions region : Regions . values ( ) ) { sb . append ( region . getName ( ) ) . append ( ", " ) ; } throw new IllegalArgumentException ( "Invalid AWS region set in config. Valid values are: " + sb . toString ( ) ) ; } }
public class NearCachePreloaderLock { /** * package private for testing */ void releaseInternal ( FileLock lock , FileChannel channel ) { } }
try { lock . release ( ) ; channel . close ( ) ; } catch ( IOException e ) { logger . severe ( "Problem while releasing the lock and closing channel on " + lockFile , e ) ; } finally { lockFile . deleteOnExit ( ) ; }
public class StringUtils { /** * Appends the suffix to the end of the string if the string does not * already end with any of the suffixes . * < pre > * StringUtils . appendIfMissing ( null , null ) = null * StringUtils . appendIfMissing ( " abc " , null ) = " abc " * StringUtils . appendIfMissing ( " " , " xyz " ) = " xyz " * StringUtils . appendIfMissing ( " abc " , " xyz " ) = " abcxyz " * StringUtils . appendIfMissing ( " abcxyz " , " xyz " ) = " abcxyz " * StringUtils . appendIfMissing ( " abcXYZ " , " xyz " ) = " abcXYZxyz " * < / pre > * < p > With additional suffixes , < / p > * < pre > * StringUtils . appendIfMissing ( null , null , null ) = null * StringUtils . appendIfMissing ( " abc " , null , null ) = " abc " * StringUtils . appendIfMissing ( " " , " xyz " , null ) = " xyz " * StringUtils . appendIfMissing ( " abc " , " xyz " , new CharSequence [ ] { null } ) = " abcxyz " * StringUtils . appendIfMissing ( " abc " , " xyz " , " " ) = " abc " * StringUtils . appendIfMissing ( " abc " , " xyz " , " mno " ) = " abcxyz " * StringUtils . appendIfMissing ( " abcxyz " , " xyz " , " mno " ) = " abcxyz " * StringUtils . appendIfMissing ( " abcmno " , " xyz " , " mno " ) = " abcmno " * StringUtils . appendIfMissing ( " abcXYZ " , " xyz " , " mno " ) = " abcXYZxyz " * StringUtils . appendIfMissing ( " abcMNO " , " xyz " , " mno " ) = " abcMNOxyz " * < / pre > * @ param str The string . * @ param suffix The suffix to append to the end of the string . * @ param suffixes Additional suffixes that are valid terminators . * @ return A new String if suffix was appended , the same string otherwise . * @ since 3.2 */ public static String appendIfMissing ( final String str , final CharSequence suffix , final CharSequence ... suffixes ) { } }
return appendIfMissing ( str , suffix , false , suffixes ) ;
public class AbstractGrid { @ Override public boolean exists ( int row , int column ) { } }
return row >= 0 && row < rowCount ( ) && column >= 0 && column < columnCount ( ) ;
public class Config { /** * Users should use the version of this method at uses ByteAmount * @ deprecated use * setComponentRam ( Map & lt ; String , Object & gt ; conf , String component , ByteAmount ramInBytes ) */ @ Deprecated public static void setComponentRam ( Map < String , Object > conf , String component , long ramInBytes ) { } }
setComponentRam ( conf , component , ByteAmount . fromBytes ( ramInBytes ) ) ;
public class DTMManagerDefault { /** * Get the first free DTM ID available . % OPT % Linear search is inefficient ! */ synchronized public int getFirstFreeDTMID ( ) { } }
int n = m_dtms . length ; for ( int i = 1 ; i < n ; i ++ ) { if ( null == m_dtms [ i ] ) { return i ; } } return n ; // count on addDTM ( ) to throw exception if out of range
public class ObjectParser { /** * 将对象解析为 < code > NodeConfig < / code > 格式 * @ param nodeName 节点名称 * @ param rootObject 需要解析的对象 * @ return 解析后的 < code > NodeConfig < / code > 格式 * @ throws MarshalException 解析异常 */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) public NodeConfig parse ( String nodeName , Object rootObject ) throws MarshalException { Class rootClass = rootObject . getClass ( ) ; if ( TypeConver . isBesicType ( rootClass ) ) { return new NodeConfig ( nodeName , rootObject . toString ( ) ) ; } else if ( rootObject instanceof List ) { return new ListParser ( ) . parse ( nodeName , ( List ) rootObject ) ; } else if ( rootObject instanceof Map ) { return new MapParser ( ) . parse ( nodeName , ( Map ) rootObject ) ; } else if ( rootObject instanceof XmlText ) { return new NodeConfig ( nodeName , rootObject ) ; } else if ( rootObject instanceof CdataText ) { return new NodeConfig ( nodeName , rootObject ) ; } else { NodeConfig objConfig = null ; if ( rootClass . isAnnotationPresent ( Root . class ) ) { objConfig = new AnnotationObjectParser ( ) . parse ( nodeName , rootObject ) ; } else { objConfig = new DefaultObjectParser ( ) . parse ( rootObject ) ; } List < Namespace > namespaces = objConfig . getNamespace ( ) ; for ( Namespace namespace : namespaces ) { if ( namespace != null && namespace . getPrefix ( ) != null ) { this . namespaces . put ( namespace . getPrefix ( ) , namespace . getUri ( ) ) ; } } return objConfig ; }
public class PeriodType { /** * Gets the indexed field part of the period . * @ param period the period to query * @ param index the index to use * @ return the value of the field , zero if unsupported */ int getIndexedField ( ReadablePeriod period , int index ) { } }
int realIndex = iIndices [ index ] ; return ( realIndex == - 1 ? 0 : period . getValue ( realIndex ) ) ;
public class JacksonUtils { /** * < p > setObjectWriterInjector . < / p > * @ param provider a { @ link javax . inject . Provider } object . * @ param genericType a { @ link java . lang . reflect . Type } object . * @ param annotations an array of { @ link java . lang . annotation . Annotation } objects . * @ throws java . io . IOException if any . */ public static void setObjectWriterInjector ( Provider < ObjectProvider < FilterProvider > > provider , final Type genericType , final Annotation [ ] annotations ) throws IOException { } }
final FilterProvider filterProvider = provider . get ( ) . getFilteringObject ( genericType , true , annotations ) ; if ( filterProvider != null ) { ObjectWriterInjector . set ( new FilteringObjectWriterModifier ( filterProvider , ObjectWriterInjector . getAndClear ( ) ) ) ; }
public class Utils { /** * Compares two QNames for equality . Either or both of the values may be null . * @ param qn1 * @ param qn2 * @ return */ public static boolean compareQNames ( QName qn1 , QName qn2 ) { } }
if ( qn1 == qn2 ) return true ; if ( qn1 == null || qn2 == null ) return false ; return qn1 . equals ( qn2 ) ;
public class CmsGalleryService { /** * Convenience method for reading the saved VFS tree state from the session . < p > * @ param request the current request * @ param treeToken the tree token ( may be null ) * @ return the saved tree open state ( may be null ) */ public static CmsTreeOpenState getVfsTreeState ( HttpServletRequest request , String treeToken ) { } }
return ( CmsTreeOpenState ) request . getSession ( ) . getAttribute ( getTreeOpenStateAttributeName ( I_CmsGalleryProviderConstants . TREE_VFS , treeToken ) ) ;
public class Status { public void copy ( Copier aFrom ) { } }
if ( aFrom == null ) return ; primaryKey = ( ( PrimaryKey ) aFrom ) . getPrimaryKey ( ) ; name = ( ( Nameable ) aFrom ) . getName ( ) ;
public class DescriptorValue { /** * Returns an array of names for each descriptor value calculated . * Many descriptors return multiple values . In general it is useful for the * descriptor to indicate the names for each value . When a descriptor creates * a < code > DescriptorValue < / code > object , it should supply an array of names equal * in length to the number of descriptor calculated . * In many cases , these names can be as simple as X0 , X1 , . . . , XN where X is a prefix * and 0 , 1 , . . . , N are the indices . On the other hand it is also possible to return * other arbitrary names , which should be documented in the JavaDocs for the descriptor * ( e . g . , the CPSA descriptor ) . * Note that by default if a descriptor returns a single value ( such as { @ link org . openscience . cdk . qsar . descriptors . molecular . ALOGPDescriptor } * the return array will have a single element * In case a descriptor creates a < code > DescriptorValue < / code > object with no names , this * method will generate a set of names based on the { @ link DescriptorSpecification } object * supplied at instantiation . * @ return An array of descriptor names . */ public String [ ] getNames ( ) { } }
if ( descriptorNames == null || descriptorNames . length == 0 ) { String title = specification . getImplementationTitle ( ) ; if ( value instanceof BooleanResult || value instanceof DoubleResult || value instanceof IntegerResult ) { descriptorNames = new String [ 1 ] ; descriptorNames [ 0 ] = title ; } else { int ndesc = 0 ; if ( value instanceof DoubleArrayResult ) { ndesc = value . length ( ) ; } else if ( value instanceof IntegerArrayResult ) { ndesc = value . length ( ) ; } descriptorNames = new String [ ndesc ] ; for ( int i = 0 ; i < ndesc ; i ++ ) descriptorNames [ i ] = title + i ; } } return descriptorNames ;
public class DetectorFactoryCollection { /** * Set the metadata for a bug category . If the category ' s metadata has * already been set , this does nothing . * @ param bc * the BugCategory object holding the metadata for the category * @ return false if the category ' s metadata has already been set , true * otherwise */ public boolean registerBugCategory ( BugCategory bc ) { } }
String category = bc . getCategory ( ) ; if ( categoryDescriptionMap . get ( category ) != null ) { return false ; } categoryDescriptionMap . put ( category , bc ) ; return true ;
public class PeepholeRemoveDeadCode { /** * The function assumes that when checking a CASE node there is no DEFAULT _ CASE node in the * SWITCH , or the DEFAULT _ CASE is the last case in the SWITCH . * @ return Whether the CASE or DEFAULT _ CASE block does anything useful . */ private boolean isUselessCase ( Node caseNode , @ Nullable Node previousCase , @ Nullable Node defaultCase ) { } }
checkState ( previousCase == null || previousCase . getNext ( ) == caseNode ) ; // A case isn ' t useless if a previous case falls through to it unless it happens to be the last // case in the switch . Node switchNode = caseNode . getParent ( ) ; if ( switchNode . getLastChild ( ) != caseNode && previousCase != null ) { Node previousBlock = previousCase . getLastChild ( ) ; if ( ! previousBlock . hasChildren ( ) || ! isExit ( previousBlock . getLastChild ( ) ) ) { return false ; } } Node executingCase = caseNode ; while ( executingCase != null ) { checkState ( executingCase . isDefaultCase ( ) || executingCase . isCase ( ) ) ; // We only expect a DEFAULT case if the case we are checking is the // DEFAULT case . Otherwise , we assume the DEFAULT case has already // been removed . checkState ( caseNode == executingCase || ! executingCase . isDefaultCase ( ) ) ; if ( ! executingCase . isDefaultCase ( ) && mayHaveSideEffects ( executingCase . getFirstChild ( ) ) ) { // The case falls thru to a case whose condition has a potential side - effect , // removing the candidate case would skip that side - effect , so don ' t . return false ; } Node block = executingCase . getLastChild ( ) ; checkState ( block . isBlock ( ) ) ; if ( block . hasChildren ( ) ) { for ( Node blockChild : block . children ( ) ) { // If this is a block with a labelless break , it is useless . switch ( blockChild . getToken ( ) ) { case BREAK : // A case with a single labelless break is useless if it is the default case or if // there is no default case . A break to a different control structure isn ' t useless . return ! blockChild . hasChildren ( ) && ( defaultCase == null || defaultCase == executingCase ) ; case VAR : if ( blockChild . hasOneChild ( ) && blockChild . getFirstFirstChild ( ) == null ) { // Variable declarations without initializations are OK . continue ; } return false ; default : return false ; } } } // Look at the fallthrough case executingCase = executingCase . 
getNext ( ) ; } return true ;
public class UnitOfWorkAwareProxyFactory { /** * Creates a new < b > @ UnitOfWork < / b > aware proxy of a class with a complex constructor . * @ param clazz the specified class definition * @ param constructorParamTypes the types of the constructor parameters * @ param constructorArguments the arguments passed to the constructor * @ param < T > the type of the class * @ return a new proxy */ @ SuppressWarnings ( "unchecked" ) public < T > T create ( Class < T > clazz , Class < ? > [ ] constructorParamTypes , Object [ ] constructorArguments ) { } }
final ProxyFactory factory = new ProxyFactory ( ) ; factory . setSuperclass ( clazz ) ; try { final Proxy proxy = ( Proxy ) ( constructorParamTypes . length == 0 ? factory . createClass ( ) . newInstance ( ) : factory . create ( constructorParamTypes , constructorArguments ) ) ; proxy . setHandler ( ( self , overridden , proceed , args ) -> { final UnitOfWork unitOfWork = overridden . getAnnotation ( UnitOfWork . class ) ; final UnitOfWorkAspect unitOfWorkAspect = new UnitOfWorkAspect ( entityManagerFactories ) ; try { unitOfWorkAspect . beforeStart ( unitOfWork ) ; Object result = proceed . invoke ( self , args ) ; unitOfWorkAspect . afterEnd ( ) ; return result ; } catch ( InvocationTargetException e ) { unitOfWorkAspect . onError ( ) ; throw e . getCause ( ) ; } catch ( Exception e ) { unitOfWorkAspect . onError ( ) ; throw e ; } } ) ; return ( T ) proxy ; } catch ( NoSuchMethodException | InstantiationException | IllegalAccessException | InvocationTargetException e ) { throw new IllegalStateException ( "Unable to create a proxy for the class '" + clazz + "'" , e ) ; }
public class RequestCancelExternalWorkflowExecutionDecisionAttributesMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RequestCancelExternalWorkflowExecutionDecisionAttributes requestCancelExternalWorkflowExecutionDecisionAttributes , ProtocolMarshaller protocolMarshaller ) { } }
if ( requestCancelExternalWorkflowExecutionDecisionAttributes == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( requestCancelExternalWorkflowExecutionDecisionAttributes . getWorkflowId ( ) , WORKFLOWID_BINDING ) ; protocolMarshaller . marshall ( requestCancelExternalWorkflowExecutionDecisionAttributes . getRunId ( ) , RUNID_BINDING ) ; protocolMarshaller . marshall ( requestCancelExternalWorkflowExecutionDecisionAttributes . getControl ( ) , CONTROL_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class StringUtil {
    /**
     * Checks if the given string in _str starts with any of the given strings in _args.
     *
     * @param _ignoreCase true to ignore case, false to be case sensitive
     * @param _str string to check
     * @param _args patterns to find
     * @return true if _str starts with any of the strings in _args,
     *         false if not or if _str/_args is null or empty
     */
    public static boolean startsWithAny(boolean _ignoreCase, String _str, String... _args) {
        if (_str == null || _args == null || _args.length == 0) {
            return false;
        }
        // Lower-case the subject once up front when matching case-insensitively.
        final String haystack = _ignoreCase ? _str.toLowerCase() : _str;
        for (String candidate : _args) {
            final String needle = _ignoreCase ? candidate.toLowerCase() : candidate;
            if (haystack.startsWith(needle)) {
                return true;
            }
        }
        return false;
    }
}
public class IntegerExtensions { /** * The < code > . . & lt ; < / code > operator yields an { @ link ExclusiveRange } that increments from * a to b ( exclusive ) . * @ param a the start of the range . * @ param b the end of the range ( exclusive ) . * @ return an incrementing { @ link ExclusiveRange } . Never < code > null < / code > . * @ since 2.4 */ @ Pure @ Inline ( value = "new $3($1, $2, true)" , imported = ExclusiveRange . class , statementExpression = false ) public static ExclusiveRange operator_doubleDotLessThan ( final int a , final int b ) { } }
return new ExclusiveRange ( a , b , true ) ;
public class SyntheticStorableReferenceBuilder {
    /**
     * Look for conflicting method names: reports whether the given class, any of its
     * superclasses, or any implemented interface declares a method with the given name.
     *
     * @param clazz the class to inspect
     * @param name the method name to search for
     * @return true if a method with that name exists anywhere in the type hierarchy
     */
    private boolean methodExists(Class clazz, String name) {
        // Check methods declared directly on this class.
        for (Method method : clazz.getDeclaredMethods()) {
            if (method.getName().equals(name)) {
                return true;
            }
        }
        // Recurse up the superclass chain.
        Class superclass = clazz.getSuperclass();
        if (superclass != null && methodExists(superclass, name)) {
            return true;
        }
        // Recurse into every implemented interface.
        for (Class iface : clazz.getInterfaces()) {
            if (methodExists(iface, name)) {
                return true;
            }
        }
        return false;
    }
}
public class V1KnowledgeMarshaller { /** * Reads in the Configuration , looking for various knowledge models . * If not found , it falls back to the super class ( V1CompositeMarshaller ) . * @ param config the Configuration * @ return the Model */ @ Override public Model read ( Configuration config ) { } }
String name = config . getName ( ) ; Descriptor desc = getDescriptor ( ) ; if ( CHANNELS . equals ( name ) ) { return new V1ChannelsModel ( config , desc ) ; } else if ( CHANNEL . equals ( name ) ) { return new V1ChannelModel ( config , desc ) ; } else if ( LISTENERS . equals ( name ) ) { return new V1ListenersModel ( config , desc ) ; } else if ( LISTENER . equals ( name ) ) { return new V1ListenerModel ( config , desc ) ; } else if ( LOGGERS . equals ( name ) ) { return new V1LoggersModel ( config , desc ) ; } else if ( LOGGER . equals ( name ) ) { return new V1LoggerModel ( config , desc ) ; } else if ( MANIFEST . equals ( name ) ) { return new V1ManifestModel ( config , desc ) ; } else if ( CONTAINER . equals ( name ) ) { return new V1ContainerModel ( config , desc ) ; } else if ( RESOURCES . equals ( name ) ) { return new V1ResourcesModel ( config , desc ) ; } else if ( RESOURCE . equals ( name ) ) { return new V1ResourceModel ( config , desc ) ; } else if ( RESOURCE_DETAIL . equals ( name ) ) { return new V1ResourceDetailModel ( config , desc ) ; } else if ( OPERATIONS . equals ( name ) ) { return new V1OperationsModel ( config , desc ) ; } else if ( GLOBALS . equals ( name ) ) { return new V1GlobalsModel ( config , desc ) ; } else if ( GLOBAL . equals ( name ) ) { return new V1GlobalModel ( config , desc ) ; } else if ( INPUTS . equals ( name ) ) { return new V1InputsModel ( config , desc ) ; } else if ( INPUT . equals ( name ) ) { return new V1InputModel ( config , desc ) ; } else if ( OUTPUTS . equals ( name ) ) { return new V1OutputsModel ( config , desc ) ; } else if ( OUTPUT . equals ( name ) ) { return new V1OutputModel ( config , desc ) ; } else if ( FAULTS . equals ( name ) ) { return new V1FaultsModel ( config , desc ) ; } else if ( FAULT . equals ( name ) ) { return new V1FaultModel ( config , desc ) ; } else if ( PROPERTIES . equals ( name ) ) { return new V1PropertiesModel ( config , desc ) ; } else if ( PROPERTY . 
equals ( name ) ) { return new V1PropertyModel ( config , desc ) ; } else if ( USER_GROUP_CALLBACK . equals ( name ) ) { return new V1UserGroupCallbackModel ( config , desc ) ; } else if ( WORK_ITEM_HANDLERS . equals ( name ) ) { return new V1WorkItemHandlersModel ( config , desc ) ; } else if ( WORK_ITEM_HANDLER . equals ( name ) ) { return new V1WorkItemHandlerModel ( config , desc ) ; } return super . read ( config ) ;
public class WEmailField { /** * Performs validation of the email address . This only performs very basic validation - an email address must * contain some text , followed by an ' @ ' , and then something which resembles a domain / host name . * Subclasses can override this method to perform more specific validation . * @ param diags the list into which any validation diagnostics are added . */ protected void validateEmailAddress ( final List < Diagnostic > diags ) { } }
if ( ! isEmpty ( ) ) { String value = getValueAsString ( ) ; String errorMessage = getComponentModel ( ) . errorMessage ; // Email Pattern if ( ! Pattern . matches ( "^(?:\".+\"|[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+)@[a-zA-Z0-9-]+(?:\\.[a-zA-Z0-9-]+)+$" , value ) ) { diags . add ( createErrorDiagnostic ( errorMessage , this ) ) ; } // Maximum Length int max = getMaxLength ( ) ; if ( max > 0 && value . length ( ) > max ) { diags . add ( createErrorDiagnostic ( InternalMessages . DEFAULT_VALIDATION_ERROR_MAX_LENGTH , this , String . valueOf ( max ) ) ) ; } }
public class CommerceDiscountLocalServiceBaseImpl {
    /**
     * Returns the commerce discount matching the UUID and group.
     *
     * <p>Pure delegation to the persistence layer; {@code findByUUID_G} throws when no
     * matching row exists, which surfaces here as the documented {@link PortalException}.
     *
     * @param uuid the commerce discount's UUID
     * @param groupId the primary key of the group
     * @return the matching commerce discount
     * @throws PortalException if a matching commerce discount could not be found
     */
    @Override
    public CommerceDiscount getCommerceDiscountByUuidAndGroupId(String uuid, long groupId) throws PortalException {
        return commerceDiscountPersistence.findByUUID_G(uuid, groupId);
    }
}
public class AsyncConverter1to1 {
    /**
     * Returns a {@link RecordStreamWithMetadata} whose records are converted asynchronously,
     * with at most a configurable number of conversions in flight at once.
     *
     * @param inputStream the incoming record stream with metadata
     * @param workUnitState work unit configuration (supplies the concurrency limit)
     * @return the converted stream with updated global metadata
     * @throws SchemaConversionException if the schema cannot be converted
     * @implNote this processStream does not handle {@link org.apache.gobblin.stream.MetadataUpdateControlMessage}s
     */
    @Override
    public RecordStreamWithMetadata<DO, SO> processStream(RecordStreamWithMetadata<DI, SI> inputStream,
            WorkUnitState workUnitState) throws SchemaConversionException {
        // Upper bound on concurrently outstanding async conversions; passed to flatMapSingle below.
        int maxConcurrentAsyncConversions = workUnitState.getPropAsInt(MAX_CONCURRENT_ASYNC_CONVERSIONS_KEY,
                DEFAULT_MAX_CONCURRENT_ASYNC_CONVERSIONS);
        // Convert the schema once, up front; every record conversion reuses it.
        SO outputSchema = convertSchema(inputStream.getGlobalMetadata().getSchema(), workUnitState);
        Flowable<StreamEntity<DO>> outputStream = inputStream.getRecordStream().flatMapSingle(in -> {
            if (in instanceof ControlMessage) {
                // Control messages pass through unchanged after notifying the handler.
                getMessageHandler().handleMessage((ControlMessage) in);
                return Single.just((ControlMessage<DO>) in);
            } else if (in instanceof RecordEnvelope) {
                // Records are converted asynchronously; SingleAsync pairs the envelope with its future result.
                RecordEnvelope<DI> recordEnvelope = (RecordEnvelope<DI>) in;
                return new SingleAsync(recordEnvelope,
                        convertRecordAsync(outputSchema, recordEnvelope.getRecord(), workUnitState));
            } else {
                throw new IllegalStateException("Expected ControlMessage or RecordEnvelope.");
            }
        }, false, maxConcurrentAsyncConversions);
        // Rebuild global metadata around the converted output schema.
        return inputStream.withRecordStream(outputStream,
                GlobalMetadata.<SI, SO>builderWithInput(inputStream.getGlobalMetadata(),
                        Optional.fromNullable(outputSchema)).build());
    }
}
public class StringUtil { /** * Returns true if the char is considered a " safe " char . Please see * documentation for isSafeString ( ) . * @ see # isSafeString ( java . lang . String ) */ public static boolean isSafeChar ( char ch ) { } }
if ( ch >= 'a' && ch <= 'z' ) return true ; if ( ch >= 'A' && ch <= 'Z' ) return true ; if ( ch >= '0' && ch <= '9' ) return true ; // loop thru our PRINTABLE string for ( int i = 0 ; i < SAFE . length ( ) ; i ++ ) { if ( ch == SAFE . charAt ( i ) ) return true ; } return false ;
public class OperatorFromFunctionals {
    /**
     * Subscriber function that invokes the callable and returns its value or
     * propagates its checked exception.
     *
     * <p>The work is deferred: the callable runs only when the returned
     * {@code OnSubscribe} is subscribed to, inside {@code InvokeAsync}.
     *
     * @param callable the computation to run per subscription
     * @return an {@code OnSubscribe} wrapping the callable
     */
    public static <R> OnSubscribe<R> fromCallable(Callable<? extends R> callable) {
        return new InvokeAsync<R>(callable);
    }
}
public class WeakHashMapPro { /** * Returns the entry associated with the specified key in this map . Returns null if the map contains * no mapping for this key . */ Entry < K , V > getEntry ( Object key ) { } }
Object k = maskNull ( key ) ; int h = hash ( k ) ; Entry < K , V > [ ] tab = getTable ( ) ; int index = indexFor ( h , tab . length ) ; Entry < K , V > e = tab [ index ] ; while ( e != null && ! ( e . hash == h && eq ( k , e . get ( ) ) ) ) e = e . next ; return e ;
public class HttpConnection { public void sendRequest ( String method , String uri , HeaderList headerList , Body body ) throws IOException { } }
String value ; output . writeRequestLine ( method , uri ) ; for ( Header header : headerList ) { output . writeAscii ( header . name ) ; output . writeAscii ( ": " ) ; value = header . value ; if ( value != null ) { output . writeAscii ( value ) ; } output . writeAsciiLn ( ) ; } output . writeAsciiLn ( ) ; if ( body != null ) { serialize ( output , headerList , body ) ; } output . flush ( ) ;
public class NodeImpl {
    /**
     * Helper method for nodes that have multiple default incoming connections.
     *
     * <p>Delegates to {@code getIncomingConnections} with the default connection type constant.
     *
     * @return the incoming connections of the default type
     */
    public List<Connection> getDefaultIncomingConnections() {
        return getIncomingConnections(org.jbpm.workflow.core.Node.CONNECTION_DEFAULT_TYPE);
    }
}
public class TwiML { /** * Get transformed attribute name for this Twiml element . */ private String getTransformedAttrName ( final String attrName ) { } }
return attrNameMapper . containsKey ( attrName ) ? attrNameMapper . get ( attrName ) : attrName ;
public class TerminalSyntax { /** * Validates terminal color code For reference check website : * http : / / misc . flogisoft . com / bash / tip _ colors _ and _ formatting * @ param code numeric value as a String */ @ Override public void validateColorCode ( String code ) { } }
Preconditions . checkNotEmpty ( code , "color code is empty" ) ; Integer numericColorCode = Integer . valueOf ( code ) ; boolean isColorCodeValid = numericColorCode > 0 && numericColorCode < 257 ; if ( ! isColorCodeValid ) { throw new IllegalArgumentException ( "color code should be a number between 1 and 256" ) ; }
public class Indexes { /** * Marks the given partition as unindexed by the given indexes . * @ param partitionId the ID of the partition to mark as unindexed . * @ param indexes the indexes by which the given partition is unindexed . */ public static void markPartitionAsUnindexed ( int partitionId , InternalIndex [ ] indexes ) { } }
for ( InternalIndex index : indexes ) { index . markPartitionAsUnindexed ( partitionId ) ; }
public class StoredResponse {
    /**
     * Retrieve a response header as an int.
     *
     * <p>First consults the live {@code _header} object; -1 from it means "not present"
     * and triggers a fall-back scan of the stored {@code headerTable}, where
     * {@code headerTable[0]} appears to hold names and {@code headerTable[1]} the
     * corresponding values at the same position — TODO confirm against the field's declaration.
     *
     * @param name the header name to look up
     * @return the header value as an int, or -1 if the header is absent
     */
    public int getIntHeader(String name) {
        if (_header != null) {
            int headerVal = _header.getIntHeader(name);
            if (headerVal != -1)
                return headerVal;
        }
        if (this.headerTable != null) {
            // Parallel scan: index i in the name column corresponds to index i in the value column.
            int i = 0;
            for (Object obj : headerTable[0]) {
                String strVal = (String) obj;
                if (name.equals(strVal)) {
                    return Integer.valueOf((String) headerTable[1].get(i));
                }
                i++;
            }
        }
        return -1;
    }
}
public class CmsSiteManagerImpl { /** * Returns the configured site if the given root path matches site in the " / sites / " folder , * or < code > null < / code > otherwise . < p > * @ param rootPath the root path to check * @ return the configured site if the given root path matches site in the " / sites / " folder , * or < code > null < / code > otherwise */ private CmsSite lookupSitesFolder ( String rootPath ) { } }
int pos = rootPath . indexOf ( '/' , SITES_FOLDER_POS ) ; if ( pos > 0 ) { // this assumes that the root path may likely start with something like " / sites / default / " // just cut the first 2 directories from the root path and do a direct lookup in the internal map return m_siteRootSites . get ( rootPath . substring ( 0 , pos ) ) ; } return null ;
public class ExpressionBuilder { /** * Appends a greater than or equals test to the condition . * @ param trigger the trigger field . * @ param compare the value to use in the compare . * @ return this ExpressionBuilder . */ public ExpressionBuilder greaterThanOrEquals ( final SubordinateTrigger trigger , final Object compare ) { } }
BooleanExpression exp = new CompareExpression ( CompareType . GREATER_THAN_OR_EQUAL , trigger , compare ) ; appendExpression ( exp ) ; return this ;
public class PTSaxton2006 {
    /**
     * Equation 16 for calculating Saturated conductivity (matric soil), mm/h.
     *
     * <p>Computes 1930 * (satMoisture - moisture33kPa)^(3 - lambda), with the two moisture
     * terms first scaled from percent to fractions via division by 100. All arithmetic is
     * performed on decimal strings through the project's string-math helpers
     * (divide/product/pow/substract).
     *
     * @param slsnd Sand weight percentage by layer ([0,100]%)
     * @param slcly Clay weight percentage by layer ([0,100]%)
     * @param omPct Organic matter weight percentage by layer ([0,100]%), (= SLOC * 1.72)
     * @return the saturated conductivity as a decimal string, in mm/h
     */
    public static String calcSatMatric(String slsnd, String slcly, String omPct) {
        // Saturated moisture and 33 kPa moisture, converted from percent to fraction.
        String satMt = divide(calcSaturatedMoisture(slsnd, slcly, omPct), "100");
        String mt33 = divide(calcMoisture33Kpa(slsnd, slcly, omPct), "100");
        String lamda = calcLamda(slsnd, slcly, omPct);
        // Equation 16: 1930 * (satMt - mt33)^(3 - lambda). ("substract" is the helper's spelling.)
        String ret = product("1930", pow(substract(satMt, mt33), substract("3", lamda)));
        LOG.debug("Calculate result for Saturated conductivity (matric soil), mm/h is {}", ret);
        return ret;
    }
}
public class JsonArray { /** * Returns the { @ link java . lang . Boolean } at index or null if not found . * @ param index * @ return the value at index or null if not found * @ throws java . lang . IndexOutOfBoundsException if the index is out of the array bounds */ public Boolean getBoolean ( int index ) { } }
Object bool = list . get ( index ) ; if ( bool == null ) return null ; if ( bool instanceof Boolean ) return ( Boolean ) bool ; throw new JsonException ( "not a boolean" ) ;
public class CmsWebdavServlet {
    /**
     * Reads the information about a destination path out of the header of the
     * request.<p>
     *
     * <p>The WebDAV Destination header may be an absolute URL or a server-relative path.
     * This method strips, in order: URL encoding, the scheme and authority, a bare
     * host[:port] prefix, then normalizes the path and removes the context and servlet
     * path prefixes, leaving a path relative to this servlet.
     *
     * @param req the servlet request we are processing
     * @return the destination path, or null if no Destination header is present
     */
    private String parseDestinationHeader(HttpServletRequest req) {
        // Parsing destination header
        String destinationPath = req.getHeader(HEADER_DESTINATION);
        if (destinationPath == null) {
            return null;
        }
        // Remove url encoding from destination
        destinationPath = CmsEncoder.decode(destinationPath, "UTF8");
        int protocolIndex = destinationPath.indexOf("://");
        if (protocolIndex >= 0) {
            // if the Destination URL contains the protocol, we can safely
            // trim everything up to the first "/" character after "://"
            int firstSeparator = destinationPath.indexOf("/", protocolIndex + 4);
            if (firstSeparator < 0) {
                destinationPath = "/";
            } else {
                destinationPath = destinationPath.substring(firstSeparator);
            }
        } else {
            // No scheme: the header may still start with "host[:port]/path".
            String hostName = req.getServerName();
            if ((hostName != null) && (destinationPath.startsWith(hostName))) {
                destinationPath = destinationPath.substring(hostName.length());
            }
            int portIndex = destinationPath.indexOf(":");
            if (portIndex >= 0) {
                // Keeps the ":" itself; the startsWith(":") branch below consumes ":port".
                destinationPath = destinationPath.substring(portIndex);
            }
            if (destinationPath.startsWith(":")) {
                int firstSeparator = destinationPath.indexOf("/");
                if (firstSeparator < 0) {
                    destinationPath = "/";
                } else {
                    destinationPath = destinationPath.substring(firstSeparator);
                }
            }
        }
        // Normalize destination path (remove '.' and '..')
        destinationPath = normalize(destinationPath);
        // Strip the web-app context path so the result is app-relative.
        String contextPath = req.getContextPath();
        if ((contextPath != null) && (destinationPath.startsWith(contextPath))) {
            destinationPath = destinationPath.substring(contextPath.length());
        }
        // Strip the servlet path too, but only when path info exists (mirrors request mapping).
        String pathInfo = req.getPathInfo();
        if (pathInfo != null) {
            String servletPath = req.getServletPath();
            if ((servletPath != null) && (destinationPath.startsWith(servletPath))) {
                destinationPath = destinationPath.substring(servletPath.length());
            }
        }
        return destinationPath;
    }
}
public class ReflectionHelper { /** * Returns true if the given type has a method with the given annotation */ public static boolean hasMethodWithAnnotation ( Class < ? > type , Class < ? extends Annotation > annotationType , boolean checkMetaAnnotations ) { } }
try { do { Method [ ] methods = type . getDeclaredMethods ( ) ; for ( Method method : methods ) { if ( hasAnnotation ( method , annotationType , checkMetaAnnotations ) ) { return true ; } } type = type . getSuperclass ( ) ; } while ( type != null ) ; } catch ( Throwable e ) { // ignore a class loading issue } return false ;
public class CacheProxy { /** * Attempts to close the resource . If an error occurs and an outermost exception is set , then adds * the error to the suppression list . * @ param o the resource to close if Closeable * @ param outer the outermost error , or null if unset * @ return the outermost error , or null if unset and successful */ private static @ Nullable Throwable tryClose ( Object o , @ Nullable Throwable outer ) { } }
if ( o instanceof Closeable ) { try { ( ( Closeable ) o ) . close ( ) ; } catch ( Throwable t ) { if ( outer == null ) { return t ; } outer . addSuppressed ( t ) ; return outer ; } } return null ;
public class ExampleUtils { /** * Method that lasts randomly from ~ 0 to the square of the specified amount of maxMsRoot . * This is just to avoid linear randomness . * @ param maxMsRoot square root of the maximal waiting time */ public static void waitRandomlySquared ( long maxMsRoot ) { } }
long random = ( long ) ( Math . random ( ) * maxMsRoot ) ; try { Thread . sleep ( random * random ) ; } catch ( InterruptedException e ) { e . printStackTrace ( ) ; }
public class LValue { /** * Convert ` value ` to a boolean . Note that only ` nil ` and ` false ` * are ` false ` , all other values are ` true ` . * @ param value * the value to convert . * @ return ` value ` as a boolean . */ public boolean asBoolean ( Object value ) { } }
if ( value == null ) { return false ; } if ( value instanceof Boolean ) { return ( Boolean ) value ; } return true ;
public class XSplitter {
    /**
     * Computes and returns the mbr of the specified nodes; only the entries at
     * indices {@code from} (inclusive) through {@code to} (exclusive) are considered —
     * note the loop bound is {@code i < to}, so {@code to} itself is never read.
     *
     * @param entries the array of node indices
     * @param from the start index (inclusive)
     * @param to the end index (exclusive, per the loop bound)
     * @return the mbr of the specified nodes
     */
    private HyperBoundingBox mbr(final int[] entries, final int from, final int to) {
        // Seed the box with the first entry, then grow it over the remainder of the range.
        SpatialEntry first = this.node.getEntry(entries[from]);
        ModifiableHyperBoundingBox mbr = new ModifiableHyperBoundingBox(first);
        for (int i = from + 1; i < to; i++) {
            mbr.extend(this.node.getEntry(entries[i]));
        }
        return mbr;
    }
}
public class SARLJvmModelInferrer { /** * Generate the extended types for the given SARL statement . * @ param context the context of the generation . * @ param owner the JVM element to change . * @ param defaultJvmType the default JVM type . * @ param defaultSarlType the default SARL type . * @ param supertype the supertype . */ protected void appendConstrainedExtends ( GenerationContext context , JvmGenericType owner , Class < ? > defaultJvmType , Class < ? extends XtendTypeDeclaration > defaultSarlType , JvmParameterizedTypeReference supertype ) { } }
final List < ? extends JvmParameterizedTypeReference > supertypes ; if ( supertype == null ) { supertypes = Collections . emptyList ( ) ; } else { supertypes = Collections . singletonList ( supertype ) ; } appendConstrainedExtends ( context , owner , defaultJvmType , defaultSarlType , supertypes ) ;
public class S1ScriptEngine {
    /**
     * Run template.
     *
     * <p>Convenience overload that delegates to the three-argument {@code template}
     * with a null first argument.
     *
     * @param template template text
     * @param data context
     * @return evaluated template
     * @throws ScriptException
     * @throws ScriptLimitException
     * @throws SyntaxException
     */
    public String template(String template, Map<String, Object> data) throws ScriptException, ScriptLimitException, SyntaxException {
        return template(null, template, data);
    }
}
public class HtmlGroupBaseTag {
    /**
     * This will create a new option in the HTML: renders an &lt;input&gt; of the given type
     * followed by a &lt;span&gt; holding the display text, wrapped in table cells or
     * separated by &lt;br&gt;/&amp;nbsp; depending on the orientation setting.
     *
     * @param buffer the appender receiving the rendered markup
     * @param type the input element's type attribute
     * @param optionValue the value attribute of the option
     * @param optionDisplay the visible label text
     * @param idx the option's index (unused in this method body)
     * @param altText the alt attribute for the input
     * @param accessKey the access key, or 0x00 for none
     * @param disabled unused here; the disabled state comes from isDisabled()
     * @throws JspException on rendering failure
     */
    protected void addOption(AbstractRenderAppender buffer, String type, String optionValue,
            String optionDisplay, int idx, String altText, char accessKey, boolean disabled)
            throws JspException {
        ServletRequest req = pageContext.getRequest();
        // Lazily obtain the constant-rendering helper on first use.
        if (_cr == null)
            _cr = TagRenderingBase.Factory.getConstantRendering(req);
        assert (buffer != null);
        assert (optionValue != null);
        assert (optionDisplay != null);
        assert (type != null);
        // Vertical layout renders each option in its own table row/cell.
        if (_orientation != null && isVertical()) {
            _cr.TR_TD(buffer);
        }
        // Populate the shared input state for this option.
        _inputState.clear();
        _inputState.type = type;
        _inputState.name = getQualifiedDataSourceName();
        _inputState.value = optionValue;
        _inputState.style = _style;
        _inputState.styleClass = _class;
        if (isMatched(optionValue, null)) {
            _inputState.checked = true;
        }
        _inputState.disabled = isDisabled();
        _inputState.registerAttribute(AbstractHtmlState.ATTR_GENERAL, ALT, altText);
        if (accessKey != 0x00)
            _inputState.registerAttribute(AbstractHtmlState.ATTR_GENERAL, ACCESSKEY,
                    Character.toString(accessKey));
        // if there are attributes defined push them to the options.
        if (_attrs != null && _attrs.size() > 0) {
            Iterator iterator = _attrs.keySet().iterator();
            for (; iterator.hasNext();) {
                String key = (String) iterator.next();
                if (key == null)
                    continue;
                String value = (String) _attrs.get(key);
                _inputState.registerAttribute(AbstractHtmlState.ATTR_GENERAL, key, value);
            }
        }
        // Render the <input> element itself.
        TagRenderingBase br = TagRenderingBase.Factory.getRendering(TagRenderingBase.INPUT_BOOLEAN_TAG, req);
        br.doStartTag(buffer, _inputState);
        br.doEndTag(buffer);
        // Render the label text inside a styled <span>.
        String ls = _labelStyle;
        String lsc = _labelStyleClass;
        _spanState.style = ls;
        _spanState.styleClass = lsc;
        br = TagRenderingBase.Factory.getRendering(TagRenderingBase.SPAN_TAG, req);
        br.doStartTag(buffer, _spanState);
        buffer.append(optionDisplay);
        br.doEndTag(buffer);
        // backward compatibility this is now overridden by the _orientation
        if (_orientation == null) {
            _cr.BR(buffer);
        } else {
            if (isVertical()) {
                _cr.TR_TD(buffer);
            } else {
                _cr.NBSP(buffer);
            }
        }
    }
}
public class CubeConfigurator { /** * Add precedence - 10 because we need that ContainerRegistry is available in the Arquillian scope . */ public void configure ( @ Observes ( precedence = - 10 ) ArquillianDescriptor arquillianDescriptor ) { } }
Map < String , String > config = arquillianDescriptor . extension ( EXTENSION_NAME ) . getExtensionProperties ( ) ; CubeConfiguration cubeConfiguration = CubeConfiguration . fromMap ( config ) ; configurationProducer . set ( cubeConfiguration ) ;
public class ChargingStationEventListener { /** * Handles the { @ link AuthorizationListChangedEvent } . * @ param event the event to handle . */ @ EventHandler public void handle ( AuthorizationListChangedEvent event ) { } }
ChargingStation chargingStation = repository . findOne ( event . getChargingStationId ( ) . getId ( ) ) ; if ( chargingStation != null ) { Set < LocalAuthorization > updatedLocalAuthorizations = toLocalAuthorizationSet ( event . getIdentifyingTokens ( ) ) ; if ( AuthorizationListUpdateType . FULL . equals ( event . getUpdateType ( ) ) ) { chargingStation . setLocalAuths ( updatedLocalAuthorizations ) ; } else { updateAuthorizationList ( chargingStation , updatedLocalAuthorizations ) ; } chargingStation . setLocalAuthorizationListVersion ( event . getVersion ( ) ) ; repository . createOrUpdate ( chargingStation ) ; }
public class ParticlesDrawable {
    /**
     * {@inheritDoc}
     *
     * <p>Pure delegation: forwards the radius bounds to the underlying scene.
     */
    @Override
    public void setParticleRadiusRange(@FloatRange(from = 0.5f) final float minRadius,
            @FloatRange(from = 0.5f) final float maxRadius) {
        scene.setParticleRadiusRange(minRadius, maxRadius);
    }
}
public class CellPositioner { /** * Properly resizes the cell ' s node , and sets its " layoutY " value , so that is the last visible * node in the viewport , and further offsets this value by { @ code endOffStart } , so that * the node ' s < em > bottom < / em > edge appears ( if negative ) " above , " ( if 0 ) " at , " or ( if negative ) " below " the * viewport ' s " top " edge . See { @ link OrientationHelper } ' s javadoc for more explanation on what quoted terms mean . * < pre > < code > * - - - - - bottom of cell ' s node if startOffStart is negative * _ _ _ _ _ " top edge " of viewport / bottom of cell ' s node if startOffStart = 0 * | - - - - - bottom of cell ' s node if startOffStart is positive * < / code > < / pre > * @ param itemIndex the index of the item in the list of all ( not currently visible ) cells * @ param startOffEnd the amount by which to offset the " layoutY " value of the cell ' s node */ public C placeStartFromEnd ( int itemIndex , double startOffEnd ) { } }
C cell = getSizedCell ( itemIndex ) ; double y = sizeTracker . getViewportLength ( ) + startOffEnd ; relocate ( cell , 0 , y ) ; cell . getNode ( ) . setVisible ( true ) ; return cell ;
public class RuleSessionImpl {
    /*
     * (non-Javadoc)
     *
     * Assigns a value (or each element of a list of ProxyFields) to the target field:
     * rejects excluded values, coerces the value to the setter's parameter type when
     * needed, records the assignment, and queues the field on the rule agenda.
     *
     * @see nz.co.senanque.rules.RuleSession#assign(nz.co.senanque.validationengine.ProxyField, java.lang.Object, nz.co.senanque.rules.RuleContext, boolean)
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    public void assign(RuleProxyField target, Object value, RuleContext ruleContext, boolean dummy) {
        ProxyField target1 = target.getProxyField();
        // The expected value type is the parameter type of the target property's setter.
        Class<?> clazz = target1.getPropertyMetadata().getSetMethod().getParameterTypes()[0];
        if (value instanceof List<?>) {
            // List case: assign each source field's value in turn.
            List<ProxyField> sourceList = (List<ProxyField>) value;
            RuleProxyField rpf = target;
            for (ProxyField pf : sourceList) {
                Object listValue = pf.getValue();
                if (rpf.isDifferent(listValue)) {
                    // Excluded values abort the whole assignment with an InferenceException.
                    if (isExcluded(target1, String.valueOf(listValue))) {
                        String message = m_messageSourceAccessor.getMessage(
                                "nz.co.senanque.rules.excluded.value",
                                new Object[] { listValue, target1.getFieldName() });
                        throw new InferenceException(message);
                    }
                    // Coerce to the setter's parameter type when the raw value is incompatible.
                    if (listValue != null && !clazz.isAssignableFrom(listValue.getClass())) {
                        listValue = ConvertUtils.convertToComparable((Class<Comparable>) clazz,
                                listValue, m_messageSourceAccessor);
                    }
                    addAssignedField(target1);
                    target.assign(listValue);
                    addToAgenda(rpf);
                }
                // Recorded for every element, even when the value was unchanged.
                ruleContext.addAssign(rpf);
            }
            return;
        }
        // Scalar case: same coercion and exclusion checks on the single value.
        if (value != null && !clazz.isAssignableFrom(value.getClass())) {
            value = ConvertUtils.convertToComparable((Class<Comparable>) clazz, value,
                    m_messageSourceAccessor);
        }
        RuleProxyField rpf = getRuleProxyField(target1);
        if (rpf.isDifferent(value)) {
            if (isExcluded(target1, String.valueOf(value))) {
                String message = m_messageSourceAccessor.getMessage(
                        "nz.co.senanque.rules.excluded.value",
                        new Object[] { value, target1.getFieldName() });
                throw new InferenceException(message);
            }
            addAssignedField(target1);
            removeUnknownField(target1);
            target.assign(value);
            addToAgenda(rpf);
        }
        ruleContext.addAssign(rpf);
    }
}
public class MoreGraphs {
    /**
     * Sorts a directed acyclic graph into a list.
     *
     * <p>The particular order of elements without prerequisites is determined by the comparator;
     * this overload delegates to the general {@code topologicalSort} with a comparator-based
     * sort type.</p>
     *
     * @param graph the graph to be sorted
     * @param comparator the comparator
     * @param <T> the node type
     * @return the sorted list
     * @throws CyclePresentException if the graph has cycles
     * @throws IllegalArgumentException if the graph is not directed or allows self loops
     */
    public static <T> @NonNull List<T> orderedTopologicalSort(final @NonNull Graph<T> graph,
            final @NonNull Comparator<T> comparator) {
        return topologicalSort(graph, new ComparatorSortType<>(comparator));
    }
}
public class TaskQueue { /** * Submits a non value - returning task for synchronous execution . It waits for all synchronous tasks to be * completed . * @ param task A task to be executed synchronously . */ public void submitSynchronous ( Runnable task ) { } }
lock . writeLock ( ) . lock ( ) ; try { awaitInner ( ) ; task . run ( ) ; } catch ( InterruptedException e ) { Log . error ( e , "Task queue isolated submission interrupted" ) ; throw new RuntimeException ( e ) ; } catch ( Exception e ) { Log . error ( e , "Task queue isolated submission failed" ) ; throw new RuntimeException ( e ) ; } finally { lock . writeLock ( ) . unlock ( ) ; }
public class BasicBlock { /** * Return whether or not the basic block contains the instruction with the * given bytecode offset . * @ param offset * the bytecode offset * @ return true if the block contains an instruction with the given offset , * false if it does not */ public boolean containsInstructionWithOffset ( int offset ) { } }
Iterator < InstructionHandle > i = instructionIterator ( ) ; while ( i . hasNext ( ) ) { if ( i . next ( ) . getPosition ( ) == offset ) { return true ; } } return false ;
public class JdbcRepository { /** * setUpdateProperties . * @ param id id * @ param needUpdateJsonObject needUpdateJsonObject * @ param paramList paramList * @ param sql sql * @ throws JSONException JSONException */ private void setUpdateProperties ( final String id , final JSONObject needUpdateJsonObject , final List < Object > paramList , final StringBuilder sql ) throws JSONException { } }
final Iterator < String > keys = needUpdateJsonObject . keys ( ) ; String key ; boolean isFirst = true ; final StringBuilder wildcardString = new StringBuilder ( ) ; while ( keys . hasNext ( ) ) { key = keys . next ( ) ; if ( isFirst ) { wildcardString . append ( " SET " ) . append ( key ) . append ( " = ?" ) ; isFirst = false ; } else { wildcardString . append ( ", " ) . append ( key ) . append ( " = ?" ) ; } paramList . add ( needUpdateJsonObject . get ( key ) ) ; } sql . append ( "UPDATE " ) . append ( getName ( ) ) . append ( wildcardString ) . append ( " WHERE " ) . append ( JdbcRepositories . getDefaultKeyName ( ) ) . append ( " = ?" ) ; paramList . add ( id ) ;
public class HourGlass { /** * Update the task share of the clusters * @ param clusters Two clusters with tasktrackers shares same nodes */ static private void updateShares ( Cluster clusters [ ] ) { } }
assert ( clusters . length == 2 ) ; if ( clusters [ 0 ] . runnableMaps == 0 && clusters [ 0 ] . runnableMaps == 0 && clusters [ 1 ] . runnableReduces == 0 && clusters [ 1 ] . runnableReduces == 0 ) { // Do nothing if both clusters are empty return ; } // Update target task shares using runnable tasks and weight if ( ! ( clusters [ 0 ] . runnableMaps == 0 && clusters [ 1 ] . runnableMaps == 0 ) ) { clusters [ 0 ] . targetMapShare = clusters [ 0 ] . runnableMaps * clusters [ 0 ] . weight / ( clusters [ 0 ] . runnableMaps * clusters [ 0 ] . weight + clusters [ 1 ] . runnableMaps * clusters [ 1 ] . weight ) ; clusters [ 1 ] . targetMapShare = 1 - clusters [ 0 ] . targetMapShare ; } if ( ! ( clusters [ 0 ] . runnableReduces == 0 && clusters [ 1 ] . runnableReduces == 0 ) ) { clusters [ 0 ] . targetReduceShare = clusters [ 0 ] . runnableReduces * clusters [ 0 ] . weight / ( clusters [ 0 ] . runnableReduces * clusters [ 0 ] . weight + clusters [ 1 ] . runnableReduces * clusters [ 1 ] . weight ) ; clusters [ 1 ] . targetReduceShare = 1 - clusters [ 0 ] . targetReduceShare ; } for ( int i = 0 ; i < 2 ; ++ i ) { LOG . info ( String . format ( "Update Shares. " + "cluster%s:%s runnableMaps:%s runnableReduces:%s " + "weight:%s targetMapShare:%s targetReduceShare:%s" , i , clusters [ i ] . address , clusters [ i ] . weight , clusters [ i ] . runnableMaps , clusters [ i ] . runnableReduces , clusters [ i ] . targetMapShare , clusters [ i ] . targetReduceShare ) ) ; }
public class JsonUtils { /** * Converts a given object to a Json string * @ param object The object to convert * @ return json string or null if conversion fails */ public static String toJson ( Object object ) { } }
Objects . requireNonNull ( object , Required . OBJECT . toString ( ) ) ; String json = null ; try { json = mapper . writeValueAsString ( object ) ; } catch ( JsonProcessingException e ) { LOG . error ( "Failed to convert object to json" , e ) ; } return json ;
public class UserFinderUtil {
    /**
     * Retrieves the users associated with the account.
     *
     * <p>Each returned user carries only the rights for this account; enabled,
     * non-expired root users are appended to the result as well.
     *
     * @param account for which users should be gathered
     * @return the set of users associated with an account
     */
    public Set<DuracloudUser> getAccountUsers(AccountInfo account) {
        DuracloudRightsRepo rightsRepo = repoMgr.getRightsRepo();
        List<AccountRights> acctRights = rightsRepo.findByAccountId(account.getId());
        Set<DuracloudUser> users = new HashSet<>();
        for (AccountRights rights : acctRights) {
            DuracloudUser user = rights.getUser();
            // ensure account is loaded (side-effecting call; presumably forces a lazy
            // association to initialize — TODO confirm against the entity mapping).
            rights.getAccount();
            // make sure only the rights for this account are set
            Set<AccountRights> accountOnlyRightsSet = new HashSet<>();
            accountOnlyRightsSet.add(rights);
            user.setAccountRights(accountOnlyRightsSet);
            users.add(user);
        }
        // Root users (enabled, non-expired, non-locked) are always included.
        List<DuracloudUser> rootUsers = repoMgr.getUserRepo()
            .findByRootTrueAndEnabledTrueAndAccountNonExpiredTrueAndCredentialsNonExpiredTrueAndAccountNonLockedTrue();
        users.addAll(rootUsers);
        return users;
    }
}
public class Levenshtein {
    /**
     * Returns a new Q-Gram (Ukkonen) instance with compare target string and k-shingling.
     *
     * <p>Builds the instance from the base target and shingle size, then applies the
     * compare target via {@code update}.
     *
     * @see QGram
     * @param baseTarget the base string
     * @param compareTarget the string to compare against
     * @param k the shingle size
     * @return the configured instance
     */
    @SuppressWarnings("unchecked")
    public static <T extends Levenshtein> T QGram(String baseTarget, String compareTarget, Integer k) {
        return (T) new QGram(baseTarget, k).update(compareTarget);
    }
}
public class TangoCommand { /** * Execute a command with argin which is an array * @ param < T > * @ param clazz * @ param value * @ return * @ throws DevFailed */ public < T > List < T > executeExtractList ( final Class < T > clazz , final Object ... value ) throws DevFailed { } }
final Object result = command . executeExtract ( value ) ; return extractList ( TypeConversionUtil . castToArray ( clazz , result ) ) ;
public class SampleWithTilesOverlay {
    /**
     * Called when the activity is first created. Builds the map view, centers it on
     * the Netherlands, and stacks a cycling-route tiles overlay on top of the base map.
     */
    @Override
    public void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Setup base map
        rl = new RelativeLayout(this);
        this.mMapView = new MapView(this);
        this.mMapView.setTilesScaledToDpi(true);
        rl.addView(this.mMapView,
                new RelativeLayout.LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
        this.mMapView.setBuiltInZoomControls(true);
        // zoom to the netherlands (coordinates in microdegrees)
        this.mMapView.getController().setZoom(7);
        this.mMapView.getController().setCenter(new GeoPoint(51500000, 5400000));
        // Add tiles layer with the cycling overlay tile source.
        mProvider = new MapTileProviderBasic(getApplicationContext());
        mProvider.setTileSource(TileSourceFactory.FIETS_OVERLAY_NL);
        this.mTilesOverlay = new TilesOverlay(mProvider, this.getBaseContext());
        this.mMapView.getOverlays().add(this.mTilesOverlay);
        this.setContentView(rl);
    }
}
public class BenchUtils { /** * Generating one single { @ link DumbData } with random values . * @ return one { @ link DumbData } with random values . */ public static final DumbData generateOne ( ) { } }
byte [ ] data = new byte [ 1024 ] ; random . nextBytes ( data ) ; return new DumbData ( random . nextLong ( ) , data ) ;
public class IntegratorHandlerImpl { /** * Read integrator protocol from content container . Default to AUTO . * @ param properties Content container * @ return Integrator protocol */ @ SuppressWarnings ( "null" ) private IntegratorProtocol getIntegratorProtocol ( ValueMap properties ) { } }
IntegratorProtocol protocol = IntegratorProtocol . AUTO ; try { String protocolString = properties . get ( IntegratorNameConstants . PN_INTEGRATOR_PROTOCOL , String . class ) ; if ( StringUtils . isNotEmpty ( protocolString ) ) { protocol = IntegratorProtocol . valueOf ( protocolString . toUpperCase ( ) ) ; } } catch ( IllegalArgumentException ex ) { // ignore } return protocol ;
public class KernelCore { /** * This methods will create both Global ModuleProviders : nominal and overriding . */ private void createMainModule ( ) { } }
for ( Plugin plugin : orderedPlugins ) { moduleHandler . handleUnitModule ( requestHandler , plugin ) ; moduleHandler . handleOverridingUnitModule ( requestHandler , plugin ) ; } KernelGuiceModuleInternal kernelGuiceModuleInternal = new KernelGuiceModuleInternal ( requestHandler ) ; KernelGuiceModuleInternal internalKernelGuiceModuleOverriding = new KernelGuiceModuleInternal ( requestHandler ) . overriding ( ) ; mainModule = Modules . override ( kernelGuiceModuleInternal ) . with ( internalKernelGuiceModuleOverriding ) ;
public class Descriptor {

    /**
     * Adds the given topic calls to this service descriptor.
     * Descriptors are immutable: this returns a copy carrying every other
     * field unchanged, with the new topic calls appended to the existing ones.
     *
     * @param topicCalls the topic calls to add
     * @return a copy of this descriptor with the new calls added
     */
    public Descriptor withTopics(TopicCall<?>... topicCalls) {
        return new Descriptor(name, calls, pathParamSerializers, messageSerializers, serializerFactory,
                exceptionSerializer, autoAcl, acls, headerFilter, locatableService, circuitBreaker,
                // plusAll appends the varargs to the persistent topic-call collection
                this.topicCalls.plusAll(Arrays.asList(topicCalls)));
    }
}
public class GroupBasicAdapter { /** * remove a group * @ param group the group to be removed */ public void removeGroup ( @ Nullable L group ) { } }
if ( group == null ) { return ; } List < L > cards = getGroups ( ) ; boolean changed = cards . remove ( group ) ; if ( changed ) { setData ( cards ) ; }
public class HtmlMessages {

    /**
     * Sets the value of the <code>fatalClass</code> property.
     * The value is stored in the component's state helper so it participates
     * in JSF state saving/restoring.
     *
     * @param fatalClass the CSS class applied to fatal-severity messages
     */
    public void setFatalClass(java.lang.String fatalClass) {
        getStateHelper().put(PropertyKeys.fatalClass, fatalClass);
    }
}
public class GoogleCalendarService {

    /**
     * Gets a list of entries belonging to the given calendar defined between
     * the given range of time. Recurring events are not expanded
     * (setSingleEvents(false)); recurrence is always handled manually within
     * the framework.
     *
     * @param calendar the calendar owner of the entries
     * @param startDate the start date, not nullable (inclusive, from start of day)
     * @param endDate the end date, not nullable (inclusive, until end of day)
     * @param zoneId the timezone in which the dates are represented
     * @return a non-null list of entries
     * @throws IOException for unexpected errors talking to the Google API
     */
    public List<GoogleEntry> getEntries(GoogleCalendar calendar, LocalDate startDate, LocalDate endDate, ZoneId zoneId) throws IOException {
        // A calendar that does not exist remotely cannot have remote entries.
        if (!calendar.existsInGoogle()) {
            return new ArrayList<>(0);
        }
        // Expand the date range to whole days in the requested zone.
        ZonedDateTime st = ZonedDateTime.of(startDate, LocalTime.MIN, zoneId);
        ZonedDateTime et = ZonedDateTime.of(endDate, LocalTime.MAX, zoneId);
        // The stored id may be URL-encoded; the API expects the decoded form.
        String calendarId = URLDecoder.decode(calendar.getId(), "UTF-8");
        List<Event> events = dao.events().list(calendarId)
                .setTimeMin(new DateTime(Date.from(st.toInstant())))
                .setTimeMax(new DateTime(Date.from(et.toInstant())))
                .setSingleEvents(false)
                .setShowDeleted(false)
                .execute()
                .getItems();
        return toGoogleEntries(events);
    }
}
public class BinderExtension {

    /**
     * Finds a converter able to turn values of the source type into the
     * target type, delegating the lookup to the shared BINDING registry.
     *
     * @param source the source type to convert from
     * @param target the target type to convert to
     * @param qualifier an optional annotation qualifier narrowing the lookup
     * @return the converter resolved by the BINDING registry
     */
    public <S, T> Converter<S, T> findConverter(Class<S> source, Class<T> target, Class<? extends Annotation> qualifier) {
        return BINDING.findConverter(source, target, qualifier);
    }
}
public class CmsRelationType { /** * Returns all weak relation types in the given list . < p > * @ param relationTypes the collection of relation types to filter * @ return a list of { @ link CmsRelationType } objects */ public static List < CmsRelationType > filterWeak ( Collection < CmsRelationType > relationTypes ) { } }
List < CmsRelationType > result = new ArrayList < CmsRelationType > ( relationTypes ) ; Iterator < CmsRelationType > it = result . iterator ( ) ; while ( it . hasNext ( ) ) { CmsRelationType type = it . next ( ) ; if ( type . isStrong ( ) ) { it . remove ( ) ; } } return result ;
public class AbstractJMSMessageProducer {

    /**
     * Sends the message to the destination in a non-transactional manner.
     * Since a non-transacted session is used, the message is sent immediately
     * without requiring a commit of an enclosing transaction.
     *
     * @param destination where to send
     * @param message what to send
     */
    protected void sendNonTransacted(Destination destination, Serializable message) {
        // Delegates with transacted = false; the two null arguments'
        // semantics are defined by send() - presumably optional
        // headers/properties, verify against the send() overload.
        send(destination, message, null, null, false);
    }
}
public class PollCachingESRegistry {

    /**
     * Stores a "dataversion" record in the ES store. There is only a single
     * one of these (fixed document id "instance"). The return value of the add
     * will include the version number of the entity. This version number is
     * what we use to determine whether our cache is stale.
     */
    protected void updateDataVersion() {
        DataVersionBean dv = new DataVersionBean();
        dv.setUpdatedOn(System.currentTimeMillis());
        // refresh(false): do not force an index refresh for this bookkeeping write.
        Index index = new Index.Builder(dv)
                .refresh(false)
                .index(getDefaultIndexName())
                .type("dataVersion").id("instance") //$NON-NLS-1$ //$NON-NLS-2$
                .build();
        // Fire-and-forget async write; on BOTH success and failure the locally
        // cached version is cleared so the next read re-fetches it from ES.
        getClient().executeAsync(index, new JestResultHandler<JestResult>() {
            @Override
            public void completed(JestResult result) {
                dataVersion = null;
            }

            @Override
            public void failed(Exception e) {
                dataVersion = null;
            }
        });
    }
}
public class ContentValues { /** * Gets a value and converts it to a Byte . * @ param key the value to get * @ return the Byte value , or null if the value is missing or cannot be converted */ public Byte getAsByte ( String key ) { } }
Object value = mValues . get ( key ) ; try { return value != null ? ( ( Number ) value ) . byteValue ( ) : null ; } catch ( ClassCastException e ) { if ( value instanceof CharSequence ) { try { return Byte . valueOf ( value . toString ( ) ) ; } catch ( NumberFormatException e2 ) { logger . severe ( "Cannot parse Byte value for " + value + " at key " + key ) ; return null ; } } else { logger . log ( Level . SEVERE , "Cannot cast value for " + key + " to a Byte: " + value , e ) ; return null ; } }
public class CollationRootElements {

    /**
     * Finds the largest index i where elements[i] &lt;= p.
     * Requires first primary &lt;= p &lt; 0xffffff00 (PRIMARY_SENTINEL).
     * Does not require that p is a root collator primary.
     */
    private int findP(long p) {
        // p need not occur as a root primary.
        // For example, it might be a reordering group boundary.
        assert ((p >> 24) != Collation.UNASSIGNED_IMPLICIT_BYTE);
        // modified binary search over the elements table, which interleaves
        // primary entries with secondary/tertiary delta entries (flagged
        // with SEC_TER_DELTA_FLAG) that must be skipped over.
        int start = (int) elements[IX_FIRST_PRIMARY_INDEX];
        assert (p >= elements[start]);
        int limit = elements.length - 1;
        assert (elements[limit] >= PRIMARY_SENTINEL);
        assert (p < elements[limit]);
        while ((start + 1) < limit) {
            // Invariant: elements[start] and elements[limit] are primaries,
            // and elements[start] <= p <= elements[limit].
            // Midpoint computed in long to avoid int overflow of start+limit.
            int i = (int) (((long) start + (long) limit) / 2);
            long q = elements[i];
            if ((q & SEC_TER_DELTA_FLAG) != 0) {
                // The midpoint landed on a delta entry.
                // Find the next primary.
                int j = i + 1;
                for (;;) {
                    if (j == limit) {
                        break;
                    }
                    q = elements[j];
                    if ((q & SEC_TER_DELTA_FLAG) == 0) {
                        i = j;
                        break;
                    }
                    ++j;
                }
                if ((q & SEC_TER_DELTA_FLAG) != 0) {
                    // No primary found forward; find the preceding primary.
                    j = i - 1;
                    for (;;) {
                        if (j == start) {
                            break;
                        }
                        q = elements[j];
                        if ((q & SEC_TER_DELTA_FLAG) == 0) {
                            i = j;
                            break;
                        }
                        --j;
                    }
                    if ((q & SEC_TER_DELTA_FLAG) != 0) {
                        // No primary between start and limit.
                        break;
                    }
                }
            }
            // Compare against the primary with its low "step" bits of a
            // range end primary reset (mask 0xffffff00).
            if (p < (q & 0xffffff00L)) {
                limit = i;
            } else {
                start = i;
            }
        }
        return start;
    }
}
public class AABBf {

    /**
     * Set the maximum corner coordinates.
     *
     * @param max the maximum coordinates
     * @return this
     */
    public AABBf setMax(Vector3fc max) {
        // Delegate to the per-component overload.
        return this.setMax(max.x(), max.y(), max.z());
    }
}
public class SessionEntityTypesClient { /** * Deletes the specified session entity type . * < p > Sample code : * < pre > < code > * try ( SessionEntityTypesClient sessionEntityTypesClient = SessionEntityTypesClient . create ( ) ) { * SessionEntityTypeName name = SessionEntityTypeName . of ( " [ PROJECT ] " , " [ SESSION ] " , " [ ENTITY _ TYPE ] " ) ; * sessionEntityTypesClient . deleteSessionEntityType ( name ) ; * < / code > < / pre > * @ param name Required . The name of the entity type to delete . Format : ` projects / & lt ; Project * ID & gt ; / agent / sessions / & lt ; Session ID & gt ; / entityTypes / & lt ; Entity Type Display Name & gt ; ` or * ` projects / & lt ; Project ID & gt ; / agent / environments / & lt ; Environment ID & gt ; / users / & lt ; User * ID & gt ; / sessions / & lt ; Session ID & gt ; / entityTypes / & lt ; Entity Type Display Name & gt ; ` . If * ` Environment ID ` is not specified , we assume default ' draft ' environment . If ` User ID ` is * not specified , we assume default ' - ' user . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void deleteSessionEntityType ( SessionEntityTypeName name ) { } }
DeleteSessionEntityTypeRequest request = DeleteSessionEntityTypeRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; deleteSessionEntityType ( request ) ;
public class WorkManagerImpl { /** * Fire complete for HintsContext * @ param work The work instance */ private void fireHintsComplete ( Work work ) { } }
if ( work != null && work instanceof WorkContextProvider ) { WorkContextProvider wcProvider = ( WorkContextProvider ) work ; List < WorkContext > contexts = wcProvider . getWorkContexts ( ) ; if ( contexts != null && ! contexts . isEmpty ( ) ) { Iterator < WorkContext > it = contexts . iterator ( ) ; while ( it . hasNext ( ) ) { WorkContext wc = it . next ( ) ; if ( wc instanceof HintsContext ) { HintsContext hc = ( HintsContext ) wc ; if ( hc instanceof WorkContextLifecycleListener ) { WorkContextLifecycleListener listener = ( WorkContextLifecycleListener ) hc ; listener . contextSetupComplete ( ) ; } } } } }
public class Request {

    /**
     * Makes an http POST request.
     *
     * @param url url to make the request to
     * @param params data to add to the params field
     * @param extraData data to send along with the request body, outside of
     *        the params field; note entries here overwrite same-named payload
     *        keys produced from {@code params}
     * @param files files to be uploaded along with the request
     * @param fileStreams streams to be uploaded along with the request
     * @return {@link okhttp3.Response}
     * @throws RequestException if the HTTP call itself fails
     * @throws LocalOperationException for local preparation errors
     */
    okhttp3.Response post(String url, Map<String, Object> params,
            @Nullable Map<String, String> extraData,
            @Nullable Map<String, File> files,
            @Nullable Map<String, InputStream> fileStreams)
            throws RequestException, LocalOperationException {
        // Serialize params into the signed payload, then merge the extras in.
        Map<String, String> payload = toPayload(params);
        if (extraData != null) {
            payload.putAll(extraData);
        }
        okhttp3.Request request = new okhttp3.Request.Builder()
                .url(getFullUrl(url))
                .post(getBody(payload, files, fileStreams))
                // Identify this client library/version to the service.
                .addHeader("Transloadit-Client", version)
                .build();
        try {
            return httpClient.newCall(request).execute();
        } catch (IOException e) {
            // Wrap transport failures in the library's checked exception type.
            throw new RequestException(e);
        }
    }
}
public class JavassistTransformerExecutor {

    /**
     * Evaluates and returns the output directory.
     * If the passed {@code outputDir} is {@code null} or empty (after
     * trimming), the trimmed {@code inputDir} is returned, otherwise the
     * {@code outputDir} is returned unchanged.
     *
     * @param outputDir could be {@code null} or empty
     * @param inputDir must not be {@code null}
     * @return never {@code null}
     * @throws NullPointerException if passed {@code inputDir} is {@code null}
     * @since 1.2.0
     */
    protected String evaluateOutputDirectory(final String outputDir, final String inputDir) {
        // Enforce the documented contract unconditionally: previously a null
        // inputDir only failed when outputDir was also null/empty, silently
        // accepting invalid arguments on the other path.
        if (inputDir == null) {
            throw new NullPointerException("inputDir must not be null");
        }
        if (outputDir != null && !outputDir.trim().isEmpty()) {
            return outputDir;
        }
        return inputDir.trim();
    }
}
public class SimpleInjectionPoint {

    /**
     * Factory method to produce an {@link InjectionPoint} that describes the
     * specified class.
     *
     * @param cls the class the injection point describes
     * @return a new injection point wrapping {@code cls}
     */
    public static final <E> InjectionPoint<E> of(Class<E> cls) {
        return new SimpleInjectionPoint<E>(cls);
    }
}
public class ExecutionEnvironment { /** * Creates a DataSet from the given non - empty collection . The type of the data set is that * of the elements in the collection . * < p > The framework will try and determine the exact type from the collection elements . * In case of generic elements , it may be necessary to manually supply the type information * via { @ link # fromCollection ( Collection , TypeInformation ) } . * < p > Note that this operation will result in a non - parallel data source , i . e . a data source with * a parallelism of one . * @ param data The collection of elements to create the data set from . * @ return A DataSet representing the given collection . * @ see # fromCollection ( Collection , TypeInformation ) */ public < X > DataSource < X > fromCollection ( Collection < X > data ) { } }
if ( data == null ) { throw new IllegalArgumentException ( "The data must not be null." ) ; } if ( data . size ( ) == 0 ) { throw new IllegalArgumentException ( "The size of the collection must not be empty." ) ; } X firstValue = data . iterator ( ) . next ( ) ; TypeInformation < X > type = TypeExtractor . getForObject ( firstValue ) ; CollectionInputFormat . checkCollection ( data , type . getTypeClass ( ) ) ; return new DataSource < > ( this , new CollectionInputFormat < > ( data , type . createSerializer ( config ) ) , type , Utils . getCallLocationName ( ) ) ;
public class ObjectCacheFactory {

    /**
     * Creates a new {@link ObjectCacheInternal} instance. Each
     * <tt>ObjectCache</tt> implementation is wrapped by a
     * {@link CacheDistributor} and the distributor is wrapped by a
     * {@link MaterializationCache}.
     *
     * @param broker The PB instance to associate with the cache instance
     * @return the materialization cache wrapping the new distributor
     */
    public MaterializationCache createObjectCache(PersistenceBroker broker) {
        CacheDistributor cache = null;
        try {
            log.info("Start creating new ObjectCache instance");
            /*
            if default cache was not found, create an new instance of the default cache
            specified in the configuration. Then instantiate AllocatorObjectCache
            to handle per connection/per class caching instances.
            To support intern operations we wrap ObjectCache with an
            InternalObjectCache implementation
            */
            cache = new CacheDistributor(broker);
            log.info("Instantiate new " + cache.getClass().getName() + " for PB instance " + broker);
        } catch (Exception e) {
            // NOTE(review): on failure 'cache' remains null but is still handed
            // to MaterializationCache below, and the success log still fires -
            // verify MaterializationCache tolerates a null delegate, otherwise
            // this trades an early error for a later NPE.
            log.error("Error while initiation, please check your configuration" + " files and the used implementation class", e);
        }
        log.info("New ObjectCache instance was created");
        return new MaterializationCache(cache);
    }
}
public class SingleRxXian {

    /**
     * Calls the specified unit without parameters.
     *
     * @param group the unit group name
     * @param unit the unit name within the group
     * @return a Single emitting the unit's response
     */
    public static Single<UnitResponse> call(String group, String unit) {
        // Delegates with a fresh mutable empty map rather than an immutable
        // one - presumably so downstream code can add entries; verify against
        // the three-argument overload before changing this.
        return SingleRxXian.call(group, unit, new HashMap<>());
    }
}
public class BroadcastConnectedStream { /** * Assumes as inputs a { @ link BroadcastStream } and a non - keyed { @ link DataStream } and applies the given * { @ link BroadcastProcessFunction } on them , thereby creating a transformed output stream . * @ param function The { @ link BroadcastProcessFunction } that is called for each element in the stream . * @ param outTypeInfo The type of the output elements . * @ param < OUT > The type of the output elements . * @ return The transformed { @ link DataStream } . */ @ PublicEvolving public < OUT > SingleOutputStreamOperator < OUT > process ( final BroadcastProcessFunction < IN1 , IN2 , OUT > function , final TypeInformation < OUT > outTypeInfo ) { } }
Preconditions . checkNotNull ( function ) ; Preconditions . checkArgument ( ! ( inputStream1 instanceof KeyedStream ) , "A BroadcastProcessFunction can only be used on a non-keyed stream." ) ; TwoInputStreamOperator < IN1 , IN2 , OUT > operator = new CoBroadcastWithNonKeyedOperator < > ( clean ( function ) , broadcastStateDescriptors ) ; return transform ( "Co-Process-Broadcast" , outTypeInfo , operator ) ;
public class PassConfig {

    /**
     * Creates a type inference pass, wired to the compiler's reverse abstract
     * interpreter and to the top scope and typed scope creator held by this
     * pass configuration.
     *
     * @param compiler the compiler to create the pass for
     * @return a new type inference pass
     */
    final TypeInferencePass makeTypeInference(AbstractCompiler compiler) {
        return new TypeInferencePass(compiler, compiler.getReverseAbstractInterpreter(), topScope, typedScopeCreator);
    }
}
public class NonVoltDBBackend {

    /**
     * Potentially returns the specified String, after replacing certain
     * "variables", such as {table} or {column:pk}, in a QueryTransformer's
     * prefix, suffix or (group) replacement text, for which a corresponding
     * group value will be substituted. However, this base version just
     * returns the original String unchanged; it may be overridden by
     * sub-classes, to determine appropriate changes for that non-VoltDB
     * backend database.
     *
     * @param str the text possibly containing variables
     * @param groupNames the variable names (unused in this base implementation)
     * @param groupValues the substitution values (unused in this base implementation)
     * @param debugPrint whether to emit debug output (unused in this base implementation)
     * @return {@code str}, unchanged
     */
    protected String replaceGroupNameVariables(String str, List<String> groupNames, List<String> groupValues, boolean debugPrint) {
        // Intentional no-op default; database-specific subclasses override this.
        return str;
    }
}
public class LocalDateTime { /** * Returns a copy of this { @ code LocalDateTime } with the second - of - minute altered . * This instance is immutable and unaffected by this method call . * @ param second the second - of - minute to set in the result , from 0 to 59 * @ return a { @ code LocalDateTime } based on this date - time with the requested second , not null * @ throws DateTimeException if the second value is invalid */ public LocalDateTime withSecond ( int second ) { } }
LocalTime newTime = time . withSecond ( second ) ; return with ( date , newTime ) ;
public class CIType { /** * Tests , if this type the type in the parameter . * @ param _ type type to test * @ return true if this type otherwise false */ public boolean isType ( final org . efaps . admin . datamodel . Type _type ) { } }
return getType ( ) . equals ( _type ) ;
public class CommerceNotificationTemplateUserSegmentRelUtil {

    /**
     * Returns an ordered range of all the commerce notification template user
     * segment rels where commerceUserSegmentEntryId = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of
     * <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are not primary keys, they are indexes in the result
     * set, thus <code>0</code> refers to the first result in the set. Setting
     * both <code>start</code> and <code>end</code> to
     * {@link QueryUtil#ALL_POS} will return the full result set. If
     * <code>orderByComparator</code> is specified, the query will include the
     * given ORDER BY logic. If it is absent and pagination is required, the
     * default ORDER BY logic from
     * {@link CommerceNotificationTemplateUserSegmentRelModelImpl} is used.
     * With neither comparator nor pagination, for performance reasons the
     * query has no ORDER BY clause and results are sorted by primary key in
     * ascending order.
     *
     * @param commerceUserSegmentEntryId the commerce user segment entry ID
     * @param start the lower bound of the range of commerce notification template user segment rels
     * @param end the upper bound of the range (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching commerce notification template user segment rels
     */
    public static List<CommerceNotificationTemplateUserSegmentRel> findByCommerceUserSegmentEntryId(
            long commerceUserSegmentEntryId, int start, int end,
            OrderByComparator<CommerceNotificationTemplateUserSegmentRel> orderByComparator) {
        // Thin static facade over the persistence bean.
        return getPersistence().findByCommerceUserSegmentEntryId(commerceUserSegmentEntryId, start, end, orderByComparator);
    }
}