signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ResourceLoader {

    /**
     * Finds a resource with a given name. This is a convenience method for accessing a resource
     * from a channel or from the uPortal framework. If a well-formed URL is passed in, this method
     * will use that URL unchanged to find the resource. If the URL is not well-formed, this method
     * will look for the desired resource relative to the classpath. If the resource name starts
     * with "/", it is used unchanged; otherwise the package name of the requesting class is
     * prepended to the resource name.
     *
     * <p>Both successful lookups and failures are memoized in the class-level caches
     * ({@code resourceUrlCache} / {@code resourceUrlNotFoundCache}) when those caches are
     * non-null, so repeated lookups of the same (class, resource) pair are cheap.
     *
     * @param requestingClass the {@code java.lang.Class} object of the class that is attempting
     *     to load the resource
     * @param resource a String describing the full or partial URL of the resource to load
     * @return a URL identifying the requested resource
     * @throws ResourceMissingException if the resource cannot be found as a URL or on the classpath
     */
    public static URL getResourceAsURL(Class<?> requestingClass, String resource)
            throws ResourceMissingException {
        final Tuple<Class<?>, String> cacheKey = new Tuple<Class<?>, String>(requestingClass, resource);

        // Fast path: previously resolved URL for this (class, resource) pair.
        // The cache reference is copied to a local so a concurrent reassignment
        // of the static field cannot change mid-method.
        final Map<Tuple<Class<?>, String>, URL> resourceUrlCache = ResourceLoader.resourceUrlCache;
        URL resourceURL = resourceUrlCache != null ? resourceUrlCache.get(cacheKey) : null;
        if (resourceURL != null) {
            return resourceURL;
        }

        // Negative cache: a previous lookup already failed for this key.
        // A NEW exception is thrown (wrapping the cached one) so the stack trace
        // reflects the current call site rather than the original failure.
        final Map<Tuple<Class<?>, String>, ResourceMissingException> resourceUrlNotFoundCache = ResourceLoader.resourceUrlNotFoundCache;
        ResourceMissingException exception = resourceUrlNotFoundCache != null ? resourceUrlNotFoundCache.get(cacheKey) : null;
        if (exception != null) {
            throw new ResourceMissingException(exception);
        }

        try {
            // First interpretation: the resource string is itself a well-formed URL.
            resourceURL = new URL(resource);
        }
        catch (MalformedURLException murle) {
            // Not a URL — fall back to a classpath lookup relative to the requesting class.
            resourceURL = requestingClass.getResource(resource);
            if (resourceURL == null) {
                // Build the absolute classpath location for the error message:
                // absolute names are kept as-is, relative names get the package prefix.
                String resourceRelativeToClasspath = null;
                if (resource.startsWith("/"))
                    resourceRelativeToClasspath = resource;
                else
                    resourceRelativeToClasspath = '/' + requestingClass.getPackage().getName().replace('.', '/') + '/' + resource;
                exception = new ResourceMissingException(resource, resourceRelativeToClasspath, "Resource not found in classpath: " + resourceRelativeToClasspath);
                // Remember the failure so subsequent lookups fail fast.
                if (resourceUrlNotFoundCache != null) {
                    resourceUrlNotFoundCache.put(cacheKey, exception);
                }
                throw new ResourceMissingException(exception);
            }
        }

        // Remember the successful resolution.
        if (resourceUrlCache != null) {
            resourceUrlCache.put(cacheKey, resourceURL);
        }

        return resourceURL;
    }
}
public class LambdaInvokerFactory { /** * Creates a new Lambda invoker implementing the given interface and wrapping the given { @ code AWSLambda } client . * @ param interfaceClass the interface to implement * @ param awsLambda the lambda client to use for making remote calls * @ deprecated Use { @ link LambdaInvokerFactory # builder ( ) } to configure invoker factory . */ @ Deprecated public static < T > T build ( Class < T > interfaceClass , AWSLambda awsLambda ) { } }
return build ( interfaceClass , awsLambda , new LambdaInvokerFactoryConfig ( ) ) ;
public class AbstractExtendedSet { /** * { @ inheritDoc } */ @ Override public T get ( int i ) { } }
int size = size ( ) ; if ( i < 0 || i >= size ) throw new IndexOutOfBoundsException ( ) ; Iterator < T > itr ; if ( i < ( size / 2 ) ) { itr = iterator ( ) ; for ( int j = 0 ; j <= i - 1 ; j ++ ) itr . next ( ) ; } else { itr = descendingIterator ( ) ; for ( int j = size - 1 ; j >= i + 1 ; j -- ) itr . next ( ) ; } return itr . next ( ) ;
public class PropertiesUtils { /** * Loads a comma - separated list of integers from Properties . The list cannot include any whitespace . */ public static int [ ] getIntArray ( Properties props , String key ) { } }
Integer [ ] result = MetaClass . cast ( props . getProperty ( key ) , Integer [ ] . class ) ; return ArrayUtils . toPrimitive ( result ) ;
public class ChannelFinderClientImpl { /** * Set tag < tt > tag < / tt > on the set of channels { channels } and remove it from * all others . * @ param tag * - the tag to be set . * @ param channelNames * - the list of channels to which this tag will be added and * removed from all others . * @ throws ChannelFinderException - channelfinder exception */ public void set ( Tag . Builder tag , Collection < String > channelNames ) throws ChannelFinderException { } }
wrappedSubmit ( new SetTag ( tag . toXml ( ) , channelNames ) ) ;
public class ApiOvhMe { /** * Get this object properties * REST : GET / me / bill / { billId } * @ param billId [ required ] */ public OvhBill bill_billId_GET ( String billId ) throws IOException { } }
String qPath = "/me/bill/{billId}" ; StringBuilder sb = path ( qPath , billId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhBill . class ) ;
public class ConfigSettings { /** * Applies the default single thread configuration of the DiffTool to this * settings . */ public void defaultConfiguration ( ) { } }
clear ( ) ; setConfigParameter ( ConfigurationKeys . VALUE_MINIMUM_LONGEST_COMMON_SUBSTRING , 12 ) ; setConfigParameter ( ConfigurationKeys . COUNTER_FULL_REVISION , 1000 ) ; setConfigParameter ( ConfigurationKeys . LIMIT_TASK_SIZE_REVISIONS , 5000000l ) ; setConfigParameter ( ConfigurationKeys . LIMIT_TASK_SIZE_DIFFS , 1000000l ) ; setConfigParameter ( ConfigurationKeys . LIMIT_SQLSERVER_MAX_ALLOWED_PACKET , 1000000l ) ; setConfigParameter ( ConfigurationKeys . MODE_SURROGATES , SurrogateModes . DISCARD_REVISION ) ; setConfigParameter ( ConfigurationKeys . WIKIPEDIA_ENCODING , StandardCharsets . UTF_8 . toString ( ) ) ; setConfigParameter ( ConfigurationKeys . MODE_OUTPUT , OutputType . BZIP2 ) ; setConfigParameter ( ConfigurationKeys . MODE_DATAFILE_OUTPUT , false ) ; setConfigParameter ( ConfigurationKeys . MODE_ZIP_COMPRESSION_ENABLED , true ) ; setConfigParameter ( ConfigurationKeys . LIMIT_SQL_FILE_SIZE , 1000000000l ) ; setConfigParameter ( ConfigurationKeys . LOGGING_PATH_DIFFTOOL , "logs" ) ; setConfigParameter ( ConfigurationKeys . LOGGING_LOGLEVEL_DIFFTOOL , Level . INFO ) ; setConfigParameter ( ConfigurationKeys . VERIFICATION_DIFF , false ) ; setConfigParameter ( ConfigurationKeys . VERIFICATION_ENCODING , false ) ; setConfigParameter ( ConfigurationKeys . MODE_DEBUG_OUTPUT , false ) ; setConfigParameter ( ConfigurationKeys . MODE_STATISTICAL_OUTPUT , false ) ; Set < Integer > defaultNamespaces = new HashSet < Integer > ( ) ; defaultNamespaces . add ( 0 ) ; defaultNamespaces . add ( 1 ) ; setConfigParameter ( ConfigurationKeys . NAMESPACES_TO_KEEP , defaultNamespaces ) ; this . type = ConfigEnum . DEFAULT ;
public class AuthIdentificationResult { /** * Factory method for success authentication . * @ param aAuthToken * The auth token . May not be < code > null < / code > . * @ return Never < code > null < / code > . */ @ Nonnull public static AuthIdentificationResult createSuccess ( @ Nonnull final IAuthToken aAuthToken ) { } }
ValueEnforcer . notNull ( aAuthToken , "AuthToken" ) ; return new AuthIdentificationResult ( aAuthToken , null ) ;
public class SerializedFormBuilder {

    /**
     * Build the serialization overview for the given class.
     *
     * <p>Only applies to classes that declare a {@code serialPersistentFields} array; the
     * overview documents the first (and only) such field: its deprecation info and, unless
     * comments are suppressed, its description and tags.
     *
     * @param classDoc the class to print the overview for
     * @param classContentTree content tree to which the documentation will be added
     */
    public void buildFieldSerializationOverview(ClassDoc classDoc, Content classContentTree) {
        if (classDoc.definesSerializableFields()) {
            // serializableFields() returns exactly one FieldDoc here: the
            // serialPersistentFields array itself.
            FieldDoc serialPersistentField = classDoc.serializableFields()[0];
            // Check to see if there are inline comments, tags or deprecation
            // information to be printed; skip the whole section otherwise.
            if (fieldWriter.shouldPrintOverview(serialPersistentField)) {
                Content serializableFieldsTree = fieldWriter.getSerializableFieldsHeader();
                Content fieldsOverviewContentTree = fieldWriter.getFieldsContentHeader(true);
                fieldWriter.addMemberDeprecatedInfo(serialPersistentField, fieldsOverviewContentTree);
                // Descriptions/tags are omitted when the -nocomment option is set.
                if (!configuration.nocomment) {
                    fieldWriter.addMemberDescription(serialPersistentField, fieldsOverviewContentTree);
                    fieldWriter.addMemberTags(serialPersistentField, fieldsOverviewContentTree);
                }
                serializableFieldsTree.addContent(fieldsOverviewContentTree);
                classContentTree.addContent(fieldWriter.getSerializableFields(configuration.getText("doclet.Serialized_Form_class"), serializableFieldsTree));
            }
        }
    }
}
public class OAHashSet { /** * Adds the specified element to this set if it is not already present . * This variant of { @ link # add ( Object ) } acts as an optimisation to * enable avoiding { @ link # hashCode ( ) } calls if the hash is already * known on the caller side . * @ param elementToAdd element to be added to this set * @ param hash the hash of the element to be added * @ return < tt > true < / tt > if this set did not already contain the specified * element * @ see # add ( Object ) */ public boolean add ( E elementToAdd , int hash ) { } }
checkNotNull ( elementToAdd ) ; int index = hash & mask ; // using the hashes array for looping and comparison if possible , hence we ' re cache friendly while ( hashes [ index ] != 0 || table [ index ] != null ) { if ( hash == hashes [ index ] && elementToAdd . equals ( table [ index ] ) ) { return false ; } index = ++ index & mask ; } size ++ ; version ++ ; table [ index ] = elementToAdd ; hashes [ index ] = hash ; if ( size > resizeThreshold ) { increaseCapacity ( ) ; } return true ;
public class Nodes {

    /**
     * Replace node of given name.
     *
     * <p>The replacement only happens if {@code oldOne} is still the node currently
     * registered under its name (guards against racing replacements). The in-memory
     * swap runs under the global queue lock; afterwards the new node is persisted,
     * the old node's on-disk directory is removed if the name changed, and node
     * listeners are notified.
     *
     * @param oldOne the node to be replaced; must still be registered under its name
     * @param newOne the replacement node
     * @return {@code true} if node was replaced.
     * @throws IOException if persisting the new node fails
     * @since 2.8
     */
    public boolean replaceNode(final Node oldOne, final @Nonnull Node newOne) throws IOException {
        // Identity check (==, not equals): only replace if oldOne is still the
        // registered instance for that name.
        if (oldOne == nodes.get(oldOne.getNodeName())) {
            // use the queue lock until Nodes has a way of directly modifying a single node.
            Queue.withLock(new Runnable() {
                public void run() {
                    // Swap the map entries, then refresh computers and label caches
                    // while still holding the lock.
                    Nodes.this.nodes.remove(oldOne.getNodeName());
                    Nodes.this.nodes.put(newOne.getNodeName(), newOne);
                    jenkins.updateComputerList();
                    jenkins.trimLabels();
                }
            });
            // Persist the new node outside the lock.
            updateNode(newOne);
            // If the node was renamed, delete the stale on-disk directory.
            if (!newOne.getNodeName().equals(oldOne.getNodeName())) {
                Util.deleteRecursive(new File(getNodesDir(), oldOne.getNodeName()));
            }
            NodeListener.fireOnUpdated(oldOne, newOne);

            return true;
        } else {
            // Stale reference — somebody already swapped the node out.
            return false;
        }
    }
}
public class ConsistencyCheck { /** * Convert a key - version - nodeSet information to string * @ param key The key * @ param versionMap mapping versions to set of PrefixNodes * @ param storeName store ' s name * @ param partitionId partition scanned * @ return a string that describe the information passed in */ public static String keyVersionToString ( ByteArray key , Map < Value , Set < ClusterNode > > versionMap , String storeName , Integer partitionId ) { } }
StringBuilder record = new StringBuilder ( ) ; for ( Map . Entry < Value , Set < ClusterNode > > versionSet : versionMap . entrySet ( ) ) { Value value = versionSet . getKey ( ) ; Set < ClusterNode > nodeSet = versionSet . getValue ( ) ; record . append ( "BAD_KEY," ) ; record . append ( storeName + "," ) ; record . append ( partitionId + "," ) ; record . append ( ByteUtils . toHexString ( key . get ( ) ) + "," ) ; record . append ( nodeSet . toString ( ) . replace ( ", " , ";" ) + "," ) ; record . append ( value . toString ( ) ) ; } return record . toString ( ) ;
public class InternalXbaseWithAnnotationsParser {

    /**
     * ANTLR-generated rule method — do not edit by hand.
     *
     * InternalXbaseWithAnnotations.g:6649:1: ruleValidID returns
     * [AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken()] : this_ID_0 = RULE_ID;
     *
     * @return the datatype token holding the matched ID text
     * @throws RecognitionException on a parse failure that is not recovered inline
     */
    public final AntlrDatatypeRuleToken ruleValidID() throws RecognitionException {
        AntlrDatatypeRuleToken current = new AntlrDatatypeRuleToken();
        Token this_ID_0 = null;

        enterRule();

        try {
            // InternalXbaseWithAnnotations.g:6655:2: (this_ID_0 = RULE_ID)
            // InternalXbaseWithAnnotations.g:6656:2: this_ID_0 = RULE_ID
            {
                this_ID_0 = (Token) match(input, RULE_ID, FOLLOW_2);
                // Bail out early while backtracking if the match failed.
                if (state.failed) return current;
                // Semantic actions only run on the final (non-backtracking) pass.
                if (state.backtracking == 0) {
                    current.merge(this_ID_0);
                }
                if (state.backtracking == 0) {
                    newLeafNode(this_ID_0, grammarAccess.getValidIDAccess().getIDTerminalRuleCall());
                }
            }
            if (state.backtracking == 0) {
                leaveRule();
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: resync and record skipped tokens.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class FacesContext { /** * < p class = " changed _ added _ 2_0 " > Return < code > true < / code > if the * current { @ link ProjectStage } as returned by the { @ link * Application } instance is equal to < code > stage < / code > , otherwise * return < code > false < / code > < / p > * @ param stage the { @ link ProjectStage } to check * @ throws IllegalStateException if this method is called after * this instance has been released * @ throws NullPointerException if < code > stage < / code > is < code > null < / code > */ public boolean isProjectStage ( ProjectStage stage ) { } }
if ( stage == null ) { throw new NullPointerException ( ) ; } return ( stage . equals ( getApplication ( ) . getProjectStage ( ) ) ) ;
public class CommerceDiscountRelServiceBaseImpl {

    /**
     * Sets the commerce discount rel local service.
     *
     * <p>Plain dependency-injection setter; simply stores the reference for later use.
     *
     * @param commerceDiscountRelLocalService the commerce discount rel local service
     */
    public void setCommerceDiscountRelLocalService(
            com.liferay.commerce.discount.service.CommerceDiscountRelLocalService commerceDiscountRelLocalService) {
        this.commerceDiscountRelLocalService = commerceDiscountRelLocalService;
    }
}
public class SchemaTableTree {

    /**
     * Constructs the select clause with the fields required to join the previous and the next
     * SchemaTable in the query stack.
     *
     * <p>Three kinds of columns are added to {@code columnList}:
     * <ol>
     *   <li>the foreign-key (or identifier) columns that let the FIRST table join back to
     *       {@code previousSchemaTableTree};</li>
     *   <li>the foreign-key (or identifier) columns that let the LAST table join forward to
     *       {@code nextSchemaTableTree};</li>
     *   <li>when there is no next table, the labeled/emitted columns for the final step.</li>
     * </ol>
     *
     * @param sqlgGraph the graph, used when printing edge in/out vertex ids
     * @param dropStep whether this select is part of a drop step (suppresses edge vertex-id columns)
     * @param columnList accumulator for the selected columns; its string form is returned
     * @param distinctQueryStack the current sql stack; its first element is the entry table (if an
     *     edge table, its foreign key to the previous table must be selected), its last the exit table
     * @param previousSchemaTableTree the previous schemaTableTree that will be joined to; may be null
     * @param nextSchemaTableTree the table to join to; null for the last table as there is nothing
     *     to join to
     * @return the rendered select-clause column list
     */
    private static String constructSelectClause(
            SqlgGraph sqlgGraph,
            boolean dropStep,
            ColumnList columnList,
            LinkedList<SchemaTableTree> distinctQueryStack,
            SchemaTableTree previousSchemaTableTree,
            SchemaTableTree nextSchemaTableTree) {

        SchemaTableTree firstSchemaTableTree = distinctQueryStack.getFirst();
        SchemaTableTree lastSchemaTableTree = distinctQueryStack.getLast();
        SchemaTable firstSchemaTable = firstSchemaTableTree.getSchemaTable();
        SchemaTable lastSchemaTable = lastSchemaTableTree.getSchemaTable();

        // BOTH is resolved into OUT/IN before this point; a BOTH here is a programming error.
        Preconditions.checkState(!(previousSchemaTableTree != null && previousSchemaTableTree.direction == Direction.BOTH), "Direction should never be BOTH");
        Preconditions.checkState(!(nextSchemaTableTree != null && nextSchemaTableTree.direction == Direction.BOTH), "Direction should never be BOTH");
        // The join is always between an edge and vertex or vertex and edge table.
        Preconditions.checkState(!(nextSchemaTableTree != null && lastSchemaTable.getTable().startsWith(VERTEX_PREFIX) && nextSchemaTableTree.getSchemaTable().getTable().startsWith(VERTEX_PREFIX)), "Join can not be between 2 vertex tables!");
        Preconditions.checkState(!(nextSchemaTableTree != null && lastSchemaTable.getTable().startsWith(EDGE_PREFIX) && nextSchemaTableTree.getSchemaTable().getTable().startsWith(EDGE_PREFIX)), "Join can not be between 2 edge tables!");
        Preconditions.checkState(!(previousSchemaTableTree != null && firstSchemaTable.getTable().startsWith(VERTEX_PREFIX) && previousSchemaTableTree.getSchemaTable().getTable().startsWith(VERTEX_PREFIX)), "Join can not be between 2 vertex tables!");
        Preconditions.checkState(!(previousSchemaTableTree != null && firstSchemaTable.getTable().startsWith(EDGE_PREFIX) && previousSchemaTableTree.getSchemaTable().getTable().startsWith(EDGE_PREFIX)), "Join can not be between 2 edge tables!");

        // NOTE(review): temp fake labels added below (SQLG_PATH_TEMP_FAKE_LABEL) are
        // per-query; the same schemaTableTree instance is reused across query stacks,
        // so they must not leak into other queries.

        // ---- join to the previous label/table ----
        if (previousSchemaTableTree != null && firstSchemaTable.getTable().startsWith(EDGE_PREFIX)) {
            // Edge table joining back to a vertex table: select the edge's foreign-key
            // column(s) pointing at the previous vertex.
            Preconditions.checkState(previousSchemaTableTree.getSchemaTable().getTable().startsWith(VERTEX_PREFIX), "Expected table to start with %s", VERTEX_PREFIX);
            String previousRawLabel = previousSchemaTableTree.getSchemaTable().getTable().substring(VERTEX_PREFIX.length());
            if (firstSchemaTableTree.direction == Direction.OUT) {
                if (previousSchemaTableTree.hasIDPrimaryKey) {
                    // Single ID primary key: one OUT foreign-key column.
                    columnList.add(firstSchemaTable,
                            previousSchemaTableTree.getSchemaTable().getSchema() + "." + previousRawLabel + Topology.OUT_VERTEX_COLUMN_END,
                            previousSchemaTableTree.stepDepth,
                            firstSchemaTableTree.calculatedAliasVertexForeignKeyColumnEnd(previousSchemaTableTree, firstSchemaTableTree.direction));
                } else {
                    // User-defined identifiers: one foreign-key column per identifier.
                    // The distribution column is stored unqualified on the edge table.
                    ListOrderedSet<String> identifiers = previousSchemaTableTree.getIdentifiers();
                    for (String identifier : identifiers) {
                        if (previousSchemaTableTree.isDistributed() && previousSchemaTableTree.distributionColumn.equals(identifier)) {
                            columnList.add(firstSchemaTable,
                                    identifier,
                                    previousSchemaTableTree.stepDepth,
                                    firstSchemaTableTree.calculatedAliasVertexForeignKeyColumnEnd(previousSchemaTableTree, firstSchemaTableTree.direction, identifier));
                        } else {
                            columnList.add(firstSchemaTable,
                                    previousSchemaTableTree.getSchemaTable().getSchema() + "." + previousRawLabel + "." + identifier + Topology.OUT_VERTEX_COLUMN_END,
                                    previousSchemaTableTree.stepDepth,
                                    firstSchemaTableTree.calculatedAliasVertexForeignKeyColumnEnd(previousSchemaTableTree, firstSchemaTableTree.direction, identifier));
                        }
                    }
                }
            } else {
                // Direction.IN: same as above but with the IN foreign-key column suffix.
                if (previousSchemaTableTree.hasIDPrimaryKey) {
                    columnList.add(firstSchemaTable,
                            previousSchemaTableTree.getSchemaTable().getSchema() + "." + previousRawLabel + Topology.IN_VERTEX_COLUMN_END,
                            previousSchemaTableTree.stepDepth,
                            firstSchemaTableTree.calculatedAliasVertexForeignKeyColumnEnd(previousSchemaTableTree, firstSchemaTableTree.direction));
                } else {
                    ListOrderedSet<String> identifiers = previousSchemaTableTree.getIdentifiers();
                    for (String identifier : identifiers) {
                        if (previousSchemaTableTree.isDistributed() && previousSchemaTableTree.distributionColumn.equals(identifier)) {
                            columnList.add(firstSchemaTable,
                                    identifier,
                                    previousSchemaTableTree.stepDepth,
                                    firstSchemaTableTree.calculatedAliasVertexForeignKeyColumnEnd(previousSchemaTableTree, firstSchemaTableTree.direction, identifier));
                        } else {
                            columnList.add(firstSchemaTable,
                                    previousSchemaTableTree.getSchemaTable().getSchema() + "." + previousRawLabel + "." + identifier + Topology.IN_VERTEX_COLUMN_END,
                                    previousSchemaTableTree.stepDepth,
                                    firstSchemaTableTree.calculatedAliasVertexForeignKeyColumnEnd(previousSchemaTableTree, firstSchemaTableTree.direction, identifier));
                        }
                    }
                }
            }
        } else if (previousSchemaTableTree != null && firstSchemaTable.getTable().startsWith(VERTEX_PREFIX)) {
            // Vertex table joining back to an edge table: select the vertex ID.
            // if user defined identifiers then the regular properties make up the ids.
            if (firstSchemaTableTree.hasIDPrimaryKey) {
                columnList.add(firstSchemaTable, Topology.ID, firstSchemaTableTree.stepDepth, firstSchemaTableTree.calculatedAliasId());
            }
        }

        // ---- join to the next table/label ----
        if (nextSchemaTableTree != null && lastSchemaTable.getTable().startsWith(EDGE_PREFIX)) {
            // Edge table joining forward to a vertex table.
            Preconditions.checkState(nextSchemaTableTree.getSchemaTable().getTable().startsWith(VERTEX_PREFIX), "Expected table to start with %s", VERTEX_PREFIX);
            String nextRawLabel = nextSchemaTableTree.getSchemaTable().getTable().substring(VERTEX_PREFIX.length());
            if (nextSchemaTableTree.isEdgeVertexStep()) {
                // EdgeVertexStep follows the edge in the step's own direction:
                // OUT step -> OUT foreign key, IN step -> IN foreign key.
                if (nextSchemaTableTree.hasIDPrimaryKey) {
                    columnList.add(lastSchemaTable,
                            nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + (nextSchemaTableTree.direction == Direction.OUT ? Topology.OUT_VERTEX_COLUMN_END : Topology.IN_VERTEX_COLUMN_END),
                            nextSchemaTableTree.stepDepth,
                            lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + (nextSchemaTableTree.direction == Direction.OUT ? Topology.OUT_VERTEX_COLUMN_END : Topology.IN_VERTEX_COLUMN_END));
                } else {
                    ListOrderedSet<String> identifiers = nextSchemaTableTree.getIdentifiers();
                    for (String identifier : identifiers) {
                        if (nextSchemaTableTree.isDistributed() && nextSchemaTableTree.distributionColumn.equals(identifier)) {
                            columnList.add(lastSchemaTable,
                                    identifier,
                                    nextSchemaTableTree.stepDepth,
                                    lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + identifier);
                        } else {
                            columnList.add(lastSchemaTable,
                                    nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + "." + identifier + (nextSchemaTableTree.direction == Direction.OUT ? Topology.OUT_VERTEX_COLUMN_END : Topology.IN_VERTEX_COLUMN_END),
                                    nextSchemaTableTree.stepDepth,
                                    lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + "." + identifier + (nextSchemaTableTree.direction == Direction.OUT ? Topology.OUT_VERTEX_COLUMN_END : Topology.IN_VERTEX_COLUMN_END));
                        }
                    }
                }
                constructAllLabeledFromClause(distinctQueryStack, columnList);
            } else {
                // VertexStep going "through" the edge: the foreign key is the one on the
                // OPPOSITE side (OUT step -> IN foreign key and vice versa).
                if (nextSchemaTableTree.hasIDPrimaryKey) {
                    columnList.add(lastSchemaTable,
                            nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + (nextSchemaTableTree.direction == Direction.OUT ? Topology.IN_VERTEX_COLUMN_END : Topology.OUT_VERTEX_COLUMN_END),
                            nextSchemaTableTree.stepDepth,
                            lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + (nextSchemaTableTree.direction == Direction.OUT ? Topology.IN_VERTEX_COLUMN_END : Topology.OUT_VERTEX_COLUMN_END));
                } else {
                    ListOrderedSet<String> identifiers = nextSchemaTableTree.getIdentifiers();
                    for (String identifier : identifiers) {
                        if (nextSchemaTableTree.isDistributed() && nextSchemaTableTree.distributionColumn.equals(identifier)) {
                            columnList.add(lastSchemaTable,
                                    identifier,
                                    nextSchemaTableTree.stepDepth,
                                    lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + identifier);
                        } else {
                            columnList.add(lastSchemaTable,
                                    nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + "." + identifier + (nextSchemaTableTree.direction == Direction.OUT ? Topology.IN_VERTEX_COLUMN_END : Topology.OUT_VERTEX_COLUMN_END),
                                    nextSchemaTableTree.stepDepth,
                                    lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + nextSchemaTableTree.getSchemaTable().getSchema() + "." + nextRawLabel + "." + identifier + (nextSchemaTableTree.direction == Direction.OUT ? Topology.IN_VERTEX_COLUMN_END : Topology.OUT_VERTEX_COLUMN_END));
                        }
                    }
                }
                constructAllLabeledFromClause(distinctQueryStack, columnList);
                constructEmitEdgeIdFromClause(distinctQueryStack, columnList);
            }
        } else if (nextSchemaTableTree != null && lastSchemaTable.getTable().startsWith(VERTEX_PREFIX)) {
            // Vertex table joining forward to an edge table: select the vertex's
            // ID (or identifier columns) for the next join.
            if (lastSchemaTableTree.hasIDPrimaryKey) {
                columnList.add(lastSchemaTable,
                        Topology.ID,
                        nextSchemaTableTree.stepDepth,
                        lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + Topology.ID);
            } else {
                ListOrderedSet<String> identifiers = lastSchemaTableTree.getIdentifiers();
                for (String identifier : identifiers) {
                    columnList.add(lastSchemaTable,
                            identifier,
                            nextSchemaTableTree.stepDepth,
                            lastSchemaTable.getSchema() + "." + lastSchemaTable.getTable() + "." + identifier);
                }
            }
            constructAllLabeledFromClause(distinctQueryStack, columnList);
        }

        // The last schemaTableTree in the call stack has no nextSchemaTableTree.
        // This last element's properties need to be returned, including all labeled
        // properties for this path.
        if (nextSchemaTableTree == null) {
            if (!dropStep && lastSchemaTableTree.getSchemaTable().isEdgeTable()) {
                printEdgeInOutVertexIdFromClauseFor(sqlgGraph, firstSchemaTableTree, lastSchemaTableTree, columnList);
            }
            // Unlabeled final step gets a temporary fake label so its columns are selected.
            if (!lastSchemaTableTree.hasLabels()) {
                lastSchemaTableTree.addLabel(lastSchemaTableTree.getStepDepth() + BaseStrategy.PATH_LABEL_SUFFIX + BaseStrategy.SQLG_PATH_TEMP_FAKE_LABEL);
            }
            constructAllLabeledFromClause(dropStep, distinctQueryStack, columnList);
            constructEmitFromClause(distinctQueryStack, columnList);
        }
        return columnList.toString();
    }
}
public class ParaClient { /** * Refreshes the JWT access token . This requires a valid existing token . * Call { @ link # signIn ( java . lang . String , java . lang . String ) } first . * @ return true if token was refreshed */ protected boolean refreshToken ( ) { } }
long now = System . currentTimeMillis ( ) ; boolean notExpired = tokenKeyExpires != null && tokenKeyExpires > now ; boolean canRefresh = tokenKeyNextRefresh != null && ( tokenKeyNextRefresh < now || tokenKeyNextRefresh > tokenKeyExpires ) ; // token present and NOT expired if ( tokenKey != null && notExpired && canRefresh ) { Map < String , Object > result = getEntity ( invokeGet ( JWT_PATH , null ) , Map . class ) ; if ( result != null && result . containsKey ( "user" ) && result . containsKey ( "jwt" ) ) { Map < ? , ? > jwtData = ( Map < ? , ? > ) result . get ( "jwt" ) ; tokenKey = ( String ) jwtData . get ( "access_token" ) ; tokenKeyExpires = ( Long ) jwtData . get ( "expires" ) ; tokenKeyNextRefresh = ( Long ) jwtData . get ( "refresh" ) ; return true ; } else { clearAccessToken ( ) ; } } return false ;
public class ClusTree {

    /**
     * Insert newPoint into the tree using the BreadthFirst strategy, i.e. insert into
     * the closest entry in a leaf node.
     *
     * <p>Insertion preference order: a free slot in the best-fitting leaf, then an
     * irrelevant (outdated, low-weight) entry to overwrite, then a split (if allowed)
     * or a merge without split. Afterwards, ancestors are updated up to the root.
     *
     * @param newPoint the kernel to insert
     * @param budget time budget (unused on this path — TODO confirm against callers)
     * @param timestamp current logical time, used for aging
     * @return always {@code null} on this strategy
     */
    private Entry insertBreadthFirst(ClusKernel newPoint, Budget budget, long timestamp) {
        // check all leaf nodes and get the one with the closest entry to newPoint
        Node bestFit = findBestLeafNode(newPoint);
        // Age the leaf's entries before deciding what is irrelevant.
        bestFit.makeOlder(timestamp, negLambda);
        Entry parent = bestFit.getEntries()[0].getParentEntry();
        // Search for an Entry with a weight under the threshold.
        Entry irrelevantEntry = bestFit.getIrrelevantEntry(this.weightThreshold);
        int numFreeEntries = bestFit.numFreeEntries();
        Entry newEntry = new Entry(newPoint.getCenter().length,
                newPoint, timestamp, parent, bestFit);
        // if there is space, add it to the node (doesn't ever occur, since nodes are
        // created with 3 entries)
        if (numFreeEntries > 0) {
            bestFit.addEntry(newEntry, timestamp);
        }
        // if outdated cluster in this best fitting node, replace it
        else if (irrelevantEntry != null) {
            irrelevantEntry.overwriteOldEntry(newEntry);
        }
        // if there is space/outdated cluster on path to top, split. Else merge without split
        else {
            if (existsOutdatedEntryOnPath(bestFit) || !this.hasMaximalSize()) {
                // We have to split.
                insertHereWithSplit(newEntry, bestFit, timestamp);
            } else {
                mergeEntryWithoutSplit(bestFit, newEntry, timestamp);
            }
        }
        // update all nodes on path to top.
        if (bestFit.getEntries()[0].getParentEntry() != null)
            updateToTop(bestFit.getEntries()[0].getParentEntry().getNode());
        return null;
    }
}
public class GraphAnalysisLoader {

    /**
     * Finds the next {@code '$'} in a class name which can be changed to a {@code '.'}
     * when computing a canonical class name.
     *
     * <p>Dollars at the very beginning or end of the name, or immediately adjacent to a
     * dot in {@code current}, are skipped: a type name never begins or ends with a dot,
     * and two consecutive dots never occur.
     *
     * @param className the raw class name being scanned for {@code '$'}
     * @param current the working form of the name, consulted for adjacent dots
     * @param searchStart index at which to begin searching
     * @return the index of the next convertible {@code '$'}, or {@code -1} if none remains
     */
    private static int nextDollar(String className, CharSequence current, int searchStart) {
        for (int candidate = className.indexOf('$', searchStart);
                candidate != -1;
                candidate = className.indexOf('$', candidate + 1)) {
            // Boundary check first so charAt(candidate - 1) / charAt(candidate + 1)
            // are only evaluated for interior positions.
            boolean atBoundary = candidate == 0 || candidate == className.length() - 1;
            if (atBoundary
                    || current.charAt(candidate - 1) == '.'
                    || current.charAt(candidate + 1) == '.') {
                continue;
            }
            return candidate;
        }
        return -1;
    }
}
public class Endpoint { /** * Creates a new { @ link Endpoint } instance with an additional path segment . * Typical usage is to add an ID to an endpoint path , which is why this method returns a new instance , rather than modifying the existing one . * This allows you add different ID at different times without affecting the original instance . * @ param segment The segment to be added . * @ return A copy of this instance , with the additional path segment . */ public Endpoint addPathSegment ( String segment ) { } }
Endpoint configured = new Endpoint ( this ) ; if ( StringUtils . isNotBlank ( segment ) ) { configured . path . add ( segment ) ; } return configured ;
public class VariantMetadataManager {

    /**
     * Retrieve the pedigree objects related to the input study ID.
     *
     * <p>Two passes over the study's individuals: the first creates one {@code Pedigree}
     * per family and one {@code Member} per individual (parsing typed attributes from the
     * first sample's annotations); the second wires up father/mother references and
     * sibling lists.
     *
     * @param studyId Study ID
     * @return List of Pedigree objects (empty if the study is unknown)
     */
    public List<Pedigree> getPedigree(String studyId) {
        Member dest;

        // pedigreeMap: family name -> Pedigree; individualMap: "family_individual" -> Member.
        Map<String, Pedigree> pedigreeMap = new HashMap<>();
        Map<String, Member> individualMap = new HashMap<>();

        VariantStudyMetadata variantStudyMetadata = getVariantStudyMetadata(studyId);
        if (variantStudyMetadata != null) {
            // first loop: create pedigrees and members
            for (org.opencb.biodata.models.metadata.Individual src : variantStudyMetadata.getIndividuals()) {
                String pedigreeName = src.getFamily();
                if (!pedigreeMap.containsKey(pedigreeName)) {
                    pedigreeMap.put(pedigreeName, new Pedigree(pedigreeName, new ArrayList<>(), new HashMap<>()));
                }

                // main fields
                dest = new Member(src.getId(), Member.Sex.getEnum(src.getSex()),
                        Member.AffectionStatus.getEnum(src.getPhenotype()));

                // attributes: parsed from the FIRST sample's annotations only
                if (src.getSamples() != null && src.getSamples().size() > 0) {
                    Map<String, String> annotation = src.getSamples().get(0).getAnnotations();
                    if (annotation != null) {
                        Map<String, Object> variables = new HashMap<>();
                        for (String key : annotation.keySet()) {
                            // Skip the core pedigree fields — they are handled above.
                            if (key.equals(INDIVIDUAL_ID) || key.equals(INDIVIDUAL_FAMILY)
                                    || key.equals(INDIVIDUAL_FATHER) || key.equals(INDIVIDUAL_MOTHER)
                                    || key.equals(INDIVIDUAL_SEX) || key.equals(INDIVIDUAL_PHENOTYPE)) {
                                continue;
                            }
                            // Keys may carry a type suffix after ':' — i(nt), d(ouble),
                            // b(oolean); anything else stays a String.
                            String fields[] = key.split(":");
                            if (fields.length > 1) {
                                switch (fields[1].toLowerCase()) {
                                    case "i":
                                        variables.put(fields[0], Integer.parseInt(annotation.get(key)));
                                        break;
                                    case "d":
                                        variables.put(fields[0], Double.parseDouble(annotation.get(key)));
                                        break;
                                    case "b":
                                        variables.put(fields[0], Boolean.parseBoolean(annotation.get(key)));
                                        break;
                                    default:
                                        variables.put(fields[0], annotation.get(key));
                                }
                            } else {
                                variables.put(fields[0], annotation.get(key));
                            }
                        }
                        dest.setAttributes(variables);
                    }
                }
                pedigreeMap.get(pedigreeName).getMembers().add(dest);
                // Members are keyed by family + name to disambiguate across families.
                individualMap.put(pedigreeName + "_" + dest.getName(), dest);
            }

            // second loop: setting fathers, mothers, partners and children
            for (org.opencb.biodata.models.metadata.Individual src : variantStudyMetadata.getIndividuals()) {
                // update father, mother and child
                Member father = individualMap.get(src.getFamily() + "_" + src.getFather());
                Member mother = individualMap.get(src.getFamily() + "_" + src.getMother());
                Member child = individualMap.get(src.getFamily() + "_" + src.getId());

                // setting father and children
                if (father != null) {
                    child.setFather(father);
                    if (father.getMultiples() == null) {
                        Multiples multiples = new Multiples().setType("children").setSiblings(new ArrayList<>());
                        father.setMultiples(multiples);
                    }
                    father.getMultiples().getSiblings().add(child.getName());
                }

                // setting mother and children
                if (mother != null) {
                    child.setMother(mother);
                    if (mother.getMultiples() == null) {
                        Multiples multiples = new Multiples().setType("children").setSiblings(new ArrayList<>());
                        mother.setMultiples(multiples);
                    }
                    mother.getMultiples().getSiblings().add(child.getName());
                }
            }
        }

        // create the list of Pedigree objects from the map
        return new ArrayList<>(pedigreeMap.values());
    }
}
public class CalendarCodeGenerator {

    /**
     * Builds the {@code formatWrapped} override: a method that formats a date and a time
     * (named or skeleton based) inside a localized wrapper pattern.
     *
     * The generated method switches on {@code wrapperType}; case labels that share the
     * same wrapper pattern are deduplicated into a single body via
     * {@code deduplicateFormats(format)} (key = wrapper pattern, value = the case labels).
     */
    private MethodSpec buildWrapperMethod(Format format) {
        MethodSpec.Builder method = MethodSpec.methodBuilder("formatWrapped")
            .addAnnotation(Override.class)
            .addModifiers(PUBLIC)
            .addParameter(CALENDAR_FORMAT, "wrapperType")
            .addParameter(CALENDAR_FORMAT, "dateType")
            .addParameter(CALENDAR_FORMAT, "timeType")
            .addParameter(String.class, "dateSkel")
            .addParameter(String.class, "timeSkel")
            .addParameter(ZonedDateTime.class, "d")
            .addParameter(StringBuilder.class, "b");
        // wrapper pattern -> list of case labels that use it
        Map<String, List<String>> map = deduplicateFormats(format);
        method.beginControlFlow("switch (wrapperType)");
        for (Map.Entry<String, List<String>> entry : map.entrySet()) {
            // emit all fall-through case labels for this shared wrapper body
            for (String type : entry.getValue()) {
                method.addCode("case $L:\n", type);
            }
            method.beginControlFlow("");
            addWrapper(method, entry.getKey());
            method.addStatement("break");
            method.endControlFlow();
        }
        method.endControlFlow();
        return method.build();
    }
}
public class ScriptRuntime {

    /**
     * Converts the value to an object, following ECMA 9.9 (ToObject).
     *
     * null and undefined raise a TypeError; Scriptables pass through unchanged;
     * primitives (Symbol, CharSequence, Number, Boolean) are wrapped in the
     * corresponding Native* object with the matching built-in prototype.
     * Anything else is wrapped as a LiveConnect (host) object via the WrapFactory.
     *
     * @throws EcmaError (TypeError) for null/undefined input
     */
    public static Scriptable toObject(Context cx, Scriptable scope, Object val) {
        if (val == null) {
            throw typeError0("msg.null.to.object");
        }
        if (Undefined.isUndefined(val)) {
            throw typeError0("msg.undef.to.object");
        }
        // Symbol check must come before the Scriptable check: NativeSymbol is a
        // Scriptable but ToObject on a symbol must produce a fresh wrapper object.
        if (isSymbol(val)) {
            NativeSymbol result = new NativeSymbol((NativeSymbol) val);
            setBuiltinProtoAndParent(result, scope, TopLevel.Builtins.Symbol);
            return result;
        }
        if (val instanceof Scriptable) {
            return (Scriptable) val;
        }
        if (val instanceof CharSequence) {
            // FIXME we want to avoid toString() here, especially for concat()
            NativeString result = new NativeString((CharSequence) val);
            setBuiltinProtoAndParent(result, scope, TopLevel.Builtins.String);
            return result;
        }
        if (val instanceof Number) {
            NativeNumber result = new NativeNumber(((Number) val).doubleValue());
            setBuiltinProtoAndParent(result, scope, TopLevel.Builtins.Number);
            return result;
        }
        if (val instanceof Boolean) {
            NativeBoolean result = new NativeBoolean(((Boolean) val).booleanValue());
            setBuiltinProtoAndParent(result, scope, TopLevel.Builtins.Boolean);
            return result;
        }
        // Extension: Wrap as a LiveConnect object.
        Object wrapped = cx.getWrapFactory().wrap(cx, scope, val, null);
        if (wrapped instanceof Scriptable)
            return (Scriptable) wrapped;
        throw errorWithClassName("msg.invalid.type", val);
    }
}
public class BerkeleyDbStorageProvider { /** * Cleans the parent directory of this storage provider . This action will * delete all previous collection storages made in this directory , and thus * it should only be invoked either before any collections has been made or * when all collections are ensured to be unused . */ public void cleanDirectory ( ) { } }
File [ ] directories = _parentDirectory . listFiles ( new FilenameFilter ( ) { @ Override public boolean accept ( File dir , String name ) { return name . startsWith ( DIRECTORY_PREFIX ) ; } } ) ; for ( File directory : directories ) { delete ( directory ) ; }
public class Try { /** * Creates a CompletionStage that , when it completes , will capture into a Try whether the given completionStage * was successful or not * @ param completionStage the completion stage that will complete * @ param < V > the value type * @ return a Try which is the result of the call */ public static < V > CompletionStage < Try < V > > tryStage ( CompletionStage < V > completionStage ) { } }
return completionStage . handle ( ( value , throwable ) -> { if ( throwable != null ) { return failed ( throwable ) ; } return succeeded ( value ) ; } ) ;
public class Expressions {

    /**
     * Creates a new enum template expression by delegating to the List-based
     * overload with an immutable copy of the varargs.
     *
     * @param cl type of expression
     * @param template template
     * @param args template parameters
     * @return template expression
     */
    public static <T extends Enum<T>> EnumTemplate<T> enumTemplate(Class<? extends T> cl, Template template, Object... args) {
        return enumTemplate(cl, template, ImmutableList.copyOf(args));
    }
}
public class RNAToAminoAcidTranslator { /** * Performs the trimming of stop codons and the conversion of a valid start * amino acid to M */ @ Override protected void postProcessCompoundLists ( List < List < AminoAcidCompound > > compoundLists ) { } }
for ( List < AminoAcidCompound > compounds : compoundLists ) { if ( trimStops ) { trimStop ( compounds ) ; } }
public class managed_device {

    /**
     * Converts the API response of a bulk operation into objects and returns the
     * object array (for get requests).
     *
     * On a non-zero error code the session is cleared if it expired, and a
     * nitro_exception carrying the per-item responses is thrown. Otherwise the
     * first managed_device of each per-item response is collected into the result.
     */
    protected base_resource[] get_nitro_bulk_response(nitro_service service, String response) throws Exception {
        managed_device_responses result = (managed_device_responses) service.get_payload_formatter().string_to_resource(managed_device_responses.class, response);
        if (result.errorcode != 0) {
            // session expiry is recoverable: drop the cached session before failing
            if (result.errorcode == SESSION_NOT_EXISTS)
                service.clear_session();
            throw new nitro_exception(result.message, result.errorcode, (base_response[]) result.managed_device_response_array);
        }
        managed_device[] result_managed_device = new managed_device[result.managed_device_response_array.length];
        for (int i = 0; i < result.managed_device_response_array.length; i++) {
            result_managed_device[i] = result.managed_device_response_array[i].managed_device[0];
        }
        return result_managed_device;
    }
}
public class Index {

    /**
     * Gets the settings of this index (settings format version 2).
     *
     * @param requestOptions options to pass to this request
     * @return the settings as a JSON object
     * @throws AlgoliaException on API error
     */
    public JSONObject getSettings(RequestOptions requestOptions) throws AlgoliaException {
        // second argument false: not a search request (no search host fallback)
        return client.getRequest("/1/indexes/" + encodedIndexName + "/settings?getVersion=2", false, requestOptions);
    }
}
public class CountryCodeSelectorView {

    /**
     * Sets the currently selected country and updates the name and dial-code
     * text views to reflect it.
     *
     * @param country the country to set
     */
    public void setSelectedCountry(@NonNull Country country) {
        Log.d(TAG, "Selected country changed to " + country.getDisplayName());
        countryNameTextView.setText(country.getDisplayName());
        countryCodeTextView.setText(country.getDialCode());
        selectedCountry = country;
    }
}
public class StyleUtils {

    /**
     * Creates new polyline options populated with the feature row style.
     *
     * @param geoPackage GeoPackage
     * @param featureRow feature row
     * @param density display density: {@link android.util.DisplayMetrics#density}
     * @return polyline options populated with the feature style
     */
    public static PolylineOptions createPolylineOptions(GeoPackage geoPackage, FeatureRow featureRow, float density) {
        PolylineOptions polylineOptions = new PolylineOptions();
        setFeatureStyle(polylineOptions, geoPackage, featureRow, density);
        return polylineOptions;
    }
}
public class BaseNeo4jAssociationQueries {

    /**
     * Builds the Cypher MATCH clause for the owner entity node.
     * Example: {@code MATCH (owner:ENTITY:table {id: {0}})}
     */
    private static String initMatchOwnerEntityNode(EntityKeyMetadata ownerEntityKeyMetadata) {
        StringBuilder queryBuilder = new StringBuilder();
        appendMatchOwnerEntityNode(queryBuilder, ownerEntityKeyMetadata);
        return queryBuilder.toString();
    }
}
public class LookaheadChainingListener {

    /**
     * {@inheritDoc}
     *
     * Queues the end-definition-list event and then flushes the oldest buffered
     * event, preserving the one-event lookahead window.
     *
     * @since 2.0RC1
     */
    @Override
    public void endDefinitionList(Map<String, String> parameters) {
        this.previousEvents.endDefinitionList(parameters);
        firePreviousEvent();
    }
}
public class APIClient {

    /**
     * Copies an existing index.
     *
     * @param srcIndexName the name of the index to copy
     * @param dstIndexName the new index name that will contain a copy of srcIndexName
     *        (destination will be overwritten if it already exists)
     * @param scopes the list of scopes to copy (e.g. settings, synonyms, rules)
     */
    public JSONObject copyIndex(String srcIndexName, String dstIndexName, List<String> scopes) throws AlgoliaException {
        return operationOnIndex("copy", srcIndexName, dstIndexName, scopes, RequestOptions.empty);
    }
}
public class NfsGetAttrResponse {

    /**
     * Unmarshals the GETATTR response: base fields first, then — only when the
     * NFS status is OK — the (mandatory, hence {@code true}) file attributes.
     *
     * @see com.emc.ecs.nfsclient.nfs.NfsResponseBase#unmarshalling(com.emc.ecs.nfsclient.rpc.Xdr)
     */
    public void unmarshalling(Xdr xdr) throws RpcException {
        super.unmarshalling(xdr);
        if (stateIsOk()) {
            unmarshallingAttributes(xdr, true);
        }
    }
}
public class CollectionUtils { /** * Adapts the { @ link Enumeration } into an instance of the { @ link Iterator } interface . * @ param < T > Class type of the elements in the { @ link Enumeration } . * @ param enumeration { @ link Enumeration } to adapt into an { @ link Iterator } . * @ return an { @ link Iterator } implementation iterating over the elements in the { @ link Enumeration } . * @ see java . util . Collections # emptyIterator ( ) * @ see # asEnumeration ( java . util . Iterator ) * @ see java . util . Enumeration * @ see java . util . Iterator */ @ NullSafe public static < T > Iterator < T > asIterator ( Enumeration < T > enumeration ) { } }
return ( enumeration == null ? Collections . emptyIterator ( ) : new Iterator < T > ( ) { @ Override public boolean hasNext ( ) { return enumeration . hasMoreElements ( ) ; } @ Override public T next ( ) { return enumeration . nextElement ( ) ; } } ) ;
public class OpenTSDBMain {

    /**
     * The OpenTSDB fat-jar main entry point. Validates the command name, trims
     * arguments, and dispatches to the selected tool; exits the JVM with -1 on
     * a missing or unrecognized command.
     *
     * @param args see usage banner {@link OpenTSDBMain#mainUsage(PrintStream)}
     */
    public static void main(String[] args) {
        log.info("Starting.");
        log.info(BuildData.revisionString());
        log.info(BuildData.buildString());
        try {
            System.in.close();  // Release a FD we don't need.
        } catch (Exception e) {
            log.warn("Failed to close stdin", e);
        }
        if (args.length == 0) {
            log.error("No command supplied");
            mainUsage(System.err);
            System.exit(-1);
        }
        // This is not normally needed since values passed on the CL are auto-trimmed,
        // but since the Main may be called programatically in some embedded scenarios,
        // let's save us some time and trim the values here.
        for (int i = 0; i < args.length; i++) {
            args[i] = args[i].trim();
        }
        String targetTool = args[0].toLowerCase();
        if (!COMMANDS.containsKey(targetTool)) {
            log.error("Command not recognized: [" + targetTool + "]");
            mainUsage(System.err);
            System.exit(-1);
        }
        // shift() drops the command name so the tool sees only its own args
        process(targetTool, shift(args));
    }
}
public class EvalFuncDodcfg { /** * This subroutine computes the function psi ( t ) and the scaled * functions psi ' ( t ) / t and psi ' ' ( t ) / t for the optimal design * with composite materials problem . * @ param t * @ param mu1 * @ param mu2 * @ param t1 * @ param t2 * @ param option * @ param lambda * @ return */ public double dodcps ( double t , double mu1 , double mu2 , double t1 , double t2 , int option , double lambda ) { } }
double sqrtt ; double result = Double . NaN ; sqrtt = Math . sqrt ( t ) ; if ( option == 0 ) { if ( sqrtt <= t1 ) result = p5 * mu2 * t ; else if ( sqrtt > t1 && sqrtt < t2 ) result = mu2 * t1 * sqrtt - lambda * mu1 ; else if ( sqrtt >= t2 ) result = p5 * mu1 * t + lambda * ( mu2 - mu1 ) ; } else if ( option == 1 ) { if ( sqrtt <= t1 ) result = p5 * mu2 ; else if ( sqrtt > t1 && sqrtt < t2 ) result = p5 * mu2 * t1 / sqrtt ; else if ( sqrtt >= t2 ) result = p5 * mu1 ; } else if ( option == 2 ) { if ( sqrtt <= t1 ) result = zero ; else if ( sqrtt > t1 && sqrtt < t2 ) result = - p25 * mu2 * t1 / ( sqrtt * t ) ; else if ( sqrtt >= t2 ) result = zero ; } if ( Double . isNaN ( result ) ) { throw new RuntimeException ( "Bad" ) ; } return result ;
public class CompareComply {

    /**
     * Submits a batch-processing request, running Compare and Comply methods
     * over a collection of input documents.
     *
     * **Important:** Batch processing requires the use of the IBM Cloud Object
     * Storage service; input and output credentials plus bucket location/name
     * are sent as multipart form parts.
     *
     * @param createBatchOptions the {@link CreateBatchOptions} containing the options for the call
     * @return a {@link ServiceCall} with a response type of {@link BatchStatus}
     */
    public ServiceCall<BatchStatus> createBatch(CreateBatchOptions createBatchOptions) {
        Validator.notNull(createBatchOptions, "createBatchOptions cannot be null");
        String[] pathSegments = { "v1/batches" };
        RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments));
        builder.query("version", versionDate);
        // standard SDK analytics headers
        Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("compare-comply", "v1", "createBatch");
        for (Entry<String, String> header : sdkHeaders.entrySet()) {
            builder.header(header.getKey(), header.getValue());
        }
        builder.header("Accept", "application/json");
        builder.query("function", createBatchOptions.function());
        if (createBatchOptions.model() != null) {
            builder.query("model", createBatchOptions.model());
        }
        // multipart body: COS credentials files plus bucket coordinates
        MultipartBody.Builder multipartBuilder = new MultipartBody.Builder();
        multipartBuilder.setType(MultipartBody.FORM);
        RequestBody inputCredentialsFileBody = RequestUtils.inputStreamBody(createBatchOptions.inputCredentialsFile(), "application/json");
        multipartBuilder.addFormDataPart("input_credentials_file", "filename", inputCredentialsFileBody);
        multipartBuilder.addFormDataPart("input_bucket_location", createBatchOptions.inputBucketLocation());
        multipartBuilder.addFormDataPart("input_bucket_name", createBatchOptions.inputBucketName());
        RequestBody outputCredentialsFileBody = RequestUtils.inputStreamBody(createBatchOptions.outputCredentialsFile(), "application/json");
        multipartBuilder.addFormDataPart("output_credentials_file", "filename", outputCredentialsFileBody);
        multipartBuilder.addFormDataPart("output_bucket_location", createBatchOptions.outputBucketLocation());
        multipartBuilder.addFormDataPart("output_bucket_name", createBatchOptions.outputBucketName());
        builder.body(multipartBuilder.build());
        return createServiceCall(builder.build(), ResponseConverterUtils.getObject(BatchStatus.class));
    }
}
public class ElasticsearchRestClientFactoryBean {

    /**
     * Initializes mappings if needed. For each index extracted from the
     * configured mapping definitions, creates the index (honoring
     * {@code forceMapping}), optionally merges settings, and creates mappings.
     *
     * <p>Note that you can force to reinit mapping using {@link #setForceMapping(boolean)}.
     */
    private void initMappings() throws Exception {
        checkClient();
        // We extract indexes and mappings to manage from mappings definition
        if (mappings != null && mappings.length > 0) {
            Map<String, Collection<String>> indices = getIndexMappings(mappings);
            // Let's initialize indexes and mappings if needed
            for (String index : indices.keySet()) {
                createIndex(client.getLowLevelClient(), classpathRoot, index, forceMapping);
                if (mergeSettings) {
                    updateSettings(client.getLowLevelClient(), classpathRoot, index);
                }
                createMapping(client, classpathRoot, index, mergeMapping);
            }
        }
    }
}
public class StringParser {

    /**
     * Parses the given {@link String} as {@link BigDecimal}, delegating to the
     * four-argument overload with a {@code null} default.
     *
     * @param sStr the String to parse; may be <code>null</code>
     * @param nScale the scaling (decimal places) to be used for the result; must be &ge; 0
     * @param eRoundingMode the rounding mode used to achieve the scale; may not be <code>null</code>
     * @return <code>null</code> if the string does not represent a valid value
     */
    @Nullable
    public static BigDecimal parseBigDecimal(@Nullable final String sStr, @Nonnegative final int nScale, @Nonnull final RoundingMode eRoundingMode) {
        return parseBigDecimal(sStr, nScale, eRoundingMode, null);
    }
}
public class JobApi {

    /**
     * Gets a Pager of jobs in a project, filtered by scope.
     *
     * <pre><code>GitLab Endpoint: GET /projects/:id/jobs</code></pre>
     *
     * @param projectIdOrPath id, path of the project, or a Project instance holding the project ID or path
     * @param scope the scope of jobs, one of: CREATED, PENDING, RUNNING, FAILED, SUCCESS, CANCELED, SKIPPED, MANUAL
     * @param itemsPerPage the number of Job instances that will be fetched per page
     * @return a Pager over the jobs for the specified project ID
     * @throws GitLabApiException if any exception occurs during execution
     */
    public Pager<Job> getJobs(Object projectIdOrPath, JobScope scope, int itemsPerPage) throws GitLabApiException {
        GitLabApiForm formData = new GitLabApiForm().withParam("scope", scope);
        return (new Pager<Job>(this, Job.class, itemsPerPage, formData.asMap(), "projects", getProjectIdOrPath(projectIdOrPath), "jobs"));
    }
}
public class MvelExpression {

    /**
     * {@inheritDoc}
     *
     * Evaluates the pre-compiled MVEL expression against the target object with
     * the given variable bindings. The cast is unchecked; the caller is
     * responsible for the expression producing the expected type T.
     */
    public T evaluate(Object target, Map<String, Object> variables) {
        return (T) MVEL.executeExpression(getCompiledExpression(), target, variables);
    }
}
public class AtomicDoubleArray { /** * Atomically adds the given value to the element at index { @ code i } . * @ param i the index * @ param delta the value to add * @ return the previous value */ public double getAndAdd ( int i , double delta ) { } }
while ( true ) { double orig = get ( i ) ; double newVal = orig + delta ; if ( compareAndSet ( i , orig , newVal ) ) return orig ; }
public class InternalLocaleBuilder {

    /**
     * Resets this Builder's internal state with the given (already parsed)
     * language tag: language (first extlang wins over the primary subtag),
     * script, region, variants joined with the BaseLocale separator, and
     * extensions/private use.
     */
    public InternalLocaleBuilder setLanguageTag(LanguageTag langtag) {
        clear();
        if (langtag.getExtlangs().size() > 0) {
            // extlang, when present, is the effective language per BCP 47
            _language = langtag.getExtlangs().get(0);
        } else {
            String language = langtag.getLanguage();
            if (!language.equals(LanguageTag.UNDETERMINED)) {
                _language = language;
            }
        }
        _script = langtag.getScript();
        _region = langtag.getRegion();
        List<String> bcpVariants = langtag.getVariants();
        if (bcpVariants.size() > 0) {
            // join variants: v1_SEP_v2_SEP_... in their original order
            StringBuilder var = new StringBuilder(bcpVariants.get(0));
            for (int i = 1; i < bcpVariants.size(); i++) {
                var.append(BaseLocale.SEP).append(bcpVariants.get(i));
            }
            _variant = var.toString();
        }
        setExtensions(langtag.getExtensions(), langtag.getPrivateuse());
        return this;
    }
}
public class OAuth20Utils {

    /**
     * Gets the OAuth response mode type from the request's
     * {@code response_mode} parameter, falling back to {@code NONE} when the
     * parameter is absent or unrecognized.
     *
     * @param context the context
     * @return the resolved response mode type
     */
    public static OAuth20ResponseModeTypes getResponseModeType(final J2EContext context) {
        val responseType = context.getRequestParameter(OAuth20Constants.RESPONSE_MODE);
        val type = Arrays.stream(OAuth20ResponseModeTypes.values())
            .filter(t -> t.getType().equalsIgnoreCase(responseType))
            .findFirst()
            .orElse(OAuth20ResponseModeTypes.NONE);
        LOGGER.debug("OAuth response type is [{}]", type);
        return type;
    }
}
public class TaxinvoiceServiceImp {

    /**
     * Sends the tax-invoice email notification, delegating to the overload
     * that also accepts a user id (passed as {@code null} here).
     *
     * @see com.popbill.api.TaxinvoiceService#sendEmail(java.lang.String, com.popbill.api.taxinvoice.MgtKeyType, java.lang.String, java.lang.String)
     */
    @Override
    public Response sendEmail(String CorpNum, MgtKeyType KeyType, String MgtKey, String Receiver) throws PopbillException {
        return sendEmail(CorpNum, KeyType, MgtKey, Receiver, null);
    }
}
public class BeanParserFactory { /** * 获取 { @ link BeanParser } */ public static BeanParser getBeanParser ( Method method ) { } }
Type returnType = method . getReturnType ( ) ; if ( isCompletableFutureType ( method . getReturnType ( ) ) ) { returnType = getActualReturnType ( method ) ; } BeanParser beanParser = typeBeanParserMap . get ( returnType ) ; if ( beanParser != null ) { return beanParser ; } return defaultBeanParser ;
public class PermissionsBuilder {

    /**
     * Sets the state for the given types to {@link PermissionState#UNSET}.
     *
     * @param types the types to change
     * @return the current instance in order to chain call methods
     */
    public PermissionsBuilder setUnset(PermissionType... types) {
        for (PermissionType type : types) {
            setState(type, PermissionState.UNSET);
        }
        return this;
    }
}
public class StorePackageImpl {

    /**
     * Returns the EClass for DatabaseInformationItem, lazily resolving it from
     * the registered package's classifier list (index 26).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getDatabaseInformationItem() {
        if (databaseInformationItemEClass == null) {
            databaseInformationItemEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(26);
        }
        return databaseInformationItemEClass;
    }
}
public class Syslog { /** * destroyInstance ( ) gracefully shuts down the specified Syslog protocol and * removes the instance from Syslog4j . * @ param protocol - the Syslog protocol to destroy * @ throws SyslogRuntimeException */ public synchronized static final void destroyInstance ( String protocol ) throws SyslogRuntimeException { } }
if ( protocol == null || "" . equals ( protocol . trim ( ) ) ) { return ; } String _protocol = protocol . toLowerCase ( ) ; if ( instances . containsKey ( _protocol ) ) { SyslogUtility . sleep ( SyslogConstants . THREAD_LOOP_INTERVAL_DEFAULT ) ; SyslogIF syslog = ( SyslogIF ) instances . get ( _protocol ) ; try { syslog . shutdown ( ) ; } finally { instances . remove ( _protocol ) ; } } else { throwRuntimeException ( "Cannot destroy protocol \"" + protocol + "\" instance; call shutdown instead" ) ; return ; }
public class SwingApplication {

    /**
     * Creates a {@link PopupMenuListener} that invokes the given consumer only
     * when the popup menu is about to become invisible; the other two callbacks
     * are intentionally no-ops.
     *
     * @param consumer the consumer invoked with the event
     * @return the popup menu listener
     */
    public static PopupMenuListener popupMenuWillBecomeInvisible(@NonNull Consumer<PopupMenuEvent> consumer) {
        return new PopupMenuListener() {
            @Override
            public void popupMenuCanceled(PopupMenuEvent e) {
            }

            @Override
            public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {
                consumer.accept(e);
            }

            @Override
            public void popupMenuWillBecomeVisible(PopupMenuEvent e) {
            }
        };
    }
}
public class LicenseResolver {

    /**
     * Attempts high-confidence license resolution by matching the given string
     * (the license name, not the license text) case-insensitively against the
     * alternative-name mapping table; a hit resolves to the mapped SPDX id.
     *
     * @param licenseString the license string (not the actual license text)
     * @return a LicenseChoice object if resolved, otherwise null
     */
    private static LicenseChoice resolveViaAlternativeMapping(String licenseString) {
        if (licenseString == null) {
            return null;
        }
        try {
            for (final Map.Entry<String, List<String>> mapping : mappings.entrySet()) {
                final List<String> names = mapping.getValue();
                if (names != null) {
                    for (final String name : names) {
                        if (licenseString.equalsIgnoreCase(name)) {
                            // key is the canonical SPDX license id
                            return resolveSpdxLicenseString(mapping.getKey());
                        }
                    }
                }
            }
        } catch (InvalidLicenseStringException e) {
            // throw it away — an unparsable mapping entry just means "no match"
        }
        return null;
    }
}
public class QueryUtil {

    /**
     * Returns the value at the given position (row), returning the default
     * empty value ("" or null, depending on null-support mode) for a wrong row
     * or null values. This method only exists for backward compatibility and
     * should not be used for new functionality.
     *
     * @deprecated use instead QueryColumn.get(int, Object)
     */
    @Deprecated
    public static Object getValue(QueryColumn column, int row) {
        // print.ds();
        // with full null support, null is a legitimate value and passes through
        if (NullSupportHelper.full())
            return column.get(row, null);
        Object v = column.get(row, "");
        return v == null ? "" : v;
    }
}
public class SegmentIndexBufferFileIO { /** * Writes to the specified segment index buffer file . * @ param sib - the segment index buffer * @ param sibFile - the segment index buffer file to write to * @ throws IOException */ @ Override public int write ( SegmentIndexBuffer sib , File sibFile ) throws IOException { } }
create ( sibFile ) ; RandomAccessFile raf = new RandomAccessFile ( sibFile , "rw" ) ; FileChannel channel = raf . getChannel ( ) ; writeVersion ( channel ) ; int length = sib . write ( channel ) ; length += STORAGE_VERSION_LENGTH ; raf . setLength ( length ) ; channel . force ( true ) ; channel . close ( ) ; raf . close ( ) ; if ( _logger . isTraceEnabled ( ) ) { _logger . trace ( "write " + sibFile . getAbsolutePath ( ) ) ; } return length ;
public class ImageModerationsImpl {

    /**
     * Fuzzily matches an image against one of your custom Image Lists,
     * returning the ID and tags of the matching image.
     *
     * Note: Refresh Index must be run on the corresponding Image List before
     * additions and removals are reflected in the response.
     *
     * @param imageStream the image file bytes
     * @param matchFileInputOptionalParameter optional parameters for this call
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the MatchResponse object (body unwrapped from the service response)
     */
    public Observable<MatchResponse> matchFileInputAsync(byte[] imageStream, MatchFileInputOptionalParameter matchFileInputOptionalParameter) {
        return matchFileInputWithServiceResponseAsync(imageStream, matchFileInputOptionalParameter)
            .map(new Func1<ServiceResponse<MatchResponse>, MatchResponse>() {
                @Override
                public MatchResponse call(ServiceResponse<MatchResponse> response) {
                    return response.body();
                }
            });
    }
}
public class GeometryPainter {

    /**
     * The actual painting function. Dispatches on the concrete geometry type of
     * the {@link GfxGeometry} paintable: single geometries draw one shape keyed
     * by the paintable id; Multi* geometries draw one shape per part keyed by
     * "id.i"; points additionally register a symbol definition ("id.def")
     * referenced by each drawn symbol.
     *
     * @param paintable a GfxGeometry paintable (ignored when null)
     * @param group the group where the object resides in (optional)
     * @param context a MapContext object, responsible for actual drawing
     */
    public void paint(Paintable paintable, Object group, MapContext context) {
        if (paintable != null) {
            GfxGeometry gfxGeometry = (GfxGeometry) paintable;
            Geometry geometry = gfxGeometry.getGeometry();
            ShapeStyle shapeStyle = gfxGeometry.getStyle();
            if (geometry instanceof LineString) {
                context.getVectorContext().drawLine(group, gfxGeometry.getId(), (LineString) geometry, shapeStyle);
            } else if (geometry instanceof MultiLineString) {
                MultiLineString m = (MultiLineString) geometry;
                String gfxId = gfxGeometry.getId();
                GraphicsContext gc = context.getVectorContext();
                for (int i = 0; i < m.getNumGeometries(); i++) {
                    gc.drawLine(group, gfxId + "." + i, (LineString) m.getGeometryN(i), shapeStyle);
                }
            } else if (geometry instanceof Polygon) {
                context.getVectorContext().drawPolygon(group, gfxGeometry.getId(), (Polygon) geometry, shapeStyle);
            } else if (geometry instanceof MultiPolygon) {
                MultiPolygon m = (MultiPolygon) geometry;
                String gfxId = gfxGeometry.getId();
                GraphicsContext gc = context.getVectorContext();
                for (int i = 0; i < m.getNumGeometries(); i++) {
                    gc.drawPolygon(group, gfxId + "." + i, (Polygon) m.getGeometryN(i), shapeStyle);
                }
            } else if (geometry instanceof Point) {
                // symbol definition must exist before the symbol referencing it
                context.getVectorContext().drawSymbolDefinition(group, gfxGeometry.getId() + ".def", gfxGeometry.getSymbolInfo(), shapeStyle, null);
                context.getVectorContext().drawSymbol(group, gfxGeometry.getId(), geometry.getCoordinate(), shapeStyle, gfxGeometry.getId() + ".def");
            } else if (geometry instanceof MultiPoint) {
                Coordinate[] coordinates = geometry.getCoordinates();
                String gfxId = gfxGeometry.getId();
                GraphicsContext gc = context.getVectorContext();
                // one shared symbol definition, reused by every point of the multi-point
                String styleTypeDef = gfxGeometry.getId() + ".def";
                context.getVectorContext().drawSymbolDefinition(group, styleTypeDef, gfxGeometry.getSymbolInfo(), shapeStyle, null);
                for (int i = 0; i < coordinates.length; i++) {
                    gc.drawSymbol(group, gfxId + "." + i, coordinates[i], shapeStyle, styleTypeDef);
                }
            }
        }
    }
}
public class FileUtil { /** * Filters files based on their file extension . * @ return Object for filtering files */ public static FileFilter getFileFilter ( ) { } }
return new FileFilter ( ) { @ Override public boolean accept ( File pathname ) { // Accept if input is a non - hidden file with registered extension // or if a non - hidden and not - ignored directory return ! pathname . isHidden ( ) && ( pathname . isFile ( ) && Engines . getRecognizedExtensions ( ) . contains ( fileExt ( pathname ) ) ) || ( directoryOnlyIfNotIgnored ( pathname ) ) ; } } ;
public class KeywordUtils { /** * Retrieves the given keyword ' s parameter description . * @ param keyword keyword name * @ param context Spring application context * @ param beanMap keyword name to bean name mapping * @ return the parameter description string of the given keyword , or { " * args " } if unavailable . */ public static String [ ] getParameters ( String keyword , ApplicationContext context , Map < String , String > beanMap ) { } }
KeywordInfo keywordInfo = getKeywordInfo ( keyword , context , beanMap ) ; return keywordInfo == null ? DEFAULT_PARAMS : keywordInfo . parameters ( ) ;
public class OCommandExecutorSQLDropClass {

    /**
     * Executes the DROP CLASS: drops the class's own indexes, removes the class
     * from the schema (persisting and reloading it), deletes the default
     * cluster, and finally rebuilds any superclass indexes that referenced the
     * dropped class's clusters.
     *
     * @return null when the class does not exist, true otherwise
     * @throws OCommandExecutionException when called before parsing
     */
    public Object execute(final Map<Object, Object> iArgs) {
        if (className == null)
            throw new OCommandExecutionException("Cannot execute the command because it has not been parsed yet");
        final ODatabaseRecord database = getDatabase();
        final OClass oClass = database.getMetadata().getSchema().getClass(className);
        if (oClass == null)
            return null;
        // drop the class's own indexes first
        for (final OIndex<?> oIndex : oClass.getClassIndexes()) {
            database.getMetadata().getIndexManager().dropIndex(oIndex.getName());
        }
        // capture superclass and cluster info BEFORE the class is removed
        final OClass superClass = oClass.getSuperClass();
        final int[] clustersToIndex = oClass.getPolymorphicClusterIds();
        final String[] clusterNames = new String[clustersToIndex.length];
        for (int i = 0; i < clustersToIndex.length; i++) {
            clusterNames[i] = database.getClusterNameById(clustersToIndex[i]);
        }
        final int clusterId = oClass.getDefaultClusterId();
        ((OSchemaProxy) database.getMetadata().getSchema()).dropClassInternal(className);
        ((OSchemaProxy) database.getMetadata().getSchema()).saveInternal();
        database.getMetadata().getSchema().reload();
        deleteDefaultCluster(clusterId);
        if (superClass == null)
            return true;
        // superclass indexes spanned the dropped clusters: detach and rebuild
        for (final OIndex<?> oIndex : superClass.getIndexes()) {
            for (final String clusterName : clusterNames)
                oIndex.getInternal().removeCluster(clusterName);
            OLogManager.instance().info("Index %s is used in super class of %s and should be rebuilt.", oIndex.getName(), className);
            oIndex.rebuild();
        }
        return true;
    }
}
public class Frustum {

    /**
     * Sets the six planes and the bounding box of the frustum based on its
     * eight vertices. Each plane is derived from three specific vertices; the
     * winding of each triple determines the plane normal's direction, so the
     * vertex order must not be changed.
     */
    protected void updateDerivedState() {
        _planes[0].fromPoints(_vertices[0], _vertices[1], _vertices[2]); // near
        _planes[1].fromPoints(_vertices[5], _vertices[4], _vertices[7]); // far
        _planes[2].fromPoints(_vertices[1], _vertices[5], _vertices[6]); // left
        _planes[3].fromPoints(_vertices[4], _vertices[0], _vertices[3]); // right
        _planes[4].fromPoints(_vertices[3], _vertices[2], _vertices[6]); // top
        _planes[5].fromPoints(_vertices[4], _vertices[5], _vertices[1]); // bottom
        _bounds.fromPoints(_vertices);
    }
}
public class AmebaFeature { /** * < p > subscribeSystemEvent . < / p > * @ param eventClass a { @ link java . lang . Class } object . * @ param listenerClass a { @ link java . lang . Class } object . * @ return a { @ link ameba . event . Listener } object . * @ since 0.1.6e * @ param < E > a E object . */ protected < E extends Event > Listener subscribeSystemEvent ( Class < E > eventClass , final Class < ? extends Listener < E > > listenerClass ) { } }
Listener < E > listener = locator . createAndInitialize ( listenerClass ) ; SystemEventBus . subscribe ( eventClass , listener ) ; return listener ;
public class MalisisCore { /** * Pre - initialization event * @ param event the event */ @ EventHandler public void preInit ( FMLPreInitializationEvent event ) { } }
// Cache the ASM data table gathered by FML; autoLoadClasses() below presumably
// scans it, so this assignment must happen first.
asmDataTable = event . getAsmData ( ) ;
autoLoadClasses ( ) ;
// register this to the EVENT _ BUS for onGuiClose ( )
MinecraftForge . EVENT_BUS . register ( this ) ;
// Load mod settings from the configuration file location suggested by FML.
settings = new MalisisCoreSettings ( event . getSuggestedConfigurationFile ( ) ) ;
// Forward the lifecycle event so registries can perform their pre-init work.
Registries . processFMLStateEvent ( event ) ;
public class ValidationResult { /** * Adds a validation result to this . * @ param validation the validation to add */ public void add ( ValidationResult validation ) { } }
correct &= validation . correct ; if ( ! validate ) return ; errors . addAll ( validation . errors ) ; warnings . addAll ( validation . warnings ) ;
public class ReferenceCountingResourceHolder { /** * Decrements the reference count by 1 . If it reaches to 0 , then closes { @ link # closer } . */ @ Override public void close ( ) { } }
if ( closed . compareAndSet ( false , true ) ) { decrement ( ) ; } else { log . warn ( new ISE ( "Already closed" ) , "Already closed" ) ; }
public class ErrorReporter { /** * For CompilationFailedException . */ protected void report ( CompilationFailedException e , boolean child ) { } }
println ( e . toString ( ) ) ; stacktrace ( e , false ) ;
public class JobUploader { /** * Convenience override for int ids . * @ param finalJobFolderPath * @ return * @ throws IOException */ public JobFolder createJobFolder ( final String finalJobFolderPath ) throws IOException { } }
LOG . log ( Level . FINE , "Final job submission Directory: " + finalJobFolderPath ) ; return new JobFolder ( this . fileSystem , new Path ( finalJobFolderPath ) ) ;
public class AsynchronousRequest { /** * For more info on transactions API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / commerce / transactions " > here < / a > < br / > * Give user the access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions * @ param API API key * @ param time current | History * @ param type buy | sell * @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) } * @ throws GuildWars2Exception invalid API key * @ throws NullPointerException if given { @ link Callback } is empty * @ see Transaction transaction info */ public void getTPTransaction ( String API , Transaction . Time time , Transaction . Type type , Callback < List < Transaction > > callback ) throws GuildWars2Exception , NullPointerException { } }
isParamValid ( new ParamChecker ( ParamType . API , API ) ) ; if ( time == null || type == null ) throw new GuildWars2Exception ( ErrorCode . TransTime , "Transaction time/type cannot be empty" ) ; gw2API . getTPTransaction ( time . getValue ( ) , type . getValue ( ) , API ) . enqueue ( callback ) ;
public class ShapeAppearanceModel { /** * Sets the corner treatment for the top - left corner . * @ param cornerFamily the family to use to create the corner treatment * @ param cornerSize the size to use to create the corner treatment */ public void setTopLeftCorner ( @ CornerFamily int cornerFamily , @ Dimension int cornerSize ) { } }
// Build the corner treatment from the family/size pair and delegate to the
// CornerTreatment-based overload, which performs the actual assignment.
setTopLeftCorner ( MaterialShapeUtils . createCornerTreatment ( cornerFamily , cornerSize ) ) ;
public class AbstractBaseProcessLauncher { /** * Shuts down the instance represented by this launcher . Uses the { @ link ProcessHandlerFactory } to find sub * processes . */ public void shutdown ( ) { } }
// Record that shutdown was requested before doing any work.
shutdownCalled = true ;
if ( isRunning ( ) ) {
// Make sure the watchdog has observed the process start before destroying it.
watchdog . waitForProcessStarted ( ) ;
watchdog . destroyProcess ( ) ;
// Give the process up to 2 seconds to exit gracefully.
watchdog . waitForTerminationAfterDestroy ( 2 , SECONDS ) ;
if ( isRunning ( ) ) {
// Graceful destroy failed: escalate to a forceful kill and wait 1 more second.
watchdog . destroyProcessForcefully ( ) ;
watchdog . waitForTerminationAfterDestroy ( 1 , SECONDS ) ;
if ( isRunning ( ) ) {
LOGGER . severe ( String . format ( "Unable to kill process with PID %s" , watchdog . getProcessId ( ) ) ) ;
}
}
}
// if shutdown ( ) was called by something other than the shutdown hook , we don ' t need the shutdown hook anymore
try {
if ( shutDownHook != null ) {
Runtime . getRuntime ( ) . removeShutdownHook ( shutDownHook ) ;
}
} catch ( IllegalStateException e ) {
// ignore . . happens when the shutdown hook is in use , that ' s okay
}
public class ClassLoaderResolver { /** * This method selects the best classloader instance to be used for * class / resource loading by whoever calls this method . The decision * typically involves choosing between the caller ' s current , thread context , * system , and other classloaders in the JVM and is made by the { @ link IClassLoadStrategy } * instance established by the last call to { @ link # setStrategy } . * @ return classloader to be used by the caller [ ' null ' indicates the * primordial loader ] */ public static synchronized ClassLoader getClassLoader ( ) { } }
final Class caller = getCallerClass ( 0 ) ; final ClassLoadContext ctx = new ClassLoadContext ( caller ) ; return s_strategy . getClassLoader ( ctx ) ;
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getMFCMFCScpe ( ) { } }
// Generated EMF accessor: lazily resolve the MFCMFCScpe EEnum from the package
// registered under the AFPLIB namespace URI. Classifier index 49 is fixed by the
// generated package metadata — do not hand-edit.
if ( mfcmfcScpeEEnum == null ) {
mfcmfcScpeEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 49 ) ;
}
return mfcmfcScpeEEnum ;
public class XlsUtil { /** * workbook转换为list * @ param config 配置 * @ param wb excel * @ return * @ throws Exception */ public static List < ? > workbook2List ( ExcelConfig config , Workbook wb ) throws Exception { } }
String [ ] names = config . getNames ( ) ; String [ ] types = config . getTypes ( ) ; Field key = config . getKey ( ) ; List < Object > list = new ArrayList < Object > ( ) ; Sheet sheet = wb . getSheet ( config . getSheetNum ( ) ) ; int length = sheet . getColumns ( ) < names . length ? sheet . getColumns ( ) : names . length ; // 计算行数 int rowLength = sheet . getRows ( ) < config . getMaxRow ( ) ? sheet . getRows ( ) : ( config . getMaxRow ( ) > 0 ? ( config . getMaxRow ( ) + config . getStartRow ( ) ) : sheet . getRows ( ) ) ; for ( int i = config . getStartRow ( ) ; i < rowLength ; i ++ ) { // Map类型要特殊处理 Class clazz = Class . forName ( config . getClazz ( ) ) ; Object obj = null ; if ( Map . class . isAssignableFrom ( clazz ) ) { obj = new HashMap ( ) ; } else { obj = clazz . newInstance ( ) ; } for ( int j = 0 ; j < length ; j ++ ) { setValue ( obj , names [ j ] , types [ j ] , sheet . getCell ( j , i ) ) ; } // checkKey if ( key != null ) { // 当主键为空时 , 不在继续读取excel if ( key . get ( obj ) == null || "" . equals ( String . valueOf ( key . get ( obj ) ) ) ) { break ; } } list . add ( obj ) ; } return list ;
public class AmebaFeature { /** * < p > subscribeSystemEvent . < / p > * @ param eventClass a { @ link java . lang . Class } object . * @ param listener a { @ link ameba . event . Listener } object . * @ since 0.1.6e * @ param < E > a E object . */ protected < E extends Event > void subscribeSystemEvent ( Class < E > eventClass , final Listener < E > listener ) { } }
// The listener was constructed externally, so run dependency injection and
// @PostConstruct callbacks manually before subscribing it. Order matters:
// inject -> postConstruct -> subscribe.
locator . inject ( listener ) ;
locator . postConstruct ( listener ) ;
SystemEventBus . subscribe ( eventClass , listener ) ;
public class ComponentUtils { /** * Algorithm works as follows ; * - If it ' s an input component , submitted value is checked first since it ' d be the value to be used in case validation errors * terminates jsf lifecycle * - Finally the value of the component is retrieved from backing bean and if there ' s a converter , converted value is returned * @ param context FacesContext instance * @ param component UIComponent instance whose value will be returned * @ param value The value of UIComponent if already evaluated outside . E . g . in the renderer . * @ return End text */ public static String getValueToRender ( FacesContext context , UIComponent component , Object value ) { } }
if ( component instanceof ValueHolder ) {
if ( component instanceof EditableValueHolder ) {
EditableValueHolder input = ( EditableValueHolder ) component ;
Object submittedValue = input . getSubmittedValue ( ) ;
PrimeConfiguration config = PrimeApplicationContext . getCurrentInstance ( context ) . getConfig ( ) ;
// With INTERPRET_EMPTY_STRING_AS_NULL enabled, render nothing for an invalid
// input whose submitted value was nulled out during a failed validation.
if ( config . isInterpretEmptyStringAsNull ( ) && submittedValue == null && ! input . isLocalValueSet ( ) && context . isValidationFailed ( ) && ! input . isValid ( ) ) {
return null ;
} else if ( submittedValue != null ) {
// Prefer the raw submitted value: it is what must be re-rendered when
// validation errors short-circuit the JSF lifecycle.
return submittedValue . toString ( ) ;
}
}
ValueHolder valueHolder = ( ValueHolder ) component ;
// UNDEFINED_VALUE is the sentinel for "not already evaluated by the caller".
if ( value == UNDEFINED_VALUE ) {
value = valueHolder . getValue ( ) ;
}
// format the value as string
if ( value != null ) {
Converter converter = valueHolder . getConverter ( ) ;
if ( converter == null ) {
Class valueType = value . getClass ( ) ;
// Fast path: plain Strings need no conversion unless a custom String
// converter is registered with the application.
if ( valueType == String . class && ! PrimeApplicationContext . getCurrentInstance ( context ) . getConfig ( ) . isStringConverterAvailable ( ) ) {
return ( String ) value ;
}
converter = context . getApplication ( ) . createConverter ( valueType ) ;
}
if ( converter != null ) {
return converter . getAsString ( context , component , value ) ;
} else {
return value . toString ( ) ;
// Use toString as a fallback if there is no explicit or implicit converter
}
} else {
// component is a value holder but has no value
return null ;
}
}
// component it not a value holder
return null ;
public class CmpUtil {
    /**
     * Lexicographically compares two {@link Iterable}s whose element types are
     * comparable. Elements are compared pairwise; the first unequal pair decides
     * the result. If one iterable is a strict prefix of the other, the shorter
     * one orders first.
     *
     * @return a negative value, zero, or a positive value as {@code o1} is less
     *         than, equal to, or greater than {@code o2}
     */
    public static <U extends Comparable<? super U>> int lexCompare(Iterable<? extends U> o1, Iterable<? extends U> o2) {
        Iterator<? extends U> first = o1.iterator();
        Iterator<? extends U> second = o2.iterator();
        while (first.hasNext() && second.hasNext()) {
            int elementOrder = first.next().compareTo(second.next());
            if (elementOrder != 0) {
                return elementOrder;
            }
        }
        // All shared elements were equal: whichever side still has elements is larger
        // (+1 when only first remains, -1 when only second remains, 0 otherwise).
        return Boolean.compare(first.hasNext(), second.hasNext());
    }
}
public class PackedSpriteSheet { /** * Get a single named sprite from the sheet * @ param name The name of the sprite to retrieve * @ return The sprite requested ( image of ) */ public Image getSprite ( String name ) { } }
Section section = ( Section ) sections . get ( name ) ; if ( section == null ) { throw new RuntimeException ( "Unknown sprite from packed sheet: " + name ) ; } return image . getSubImage ( section . x , section . y , section . width , section . height ) ;
public class WordVectorSerializer { /** * This method restores ParagraphVectors model previously saved with writeParagraphVectors ( ) * @ return */ public static ParagraphVectors readParagraphVectors ( InputStream stream ) throws IOException { } }
// The file-based reader needs an actual File, so spool the stream to a temp
// file first and delegate to the File overload.
File tmpFile = DL4JFileUtils . createTempFile ( "restore" , "paravec" ) ;
try {
FileUtils . copyInputStreamToFile ( stream , tmpFile ) ;
return readParagraphVectors ( tmpFile ) ;
} finally {
// Best-effort cleanup of the temp file; the delete() result is ignored.
tmpFile . delete ( ) ;
}
public class ScriptableUtils { /** * Deep - hash of an object . * < em > DOES NOT DEAL WITH CIRCULAR REFERENCES ! < / em > * @ param jsObj * @ return { @ code o1 } ' s hash code . */ public static int jsHash ( Object jsObj ) { } }
if ( jsObj == null ) { return 1 ; } // Concatenated strings in Rhino have a different type . We need to manually // resolve to String semantics , which is what the following lines do . if ( jsObj instanceof ConsString ) { return jsObj . toString ( ) . hashCode ( ) ; } return ( jsObj instanceof ScriptableObject ) ? jsScriptableObjectHashCode ( ( ScriptableObject ) jsObj ) : jsObj . hashCode ( ) ;
public class DefaultEntityManager { /** * Returns the effective namespace . If a namespace was specified using { @ link Tenant } , it will be * returned . Otherwise , the namespace of this EntityManager is returned . * @ return the effective namespace . */ String getEffectiveNamespace ( ) { } }
String namespace = Tenant . getNamespace ( ) ; if ( namespace == null ) { namespace = datastore . getOptions ( ) . getNamespace ( ) ; } return namespace ;
public class ClearIdentityVisitor { /** * Visits any Visitable object implementing the Identifiable interface , clearing the Identifiable objects identifier . * @ param visitable the Visitable object visited by this Visitor . * @ see org . cp . elements . lang . Identifiable # setId ( Comparable ) * @ see org . cp . elements . lang . Visitable */ @ Override @ SuppressWarnings ( "unchecked" ) public void visit ( final Visitable visitable ) { } }
if ( visitable instanceof Identifiable ) { ( ( Identifiable ) visitable ) . setId ( null ) ; }
public class CmsPushButton { /** * Disables the button and changes the button title attribute to the disabled reason . < p > * @ param disabledReason the disabled reason */ public void disable ( String disabledReason ) { } }
setDown ( false ) ; setEnabled ( false ) ; m_disabledReason = disabledReason ; super . setTitle ( disabledReason ) ;
public class NumberList { /** * unchecked but we know we can go from Map < String , Long > to Map < String , Object > */ @ SuppressWarnings ( "unchecked" ) public static NumberList delta ( NumberList currentMap , NumberList previousMap ) { } }
// Raw casts bridge the Map<String, Long> fields to the Map<String, Object>
// overload of delta; per the suppression note above, this widening is known safe.
return delta ( ( Map ) currentMap . numbers , ( Map ) previousMap . numbers ) ;
public class CPDefinitionSpecificationOptionValueUtil { /** * Returns the first cp definition specification option value in the ordered set where CPOptionCategoryId = & # 63 ; . * @ param CPOptionCategoryId the cp option category ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching cp definition specification option value * @ throws NoSuchCPDefinitionSpecificationOptionValueException if a matching cp definition specification option value could not be found */ public static CPDefinitionSpecificationOptionValue findByCPOptionCategoryId_First ( long CPOptionCategoryId , OrderByComparator < CPDefinitionSpecificationOptionValue > orderByComparator ) throws com . liferay . commerce . product . exception . NoSuchCPDefinitionSpecificationOptionValueException { } }
// Service-builder style static facade: delegate directly to the persistence
// implementation, which performs the actual finder query.
return getPersistence ( ) . findByCPOptionCategoryId_First ( CPOptionCategoryId , orderByComparator ) ;
public class OfflineImageDecompressor { /** * Process image file . */ private void go ( ) throws IOException { } }
long start = System . currentTimeMillis ( ) ;
System . out . println ( "Decompressing image file: " + inputFile + " to " + outputFile ) ;
DataInputStream in = null ;
DataOutputStream out = null ;
try {
// setup
// Track the byte position so progress can be reported against the file size.
PositionTrackingInputStream ptis = new PositionTrackingInputStream ( new FileInputStream ( new File ( inputFile ) ) ) ;
in = new DataInputStream ( ptis ) ;
// read header information
int imgVersion = in . readInt ( ) ;
// Images whose layout predates compression support cannot be compressed.
if ( ! LayoutVersion . supports ( Feature . FSIMAGE_COMPRESSION , imgVersion ) ) {
System . out . println ( "Image is not compressed. No output will be produced." ) ;
return ;
}
int namespaceId = in . readInt ( ) ;
long numFiles = in . readLong ( ) ;
long genstamp = in . readLong ( ) ;
long imgTxId = - 1 ;
// The transaction id field only exists in newer image layouts.
if ( LayoutVersion . supports ( Feature . STORED_TXIDS , imgVersion ) ) {
imgTxId = in . readLong ( ) ;
}
FSImageCompression compression = FSImageCompression . readCompressionHeader ( new Configuration ( ) , in ) ;
if ( compression . isNoOpCompression ( ) ) {
System . out . println ( "Image is not compressed. No output will be produced." ) ;
return ;
}
// Wrap the remaining input in the decompressor so the payload is read uncompressed.
in = BufferedByteInputStream . wrapInputStream ( compression . unwrapInputStream ( in ) , FSImage . LOAD_SAVE_BUFFER_SIZE , FSImage . LOAD_SAVE_CHUNK_SIZE ) ;
System . out . println ( "Starting decompression." ) ;
// setup output
out = new DataOutputStream ( new BufferedOutputStream ( new FileOutputStream ( outputFile ) ) ) ;
// write back the uncompressed information
out . writeInt ( imgVersion ) ;
out . writeInt ( namespaceId ) ;
out . writeLong ( numFiles ) ;
out . writeLong ( genstamp ) ;
if ( LayoutVersion . supports ( Feature . STORED_TXIDS , imgVersion ) ) {
out . writeLong ( imgTxId ) ;
}
// no compression
out . writeBoolean ( false ) ;
// copy the data
// NOTE: size is the compressed input's length, used only for progress reporting.
long size = new File ( inputFile ) . length ( ) ;
// read in 1MB chunks
byte [ ] block = new byte [ 1024 * 1024 ] ;
while ( true ) {
int bytesRead = in . read ( block ) ;
if ( bytesRead <= 0 ) break ;
out . write ( block , 0 , bytesRead ) ;
printProgress ( ptis . getPos ( ) , size ) ;
}
out . close ( ) ;
long stop = System . currentTimeMillis ( ) ;
System . out . println ( "Input file : " + inputFile + " size: " + size ) ;
System . out . println ( "Output file: " + outputFile + " size: " + new File ( outputFile ) . length ( ) ) ;
System . out . println ( "Decompression completed in " + ( stop - start ) + " ms." ) ;
} finally {
if ( in != null ) in . close ( ) ;
if ( out != null ) out . close ( ) ;
}
public class JSONObject { /** * Put a key / double pair in the JSONObject . * @ param key A key string . * @ param value A double which is the value . * @ return this * @ throws JSONException If the key is null or if the number is invalid . */ public JSONObject put ( String key , double value ) throws JSONException { } }
put ( key , Double . valueOf ( value ) ) ; return this ;
public class SpringUtil { /** * Returns a property value from the application context . * @ param name Property name . * @ return Property value , or null if not found . */ public static String getProperty ( String name ) { } }
// Lazily initialize the property provider on first use.
// NOTE(review): this check-then-init is not synchronized; under concurrent first
// access initPropertyProvider() may run more than once — confirm that is benign.
if ( propertyProvider == null ) {
initPropertyProvider ( ) ;
}
return propertyProvider . getProperty ( name ) ;
public class vrid_binding { /** * Use this API to fetch vrid _ binding resource of given name . */ public static vrid_binding get ( nitro_service service , Long id ) throws Exception { } }
vrid_binding obj = new vrid_binding ( ) ; obj . set_id ( id ) ; vrid_binding response = ( vrid_binding ) obj . get_resource ( service ) ; return response ;
public class Configuration { /** * Extracts the specified file from this configuration ' s zip file , if applicable . * @ param targetFile * The target file . The file name without the path information is taken in order to * identify the zip entry to extract . * @ param forceOverwrite * If { @ code true } , an existing file is overwritten . */ public void extractFromArchive ( final File targetFile , final boolean forceOverwrite ) throws IOException { } }
// No-op when this configuration is not backed by a zip archive.
if ( zipArchive != null ) {
// The entry is looked up by file name only (path information is ignored).
ZipEntry entry = zipArchive . getEntry ( targetFile . getName ( ) ) ;
if ( entry != null ) {
if ( ! targetFile . exists ( ) ) {
try {
targetFile . createNewFile ( ) ;
} catch ( IOException ex ) {
throw new JFunkException ( "Error creating file: " + targetFile , ex ) ;
}
} else if ( ! forceOverwrite ) {
// Existing file and no overwrite requested: keep it untouched.
return ;
}
logger . info ( "Loading file '{}' from zip archive..." , targetFile ) ;
OutputStream out = null ;
InputStream in = null ;
try {
out = new FileOutputStream ( targetFile ) ;
in = zipArchive . getInputStream ( entry ) ;
IOUtils . copy ( in , out ) ;
} finally {
// Quiet close: any copy failure has already propagated from IOUtils.copy.
IOUtils . closeQuietly ( in ) ;
IOUtils . closeQuietly ( out ) ;
}
} else {
logger . error ( "Could not find file '{}' in zip archive" , targetFile ) ;
}
}
public class Utils { /** * Converts spaces to dashes . * @ param str a string with spaces * @ param replaceWith a string to replace spaces with * @ return a string with dashes */ public static String noSpaces ( String str , String replaceWith ) { } }
return StringUtils . isBlank ( str ) ? "" : str . trim ( ) . replaceAll ( "[\\p{C}\\p{Z}]+" , StringUtils . trimToEmpty ( replaceWith ) ) . toLowerCase ( ) ;
public class DListImpl { /** * Creates a new < code > DList < / code > object that contains the contents of this * < code > DList < / code > object concatenated * with the contents of the < code > otherList < / code > object . * @ paramotherListThe list whose elements are placed at the end of the list * returned by this method . * @ returnA new < code > DList < / code > that is the concatenation of this list and * the list referenced by < code > otherList < / code > . */ public DList concat ( DList otherList ) { } }
DListImpl result = new DListImpl ( pbKey ) ; result . addAll ( this ) ; result . addAll ( otherList ) ; return result ;
public class DomainRuntimeView { /** * / * @ Override * public void onClearActiveSelection ( ClearFinderSelectionEvent event ) { * serverColWidget . getElement ( ) . removeClassName ( " active " ) ; * subsysColWidget . getElement ( ) . removeClassName ( " active " ) ; * statusColWidget . getElement ( ) . removeClassName ( " active " ) ; */ @ Override public Widget createWidget ( ) { } }
/*
 * Builds the runtime finder UI: a "Server" column, a "Monitor" (status) column and a
 * "Subsystem" column. Each column gets its display renderer, key provider, menus and
 * (where applicable) preview/tooltip factories; the columns are then registered with
 * the column manager and selection handlers are wired up. Returns the split layout.
 *
 * The first section creates the server column: folder icon when started, row CSS by
 * server/suspend state, a name-based filter, and the "Add"/"Refresh" top menu.
 */
serverColumn = new FinderColumn < Server > ( FinderColumn . FinderId . RUNTIME , "Server" , new FinderColumn . Display < Server > ( ) { @ Override public boolean isFolder ( Server data ) { return data . isStarted ( ) ; } @ Override public SafeHtml render ( String baseCss , Server server ) { String context = presenter . getFilter ( ) . equals ( FilterType . HOST ) ? server . getGroup ( ) : server . getHostName ( ) ; return SERVER_TEMPLATE . item ( baseCss , "" , server . getName ( ) , context ) ; } @ Override public String rowCss ( Server server ) { String css = "" ; // TODO : reload state if ( ! server . isStarted ( ) ) { css = "paused" ; } else if ( server . getServerState ( ) == SrvState . RELOAD_REQUIRED ) { css = "warn" ; } else if ( server . getServerState ( ) == SrvState . RESTART_REQUIRED ) { css = "warn" ; } else if ( server . getSuspendState ( ) == SuspendState . SUSPENDED ) { css = "info" ; } else if ( server . isStarted ( ) ) { css = "good" ; } return css ; } } , new ProvidesKey < Server > ( ) { @ Override public Object getKey ( Server item ) { return item . getName ( ) + item . getHostName ( ) + ":" + item . getServerState ( ) ; } } , presenter . getProxy ( ) . getNameToken ( ) ) ; serverColumn . setShowSize ( true ) ; serverColumn . setFilter ( ( item , token ) -> { return item . getName ( ) . contains ( token ) ; } ) ; serverColumn . setValueProvider ( new ValueProvider < Server > ( ) { @ Override public String get ( Server item ) { return item . getName ( ) ; } } ) ; serverColumn . setTopMenuItems ( new MenuDelegate < Server > ( Console . CONSTANTS . common_label_add ( ) , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { presenter . launchNewConfigDialoge ( ) ; } } , MenuDelegate . Role . Operation ) , new MenuDelegate < Server > ( Console . CONSTANTS . common_label_refresh ( ) , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { presenter .
/*
 * Continues the server column: finishes the "Refresh" top menu entry, then installs
 * the preview factory (state summary plus an async port-offset read for running
 * servers) and begins the per-server context menu with a "View" entry.
 */
refreshServer ( ) ; } } , MenuDelegate . Role . Navigation ) ) ; serverColumn . setPreviewFactory ( new PreviewFactory < Server > ( ) { @ Override public void createPreview ( Server data , AsyncCallback < SafeHtml > callback ) { SafeHtmlBuilder html = new SafeHtmlBuilder ( ) ; html . appendHtmlConstant ( "<div class='preview-content'>" ) ; html . appendHtmlConstant ( "<h2>" ) ; html . appendEscaped ( "Server Configuration" ) ; html . appendHtmlConstant ( "</h2>" ) ; html . appendEscaped ( Console . CONSTANTS . serverDescription ( ) ) ; // TODO : reload state if ( ! data . isStarted ( ) ) { PreviewState . paused ( html , "Server is stopped" ) ; } else if ( data . getServerState ( ) == SrvState . RELOAD_REQUIRED ) { PreviewState . warn ( html , Console . CONSTANTS . server_instance_reloadRequired ( ) ) ; } else if ( data . getServerState ( ) == SrvState . RESTART_REQUIRED ) { PreviewState . warn ( html , "Server needs to be restarted" ) ; } else if ( data . getSuspendState ( ) == SuspendState . SUSPENDED ) { PreviewState . info ( html , "Server is suspended" ) ; } else if ( data . getServerState ( ) == SrvState . RUNNING ) { String id = "port-offset-" + data . getGroup ( ) + "-" + data . getName ( ) ; html . appendHtmlConstant ( "<p>" ) . appendEscaped ( "Port offset: " ) . appendHtmlConstant ( "<span id=\"" + id + "\">" ) . appendHtmlConstant ( "</span>" ) . appendHtmlConstant ( "</p>" ) ; new ReadPortOffsetOp ( dispatcher ) . execute ( data , id ) ; } html . appendHtmlConstant ( "</div>" ) ; callback . onSuccess ( html . toSafeHtml ( ) ) ; } } ) ; serverColumn . setMenuItems ( new MenuDelegate < Server > ( "View" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( final Server server ) { placeManager . revealRelativePlace ( new PlaceRequest ( NameTokens . ServerPresenter ) .
/*
 * Continues the server context menu: "Remove" (with confirmation), "Copy", and the
 * dynamic "Start/Stop" entry whose label and lifecycle operation depend on the
 * server's current runtime state.
 */
with ( "action" , "edit" ) ) ; } } ) , new MenuDelegate < Server > ( "Remove" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( final Server server ) { Feedback . confirm ( "Remove server" , "Do you really want to remove server " + server . getName ( ) + "?" , new Feedback . ConfirmationHandler ( ) { @ Override public void onConfirmation ( boolean isConfirmed ) { if ( isConfirmed ) { presenter . tryDelete ( presenter . getSelectedServer ( ) ) ; } else { presenter . closeWindow ( ) ; } } } ) ; } } , MenuDelegate . Role . Operation ) , new MenuDelegate < Server > ( "Copy" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { presenter . onLaunchCopyWizard ( server ) ; } } , MenuDelegate . Role . Operation ) , new MenuDelegate < Server > ( "Start/Stop" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { LifecycleOperation op = ( server . getRuntimeState ( ) == RuntimeState . STOPPED || server . getRuntimeState ( ) == RuntimeState . DISABLED ) ? LifecycleOperation . START : LifecycleOperation . STOP ; if ( LifecycleOperation . START == op ) { Feedback . confirm ( "Server " + op . name ( ) , "Do you really want to " + op . name ( ) + " server " + server . getName ( ) + "?" , new Feedback . ConfirmationHandler ( ) { @ Override public void onConfirmation ( boolean isConfirmed ) { if ( isConfirmed ) { presenter . onServerInstanceLifecycle ( server . getHostName ( ) , server . getName ( ) , op ) ; } } } ) ; } else { presenter . onLaunchStopDialogue ( server ) ; } } } , MenuDelegate . Role . Operation ) { @ Override public String render ( Server server ) { return server . isStarted ( ) ? "Stop" : "Start" ; } } . setOperationAddress ( "/{implicit.host}/server-config=*" , "start" ) , new MenuDelegate < Server > ( "Suspend or not" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { LifecycleOperation op = server .
/*
 * Continues the server context menu: the Suspend/Resume toggle and the "Reload" and
 * "Restart" entries, each guarded by a confirmation dialog and bound to an RBAC
 * operation address on the host's server-config resource.
 */
getSuspendState ( ) == SuspendState . SUSPENDED ? LifecycleOperation . RESUME : LifecycleOperation . SUSPEND ; if ( LifecycleOperation . RESUME == op ) { presenter . onServerInstanceLifecycle ( server . getHostName ( ) , server . getName ( ) , op ) ; } else { presenter . onLaunchSuspendDialogue ( server ) ; } } } , MenuDelegate . Role . Operation ) { @ Override public String render ( Server server ) { return server . getSuspendState ( ) == SuspendState . SUSPENDED ? "Resume" : "Suspend" ; } } . setOperationAddress ( "/{implicit.host}/server-config=*" , "resume" ) , new MenuDelegate < Server > ( "Reload" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { Feedback . confirm ( "Reload Server" , "Do you really want to reload server " + server . getName ( ) + "?" , new Feedback . ConfirmationHandler ( ) { @ Override public void onConfirmation ( boolean isConfirmed ) { if ( isConfirmed ) { presenter . onServerInstanceLifecycle ( server . getHostName ( ) , server . getName ( ) , LifecycleOperation . RELOAD ) ; } } } ) ; } } , MenuDelegate . Role . Operation ) . setOperationAddress ( "/{implicit.host}/server-config=*" , "reload" ) , new MenuDelegate < Server > ( "Restart" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { Feedback . confirm ( "Restart Server" , "Do you really want to restart server " + server . getName ( ) + "?" , new Feedback . ConfirmationHandler ( ) { @ Override public void onConfirmation ( boolean isConfirmed ) { if ( isConfirmed ) { presenter . onServerInstanceLifecycle ( server . getHostName ( ) , server . getName ( ) , LifecycleOperation . RESTART ) ; } } } ) ; } } , MenuDelegate . Role . Operation ) . setOperationAddress ( "/{implicit.host}/server-config=*" , "restart" ) , new MenuDelegate < Server > ( "Force Shutdown" , new ContextualCommand < Server > ( ) { @ Override public void executeOn ( Server server ) { Feedback .
/*
 * Finishes the server menu with "Force Shutdown" (kill), installs the server tooltip
 * (running state plus reload/restart/suspend hints), and starts building the status
 * ("Monitor") column.
 */
confirm ( "Shutdown Server" , "Do you really want to shutdown server " + server . getName ( ) + "?" , new Feedback . ConfirmationHandler ( ) { @ Override public void onConfirmation ( boolean isConfirmed ) { if ( isConfirmed ) { presenter . onServerInstanceLifecycle ( server . getHostName ( ) , server . getName ( ) , LifecycleOperation . KILL ) ; } } } ) ; } } , MenuDelegate . Role . Operation ) . setOperationAddress ( "/{implicit.host}/server-config=*" , "kill" ) ) ; serverColumn . setTooltipDisplay ( new FinderColumn . TooltipDisplay < Server > ( ) { @ Override public SafeHtml render ( Server server ) { String message = server . isStarted ( ) ? "running" : "not running" ; SafeHtmlBuilder sb = new SafeHtmlBuilder ( ) ; /* if ( data . isStarted ( ) ) sb . appendHtmlConstant ( " < i class = \ " icon - ok \ " style = ' color : # 3F9C35 ' > < / i > & nbsp ; " ) ; else sb . appendHtmlConstant ( " < i class = \ " icon - ban - circle \ " style = ' color : # CC0000 ' > < / i > & nbsp ; " ) ; */ sb . appendEscaped ( "Server is " ) . appendEscaped ( message ) ; if ( server . getServerState ( ) == SrvState . RELOAD_REQUIRED ) { sb . appendEscaped ( ". " + Console . CONSTANTS . server_instance_reloadRequired ( ) ) ; } else if ( server . getServerState ( ) == SrvState . RESTART_REQUIRED ) { sb . appendEscaped ( ". " + Console . CONSTANTS . server_instance_servers_needRestart ( ) ) ; } else if ( server . getSuspendState ( ) == SuspendState . SUSPENDED ) { sb . appendEscaped ( ", but suspended" ) ; } return sb . toSafeHtml ( ) ; } } ) ; serverColWidget = serverColumn . asWidget ( ) ; statusColumn = new FinderColumn < FinderItem > ( FinderColumn . FinderId . RUNTIME , "Monitor" , new FinderColumn . Display < FinderItem > ( ) { @ Override public boolean isFolder ( FinderItem data ) { return data . isFolder ( ) ; } @ Override public SafeHtml render ( String baseCss , FinderItem data ) { String icon = data . isFolder ( ) ?
/*
 * Finishes the status column (icon/row rendering, key provider, "View" menu) and
 * builds the subsystem column, including its preview factory that loads external
 * text resources keyed by the subsystem's place token.
 */
"icon-folder-close-alt" : "icon-file-alt" ; return STATUS_TEMPLATE . item ( baseCss , icon , data . getTitle ( ) ) ; } @ Override public String rowCss ( FinderItem data ) { return data . getTitle ( ) . equals ( "Subsystems" ) ? "no-menu" : "" ; } } , new ProvidesKey < FinderItem > ( ) { @ Override public Object getKey ( FinderItem item ) { return item . getTitle ( ) ; } } , presenter . getProxy ( ) . getNameToken ( ) ) ; statusColumn . setMenuItems ( new MenuDelegate < FinderItem > ( "View" , new ContextualCommand < FinderItem > ( ) { @ Override public void executeOn ( FinderItem link ) { link . getCmd ( ) . execute ( ) ; } } ) ) ; statusColWidget = statusColumn . asWidget ( ) ; subsystemColumn = new FinderColumn < PlaceLink > ( FinderColumn . FinderId . RUNTIME , "Subsystem" , new FinderColumn . Display < PlaceLink > ( ) { @ Override public boolean isFolder ( PlaceLink data ) { return false ; } @ Override public SafeHtml render ( String baseCss , PlaceLink data ) { return SUBSYSTEM_TEMPLATE . item ( baseCss , "icon-file-alt" , data . getTitle ( ) ) ; } @ Override public String rowCss ( PlaceLink data ) { return "" ; } } , new ProvidesKey < PlaceLink > ( ) { @ Override public Object getKey ( PlaceLink item ) { return item . getTitle ( ) ; } } , presenter . getProxy ( ) . getNameToken ( ) ) ; subsystemColumn . setPreviewFactory ( new PreviewFactory < PlaceLink > ( ) { @ Override public void createPreview ( PlaceLink data , AsyncCallback < SafeHtml > callback ) { PreviewContent content = PreviewContent . INSTANCE ; ExternalTextResource resource = ( ExternalTextResource ) content . getResource ( "runtime_" + data . getToken ( ) . replace ( "-" , "_" ) ) ; if ( resource != null ) { contentFactory . createContent ( resource , callback ) ; } else { SafeHtmlBuilder builder = new SafeHtmlBuilder ( ) ; builder .
/*
 * Fallback subsystem preview (generic chart icon), the subsystem "View" menu, column
 * manager registration (server column always visible), and the selection handlers
 * that expand/collapse columns as the user drills down. Finally returns the layout.
 */
appendHtmlConstant ( "<div class='preview-content'><span style='font-size:24px;'><i class='icon-bar-chart' style='font-size:48px;vertical-align:middle'></i>&nbsp;" + data . getTitle ( ) + "</span></center>" ) ; builder . appendHtmlConstant ( "</div>" ) ; callback . onSuccess ( builder . toSafeHtml ( ) ) ; } } } ) ; subsystemColumn . setMenuItems ( new MenuDelegate < PlaceLink > ( "View" , new ContextualCommand < PlaceLink > ( ) { @ Override public void executeOn ( PlaceLink link ) { link . getCmd ( ) . execute ( ) ; } } ) ) ; subsysColWidget = subsystemColumn . asWidget ( ) ; // server column is always present columnManager . addWest ( serverColWidget ) ; columnManager . addWest ( statusColWidget ) ; columnManager . addWest ( subsysColWidget ) ; columnManager . add ( contentCanvas ) ; columnManager . setInitialVisible ( 1 ) ; // selection handling serverColumn . addSelectionChangeHandler ( new SelectionChangeEvent . Handler ( ) { @ Override public void onSelectionChange ( SelectionChangeEvent event ) { // column handling columnManager . reduceColumnsTo ( 1 ) ; if ( serverColumn . hasSelectedItem ( ) ) { // selection columnManager . updateActiveSelection ( serverColWidget ) ; final Server selectedServer = serverColumn . getSelectedItem ( ) ; // action if ( selectedServer . isStarted ( ) ) { columnManager . appendColumn ( statusColWidget ) ; } Console . getCircuit ( ) . dispatch ( new SelectServer ( selectedServer . getHostName ( ) , selectedServer . getName ( ) ) ) ; } } } ) ; statusColumn . addSelectionChangeHandler ( new SelectionChangeEvent . Handler ( ) { @ Override public void onSelectionChange ( SelectionChangeEvent event ) { if ( statusColumn . hasSelectedItem ( ) ) { columnManager . updateActiveSelection ( statusColWidget ) ; Scheduler . get ( ) . scheduleDeferred ( new Scheduler . ScheduledCommand ( ) { @ Override public void execute ( ) { if ( statusColumn . getSelectedItem ( ) . getTitle ( ) . equals ( "Subsystems" ) ) { columnManager .
appendColumn ( subsysColWidget ) ; updateSubsystemColumn ( subsystems ) ; } else { columnManager . reduceColumnsTo ( 2 ) ; } } } ) ; } else { columnManager . reduceColumnsTo ( 2 ) ; } } } ) ; subsystemColumn . addSelectionChangeHandler ( new SelectionChangeEvent . Handler ( ) { @ Override public void onSelectionChange ( SelectionChangeEvent event ) { if ( subsystemColumn . hasSelectedItem ( ) ) { columnManager . updateActiveSelection ( subsysColWidget ) ; } } } ) ; return splitlayout . asWidget ( ) ;
public class Mutation { /** * Specify a column family name and the corresponding column * family object . * param @ cf - column family name * param @ columnFamily - the column family . */ public void add ( ColumnFamily columnFamily ) { } }
assert columnFamily != null ; ColumnFamily prev = modifications . put ( columnFamily . id ( ) , columnFamily ) ; if ( prev != null ) // developer error throw new IllegalArgumentException ( "ColumnFamily " + columnFamily + " already has modifications in this mutation: " + prev ) ;
public class StringUtilities { /** * Returns < CODE > true < / CODE > if the given string matches the given regular expression . * @ param str The string against which the expression is to be matched * @ param expr The regular expression to match with the input string * @ param whole Indicates that a whole word match is required * @ return < CODE > true < / CODE > if a match was found */ public static boolean isWildcardMatch ( String str , String expr , boolean whole ) { } }
return getWildcardMatcher ( str , expr , whole ) . find ( ) ;
public class BasicFunctionsRuntime { /** * Returns the largest ( closest to positive infinity ) integer value that is less than or equal to * the argument . */ public static long floor ( SoyValue arg ) { } }
if ( arg instanceof IntegerData ) { return ( ( IntegerData ) arg ) . longValue ( ) ; } else { return ( long ) Math . floor ( arg . floatValue ( ) ) ; }
public class Compose { /** * Transformation operation * @ param fn Transformation function * @ param ds Datastructure to transform * @ return Transformed data structure */ public < T , R > Higher < CRE , Higher < C2 , R > > map ( Function < ? super T , ? extends R > fn , Higher < CRE , Higher < C2 , T > > ds ) { } }
return f . map ( h -> g . map ( fn , h ) , ds ) ;
public class ClassScanner { /** * Find all classes that match the class name pattern . * @ param classNamePattern the class name pattern * @ param scannedClasses the Map for scanned classes * @ throws IOException if an I / O error has occurred */ public void scan ( String classNamePattern , final Map < String , Class < ? > > scannedClasses ) throws IOException { } }
scan ( classNamePattern , scannedClasses :: put ) ;
public class AbnormalFinallyBlockReturn {
    /**
     * overrides the visitor to find return / exceptions from the finally block.
     *
     * @param seen the opcode that is being visited
     */
    @Override
    public void sawOpcode(int seen) {
        // Inspect only the earliest tracked finally block; nested blocks are
        // presumably handled once this one is removed — TODO confirm against fbInfo management.
        FinallyBlockInfo fbi = fbInfo.get(0);
        // Not yet inside the finally block: nothing to check.
        if (getPC() < fbi.startPC) {
            return;
        }
        if (getPC() == fbi.startPC) {
            if (OpcodeUtils.isAStore(seen)) {
                // The finally block conventionally begins by storing the in-flight
                // exception; remember which local register holds it.
                fbi.exReg = RegisterUtils.getAStoreReg(this, seen);
            } else {
                // First opcode is not an astore — this isn't the finally shape we
                // track; drop the block and re-dispatch this opcode against the next one.
                removeEarliestFinallyBlock();
                sawOpcode(seen);
                return;
            }
            return;
        }
        // Track monitor nesting; falling below zero means the block released a
        // monitor it didn't enter, so the bookkeeping no longer applies.
        if (seen == Const.MONITORENTER) {
            fbi.monitorCount++;
        } else if (seen == Const.MONITOREXIT) {
            fbi.monitorCount--;
            if (fbi.monitorCount < 0) {
                removeEarliestFinallyBlock();
                sawOpcode(seen);
                return;
            }
        }
        if ((seen == Const.ATHROW) && (loadedReg == fbi.exReg)) {
            // Rethrowing the saved exception is the NORMAL end of a finally block —
            // not a bug; retire this block and re-dispatch the opcode.
            removeEarliestFinallyBlock();
            sawOpcode(seen);
            return;
        } else if (OpcodeUtils.isALoad(seen)) {
            // Remember which register was just loaded so the ATHROW check above
            // can tell a rethrow of the saved exception from a new throw.
            loadedReg = RegisterUtils.getALoadReg(this, seen);
        } else {
            loadedReg = -1;
        }
        if (OpcodeUtils.isReturn(seen) || (seen == Const.ATHROW)) {
            // A return, or a throw of something other than the saved exception,
            // abnormally terminates the finally block.
            bugReporter.reportBug(new BugInstance(this, BugType.AFBR_ABNORMAL_FINALLY_BLOCK_RETURN.name(),
                    NORMAL_PRIORITY).addClass(this).addMethod(this).addSourceLine(this));
            removeEarliestFinallyBlock();
        } else if (OpcodeUtils.isStandardInvoke(seen)) {
            try {
                // An invoked method that declares checked exceptions can also exit
                // the finally block abnormally — unless a catch covers it here.
                JavaClass cls = Repository.lookupClass(getClassConstantOperand());
                Method m = findMethod(cls, getNameConstantOperand(), getSigConstantOperand());
                if (m != null) {
                    ExceptionTable et = m.getExceptionTable();
                    if ((et != null) && (et.getLength() > 0) && !catchBlockInFinally(fbi)) {
                        // Lower priority: the call *may* throw, unlike an explicit return/throw.
                        bugReporter.reportBug(new BugInstance(this, BugType.AFBR_ABNORMAL_FINALLY_BLOCK_RETURN.name(),
                                LOW_PRIORITY).addClass(this).addMethod(this).addSourceLine(this));
                        removeEarliestFinallyBlock();
                    }
                }
            } catch (ClassNotFoundException cnfe) {
                bugReporter.reportMissingClass(cnfe);
            }
        }
    }
}
public class ColumnListEditor { /** * Checks that if only single column used in data set - > it cannot be unselected . */ private boolean checkSingleColumnEditorDisabled ( ) { } }
final int size = listEditor . getList ( ) . size ( ) ; final boolean hasEditors = ! listEditor . getEditors ( ) . isEmpty ( ) ; if ( size == 1 && hasEditors ) { setEditorEnabled ( 0 , false , DataSetEditorConstants . INSTANCE . dataSetMustHaveAtLeastOneColumn ( ) ) ; return true ; } return false ;
public class TransactionSavedRecentResult { protected String deriveOtherTypeValueTitle ( BehaviorCommandMeta meta ) { } }
final String valueTitle ; if ( meta . isInsert ( ) ) { valueTitle = "inserted" ; } else if ( meta . isUpdate ( ) ) { valueTitle = "updated" ; } else if ( meta . isDelete ( ) ) { valueTitle = "deleted" ; } else if ( meta . isSelectCount ( ) ) { valueTitle = "count" ; } else { valueTitle = "value" ; } return valueTitle ;