signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Manager { /** * in CBLManager . m * - ( NSString * ) pathForDatabaseNamed : ( NSString * ) name * @ exclude */ @ InterfaceAudience . Private private String pathForDatabaseNamed ( String name ) { } }
if ( ( name == null ) || ( name . length ( ) == 0 ) || Pattern . matches ( LEGAL_CHARACTERS , name ) ) return null ; // NOTE : CouchDB allows forward slash as part of database name . // However , ' : ' is illegal character on Windows platform . // For Windows , substitute with period ' . ' name = isWindows ( ) ? name . replace ( '/' , '.' ) : name . replace ( '/' , ':' ) ; String result = directoryFile . getPath ( ) + File . separator + name + Manager . kDBExtension ; return result ;
public class LoganSquare {
    /**
     * Serialize a map of objects to a JSON String.
     *
     * @param map The map of objects to serialize.
     * @param jsonObjectClass The @JsonObject class of the list elements
     * @return the JSON representation of {@code map}
     * @throws IOException if the underlying mapper fails to write JSON
     */
    public static <E> String serialize(Map<String, E> map, Class<E> jsonObjectClass) throws IOException {
        // Look up the generated mapper for the value class and delegate serialization to it.
        return mapperFor(jsonObjectClass).serialize(map);
    }
}
public class Identity {
    /**
     * Reads this Function.
     *
     * @param in source to read from
     * @throws ClassNotFoundException if a referenced class cannot be resolved
     * @throws NoSuchMethodException if the deserialized method reference is invalid
     * @throws IOException on stream failure
     */
    private void readObject(ObjectInputStream in) throws ClassNotFoundException, NoSuchMethodException, IOException {
        // Read order must mirror the corresponding writeObject: result type,
        // argument type, then method name.
        // NOTE(review): write order inferred from this method only — confirm against writeObject.
        result = ClassRef.read(in);
        arg = ClassRef.read(in);
        name = in.readUTF();
    }
}
public class RatelimitManager {
    /**
     * Queues the given request.
     * This method is automatically called when using {@link RestRequest#execute(Function)}!
     *
     * @param request The request to queue.
     */
    public void queueRequest(RestRequest<?> request) {
        final RatelimitBucket bucket;
        final boolean alreadyInQueue;
        synchronized (buckets) {
            // Search for a bucket that fits to this request
            bucket = buckets.stream()
                    .filter(b -> b.equals(request.getEndpoint(), request.getMajorUrlParameter().orElse(null)))
                    .findAny()
                    .orElseGet(() -> new RatelimitBucket(
                            api, request.getEndpoint(), request.getMajorUrlParameter().orElse(null)));
            // Must be executed BEFORE adding the request to the queue
            alreadyInQueue = bucket.peekRequestFromQueue() != null;
            // Add the bucket to the set of buckets (does nothing if it's already in the set)
            buckets.add(bucket);
            // Add the request to the bucket's queue
            bucket.addRequestToQueue(request);
        }
        // If the bucket is already in the queue, there's nothing more to do:
        // a worker task is already draining it.
        if (alreadyInQueue) {
            return;
        }
        // Start working off the queue on a pooled thread
        api.getThreadPool().getExecutorService().submit(() -> {
            RestRequest<?> currentRequest = bucket.peekRequestFromQueue();
            RestRequestResult result = null;
            long responseTimestamp = System.currentTimeMillis();
            while (currentRequest != null) {
                try {
                    int sleepTime = bucket.getTimeTillSpaceGetsAvailable();
                    if (sleepTime > 0) {
                        logger.debug("Delaying requests to {} for {}ms to prevent hitting ratelimits",
                                bucket, sleepTime);
                    }
                    // Sleep until space is available
                    while (sleepTime > 0) {
                        try {
                            Thread.sleep(sleepTime);
                        } catch (InterruptedException e) {
                            logger.warn("We got interrupted while waiting for a rate limit!", e);
                        }
                        // Update in case something changed (e.g. because we hit a global ratelimit)
                        sleepTime = bucket.getTimeTillSpaceGetsAvailable();
                    }
                    // Execute the request
                    result = currentRequest.executeBlocking();
                    // Calculate the time offset, if it wasn't done before
                    responseTimestamp = System.currentTimeMillis();
                } catch (Throwable t) {
                    responseTimestamp = System.currentTimeMillis();
                    if (currentRequest.getResult().isDone()) {
                        logger.warn("Received exception for a request that is already done. "
                                + "This should not be able to happen!", t);
                    }
                    // Try to get the response from the exception if it exists
                    if (t instanceof DiscordException) {
                        result = ((DiscordException) t).getResponse()
                                .map(RestRequestResponseInformationImpl.class::cast)
                                .map(RestRequestResponseInformationImpl::getRestRequestResult)
                                .orElse(null);
                    }
                    // Complete the request
                    currentRequest.getResult().completeExceptionally(t);
                } finally {
                    try {
                        // Calculate offset
                        calculateOffset(responseTimestamp, result);
                        // Handle the response
                        handleResponse(currentRequest, result, bucket, responseTimestamp);
                    } catch (Throwable t) {
                        logger.warn("Encountered unexpected exception.", t);
                    }
                    // The request didn't finish, so let's try again.
                    // NOTE(review): `continue` inside finally is deliberate here — any
                    // exception was already caught above, so nothing is swallowed.
                    if (!currentRequest.getResult().isDone()) {
                        continue;
                    }
                    // Poll the next request; remove the bucket once its queue is empty
                    synchronized (buckets) {
                        bucket.pollRequestFromQueue();
                        currentRequest = bucket.peekRequestFromQueue();
                        if (currentRequest == null) {
                            buckets.remove(bucket);
                        }
                    }
                }
            }
        });
    }
}
public class MatrixUtil { /** * Embed type information . On success , modify the matrix . */ static void embedTypeInfo ( ErrorCorrectionLevel ecLevel , int maskPattern , ByteMatrix matrix ) throws WriterException { } }
BitArray typeInfoBits = new BitArray ( ) ; makeTypeInfoBits ( ecLevel , maskPattern , typeInfoBits ) ; for ( int i = 0 ; i < typeInfoBits . getSize ( ) ; ++ i ) { // Place bits in LSB to MSB order . LSB ( least significant bit ) is the last value in // " typeInfoBits " . boolean bit = typeInfoBits . get ( typeInfoBits . getSize ( ) - 1 - i ) ; // Type info bits at the left top corner . See 8.9 of JISX0510:2004 ( p . 46 ) . int [ ] coordinates = TYPE_INFO_COORDINATES [ i ] ; int x1 = coordinates [ 0 ] ; int y1 = coordinates [ 1 ] ; matrix . set ( x1 , y1 , bit ) ; if ( i < 8 ) { // Right top corner . int x2 = matrix . getWidth ( ) - i - 1 ; int y2 = 8 ; matrix . set ( x2 , y2 , bit ) ; } else { // Left bottom corner . int x2 = 8 ; int y2 = matrix . getHeight ( ) - 7 + ( i - 8 ) ; matrix . set ( x2 , y2 , bit ) ; } }
public class KeyVaultClientBaseImpl {
    /**
     * Gets the creation operation of a certificate.
     * Gets the creation operation associated with a specified certificate. This operation
     * requires the certificates/get permission.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param certificateName The name of the certificate.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws KeyVaultErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the CertificateOperation object if successful.
     */
    public CertificateOperation getCertificateOperation(String vaultBaseUrl, String certificateName) {
        // Delegate to the async variant and block for the single expected response body.
        return getCertificateOperationWithServiceResponseAsync(vaultBaseUrl, certificateName)
                .toBlocking().single().body();
    }
}
public class NamedPathPruner { /** * Simple recursive descent parser : */ static Tree parse ( String spec ) throws IllegalArgumentException { } }
Reader r = new Reader ( spec ) ; Tree t = new Tree ( ) ; list ( r , t ) ; r . expect ( Token . EOF ) ; return t ;
public class FaceletCompositionContextImpl { /** * Remove a component from the last level of components marked to be deleted . * @ param id */ private UIComponent removeComponentForDeletion ( String id ) { } }
UIComponent removedComponent = _componentsMarkedForDeletion . get ( _deletionLevel ) . remove ( id ) ; if ( removedComponent != null && _deletionLevel > 0 ) { _componentsMarkedForDeletion . get ( _deletionLevel - 1 ) . remove ( id ) ; } return removedComponent ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcRampTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class SecretDetector {
    /**
     * Find all the positions of aws key id and aws secret key.
     * The time complexity is O(n).
     *
     * @param text the sql text which may contain aws key
     * @return a list of begin/end positions of aws key id and aws secret key
     */
    private static List<SecretRange> getAWSSecretPos(String text) {
        // log before and after in case this is causing StackOverflowError
        LOGGER.debug("pre-regex getAWSSecretPos");
        Matcher keyMatcher = AWS_KEY_PATTERN.matcher(text);
        ArrayList<SecretRange> ranges = new ArrayList<>();
        while (keyMatcher.find()) {
            // Start at the look-ahead point past the match and walk backwards
            // over base64 characters to find the start of the secret.
            int start = Math.min(keyMatcher.end() + LOOK_AHEAD, text.length());
            while (start > 0 && start < text.length() && isBase64(text.charAt(start))) {
                start--;
            }
            // From the same point, walk forwards over base64 characters to find the end.
            int stop = Math.min(keyMatcher.end() + LOOK_AHEAD, text.length());
            while (stop < text.length() && isBase64(text.charAt(stop))) {
                stop++;
            }
            boolean inBounds = start >= 0 && stop >= 0 && start < text.length() && stop <= text.length();
            if (inBounds) {
                ranges.add(new SecretRange(start + 1, stop));
            }
        }
        LOGGER.debug("post-regex getAWSSecretPos");
        return ranges;
    }
}
public class StaticNodeIdManager { /** * Lasy - create staticNodeId for the given node id . * @ param nodeId the node id generated by the node itself . * @ return the staticNodeId eg . payment - - - 0 */ public static String getStaticNodeId ( String nodeId ) { } }
String application = NodeIdBean . parse ( nodeId ) . getApplication ( ) ; if ( nodeId_to_StaticFinalId_map . get ( nodeId ) == null ) { Instance < NodeStatus > instance = ApplicationDiscovery . singleton . instance ( nodeId ) ; if ( instance != null ) { LOG . debug ( "Instance with given nodeId is found online, then initialize its static node id and put it into the map." ) ; onEvent ( nodeId ) ; } else { LOG . warn ( new IllegalArgumentException ( "Bad node id or the node is offline: " + nodeId ) ) ; } } return nodeId_to_StaticFinalId_map . get ( nodeId ) == null ? application + NodeIdBean . splitter + "null" : // it represents a none exited static id . application + NodeIdBean . splitter + nodeId_to_StaticFinalId_map . get ( nodeId ) ;
public class TypesResource { /** * Fetch the complete definition of a given type name which is unique . * @ param typeName name of a type which is unique . */ @ GET @ Path ( "{typeName}" ) @ Produces ( Servlets . JSON_MEDIA_TYPE ) public Response getDefinition ( @ Context HttpServletRequest request , @ PathParam ( "typeName" ) String typeName ) { } }
if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "==> TypesResource.getDefinition({})" , typeName ) ; } AtlasPerfTracer perf = null ; if ( AtlasPerfTracer . isPerfTraceEnabled ( PERF_LOG ) ) { perf = AtlasPerfTracer . getPerfTracer ( PERF_LOG , "TypesResource.getDefinition(" + typeName + ")" ) ; } JSONObject response = new JSONObject ( ) ; try { TypesDef typesDef = TypeConverterUtil . toTypesDef ( typeRegistry . getType ( typeName ) , typeRegistry ) ; ; String typeDefinition = TypesSerialization . toJson ( typesDef ) ; response . put ( AtlasClient . TYPENAME , typeName ) ; response . put ( AtlasClient . DEFINITION , new JSONObject ( typeDefinition ) ) ; response . put ( AtlasClient . REQUEST_ID , Servlets . getRequestId ( ) ) ; return Response . ok ( response ) . build ( ) ; } catch ( AtlasBaseException e ) { LOG . error ( "Unable to get type definition for type {}" , typeName , e ) ; throw new WebApplicationException ( Servlets . getErrorResponse ( e ) ) ; } catch ( JSONException | IllegalArgumentException e ) { LOG . error ( "Unable to get type definition for type {}" , typeName , e ) ; throw new WebApplicationException ( Servlets . getErrorResponse ( e , Response . Status . BAD_REQUEST ) ) ; } catch ( WebApplicationException e ) { LOG . error ( "Unable to get type definition for type {}" , typeName , e ) ; throw e ; } catch ( Throwable e ) { LOG . error ( "Unable to get type definition for type {}" , typeName , e ) ; throw new WebApplicationException ( Servlets . getErrorResponse ( e , Response . Status . INTERNAL_SERVER_ERROR ) ) ; } finally { AtlasPerfTracer . log ( perf ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "<== TypesResource.getDefinition({})" , typeName ) ; } }
public class ExemptionMechanism { /** * Returns whether the result blob has been generated successfully by this * exemption mechanism . * < p > The method also makes sure that the key passed in is the same as * the one this exemption mechanism used in initializing and generating * phases . * @ param key the key the crypto is going to use . * @ return whether the result blob of the same key has been generated * successfully by this exemption mechanism ; false if < code > key < / code > * is null . * @ exception ExemptionMechanismException if problem ( s ) encountered * while determining whether the result blob has been generated successfully * by this exemption mechanism object . */ public final boolean isCryptoAllowed ( Key key ) throws ExemptionMechanismException { } }
boolean ret = false ; if ( done && ( key != null ) ) { // Check if the key passed in is the same as the one // this exemption mechanism used . ret = keyStored . equals ( key ) ; } return ret ;
public class IconicsDrawable { /** * Update the Padding Bounds */ private void updatePaddingBounds ( @ NonNull Rect viewBounds ) { } }
if ( mIconPadding >= 0 && mIconPadding * 2 <= viewBounds . width ( ) && mIconPadding * 2 <= viewBounds . height ( ) ) { mPaddingBounds . set ( viewBounds . left + mIconPadding , viewBounds . top + mIconPadding , viewBounds . right - mIconPadding , viewBounds . bottom - mIconPadding ) ; }
public class PlatformDescription { /** * The programming languages supported by the platform . * @ param programmingLanguages * The programming languages supported by the platform . */ public void setProgrammingLanguages ( java . util . Collection < PlatformProgrammingLanguage > programmingLanguages ) { } }
if ( programmingLanguages == null ) { this . programmingLanguages = null ; return ; } this . programmingLanguages = new com . amazonaws . internal . SdkInternalList < PlatformProgrammingLanguage > ( programmingLanguages ) ;
public class TraversalStep { /** * End the traversal of the domain object graph matching a specific type of domain objects * @ param domainObjectType * @ return a DomainObjectMatch */ public < T > DomainObjectMatch < T > TO ( Class < T > domainObjectType ) { } }
TraversalExpression te = ( TraversalExpression ) this . astObject ; DomainObjectMatch < T > ret = APIAccess . createDomainObjectMatch ( domainObjectType , te . getQueryExecutor ( ) . getDomainObjectMatches ( ) . size ( ) , te . getQueryExecutor ( ) . getMappingInfo ( ) ) ; te . getQueryExecutor ( ) . getDomainObjectMatches ( ) . add ( ret ) ; te . setEnd ( ret ) ; QueryRecorder . recordAssignment ( this , "TO" , ret , QueryRecorder . literal ( domainObjectType . getName ( ) ) ) ; return ret ;
public class VarOptItemsSketch {
    /**
     * Returns a sketch instance of this class from the given srcMem,
     * which must be a Memory representation of this sketch class.
     *
     * @param <T> The type of item this sketch contains
     * @param srcMem a Memory representation of a sketch of this class.
     * <a href="{@docRoot}/resources/dictionary.html#mem">See Memory</a>
     * @param serDe An instance of ArrayOfItemsSerDe
     * @return a sketch instance of this class
     */
    @SuppressWarnings("null")
    public static <T> VarOptItemsSketch<T> heapify(final Memory srcMem, final ArrayOfItemsSerDe<T> serDe) {
        // ---- Read and validate the preamble ----
        final int numPreLongs = getAndCheckPreLongs(srcMem);
        final ResizeFactor rf = ResizeFactor.getRF(extractResizeFactor(srcMem));
        final int serVer = extractSerVer(srcMem);
        final int familyId = extractFamilyID(srcMem);
        final int flags = extractFlags(srcMem);
        final boolean isEmpty = (flags & EMPTY_FLAG_MASK) != 0;
        final boolean isGadget = (flags & GADGET_FLAG_MASK) != 0;
        // Check values: only three preamble sizes are legal (min, warm-up, max).
        if ((numPreLongs != Family.VAROPT.getMinPreLongs())
                && (numPreLongs != Family.VAROPT.getMaxPreLongs())
                && (numPreLongs != PreambleUtil.VO_WARMUP_PRELONGS)) {
            throw new SketchesArgumentException("Possible corruption: Must have "
                    + Family.VAROPT.getMinPreLongs() + ", " + PreambleUtil.VO_WARMUP_PRELONGS
                    + ", or " + Family.VAROPT.getMaxPreLongs() + " preLongs. Found: " + numPreLongs);
        }
        if (serVer != SER_VER) {
            throw new SketchesArgumentException("Possible Corruption: Ser Ver must be "
                    + SER_VER + ": " + serVer);
        }
        final int reqFamilyId = Family.VAROPT.getID();
        if (familyId != reqFamilyId) {
            throw new SketchesArgumentException("Possible Corruption: FamilyID must be "
                    + reqFamilyId + ": " + familyId);
        }
        final int k = extractK(srcMem);
        if (k < 1) {
            throw new SketchesArgumentException("Possible Corruption: k must be at least 1: " + k);
        }
        // An empty sketch carries only the minimal preamble; nothing more to read.
        if (isEmpty) {
            assert numPreLongs == Family.VAROPT.getMinPreLongs();
            return new VarOptItemsSketch<>(k, rf);
        }
        final long n = extractN(srcMem);
        if (n < 0) {
            throw new SketchesArgumentException("Possible Corruption: n cannot be negative: " + n);
        }
        // get rest of preamble: H-region (heavy items) and R-region (reservoir) counts.
        final int hCount = extractHRegionItemCount(srcMem);
        final int rCount = extractRRegionItemCount(srcMem);
        if (hCount < 0) {
            throw new SketchesArgumentException("Possible Corruption: H region count cannot be "
                    + "negative: " + hCount);
        }
        if (rCount < 0) {
            throw new SketchesArgumentException("Possible Corruption: R region count cannot be "
                    + "negative: " + rCount);
        }
        // The full-size preamble additionally stores the total R-region weight.
        double totalRWeight = 0.0;
        if (numPreLongs == Family.VAROPT.getMaxPreLongs()) {
            if (rCount > 0) {
                totalRWeight = extractTotalRWeight(srcMem);
            } else {
                throw new SketchesArgumentException("Possible Corruption: "
                        + Family.VAROPT.getMaxPreLongs() + " preLongs but no items in R region");
            }
        }
        final int preLongBytes = numPreLongs << 3;
        final int totalItems = hCount + rCount;
        int allocatedItems = k + 1; // default to full
        if (rCount == 0) {
            // Not in sampling mode, so determine size to allocate, using ceilingLog2(hCount) as minimum
            final int ceilingLgK = Util.toLog2(Util.ceilingPowerOf2(k), "heapify");
            final int minLgSize = Util.toLog2(Util.ceilingPowerOf2(hCount), "heapify");
            final int initialLgSize = SamplingUtil.startingSubMultiple(
                    ceilingLgK, rf.lg(), Math.max(minLgSize, MIN_LG_ARR_ITEMS));
            allocatedItems = SamplingUtil.getAdjustedSize(k, 1 << initialLgSize);
            if (allocatedItems == k) {
                ++allocatedItems;
            }
        }
        // ---- Read the H-region weights ----
        // allocate full-sized ArrayLists, but we store only hCount weights at any moment
        final long weightOffsetBytes = TOTAL_WEIGHT_R_DOUBLE + (rCount > 0 ? Double.BYTES : 0);
        final ArrayList<Double> weightList = new ArrayList<>(allocatedItems);
        final double[] wts = new double[allocatedItems];
        srcMem.getDoubleArray(weightOffsetBytes, wts, 0, hCount);
        // can't use Arrays.asList(wts) since double[] rather than Double[]
        for (int i = 0; i < hCount; ++i) {
            if (wts[i] <= 0.0) {
                throw new SketchesArgumentException("Possible Corruption: "
                        + "Non-positive weight in heapify(): " + wts[i]);
            }
            weightList.add(wts[i]);
        }
        // ---- Read marks, if we have a gadget ----
        long markBytes = 0;
        int markCount = 0;
        ArrayList<Boolean> markList = null;
        if (isGadget) {
            final long markOffsetBytes = preLongBytes + ((long) hCount * Double.BYTES);
            markBytes = ArrayOfBooleansSerDe.computeBytesNeeded(hCount);
            markList = new ArrayList<>(allocatedItems);
            final ArrayOfBooleansSerDe booleansSerDe = new ArrayOfBooleansSerDe();
            final Boolean[] markArray = booleansSerDe.deserializeFromMemory(
                    srcMem.region(markOffsetBytes, (hCount >>> 3) + 1), hCount);
            for (Boolean mark : markArray) {
                if (mark) {
                    ++markCount;
                }
            }
            markList.addAll(Arrays.asList(markArray));
        }
        // ---- Read the serialized items (H items first, then R items) ----
        final long offsetBytes = preLongBytes + ((long) hCount * Double.BYTES) + markBytes;
        final T[] data = serDe.deserializeFromMemory(
                srcMem.region(offsetBytes, srcMem.getCapacity() - offsetBytes), totalItems);
        final List<T> wrappedData = Arrays.asList(data);
        final ArrayList<T> dataList = new ArrayList<>(allocatedItems);
        dataList.addAll(wrappedData.subList(0, hCount));
        // Load items in R as needed. A single "gap" slot separates the H and R regions;
        // R-region weights are stored as the sentinel -1.0.
        if (rCount > 0) {
            weightList.add(-1.0); // the gap
            if (isGadget) {
                markList.add(false); // the gap
            }
            for (int i = 0; i < rCount; ++i) {
                weightList.add(-1.0);
                if (isGadget) {
                    markList.add(false);
                }
            }
            dataList.add(null); // the gap
            dataList.addAll(wrappedData.subList(hCount, totalItems));
        }
        final VarOptItemsSketch<T> sketch = new VarOptItemsSketch<>(
                dataList, weightList, k, n, allocatedItems, rf, hCount, rCount, totalRWeight);
        if (isGadget) {
            sketch.marks_ = markList;
            sketch.numMarksInH_ = markCount;
        }
        return sketch;
    }
}
public class TreeElement { /** * Helper routine that will recursively search the tree for the node . * @ param fullName The full name that we are looking for * @ param currentName The name of the current node . * @ return The node matching the name or < code > null < / code > */ private TreeElement findNodeRecurse ( String fullName , String currentName ) { } }
String remainingName = null ; if ( ( currentName == null ) || ( fullName == null ) ) { return null ; } if ( getName ( ) . equals ( fullName ) ) { return this ; } if ( currentName . indexOf ( '.' ) > 0 ) { remainingName = currentName . substring ( currentName . indexOf ( '.' ) + 1 ) ; int nextIndex = - 1 ; if ( remainingName . indexOf ( "." ) > - 1 ) { nextIndex = new Integer ( remainingName . substring ( 0 , remainingName . indexOf ( '.' ) ) ) . intValue ( ) ; } else { nextIndex = new Integer ( remainingName ) . intValue ( ) ; } TreeElement child = getChild ( nextIndex ) ; if ( child != null ) { return child . findNodeRecurse ( fullName , remainingName ) ; } else { return null ; } } return null ;
public class TargetHttpsProxyClient {
    /**
     * Creates a TargetHttpsProxy resource in the specified project using the data included in the
     * request.
     *
     * <p>Sample code:
     *
     * <pre><code>
     * try (TargetHttpsProxyClient targetHttpsProxyClient = TargetHttpsProxyClient.create()) {
     *   ProjectName project = ProjectName.of("[PROJECT]");
     *   TargetHttpsProxy targetHttpsProxyResource = TargetHttpsProxy.newBuilder().build();
     *   Operation response = targetHttpsProxyClient.insertTargetHttpsProxy(project.toString(), targetHttpsProxyResource);
     * }
     * </code></pre>
     *
     * @param project Project ID for this request.
     * @param targetHttpsProxyResource A TargetHttpsProxy resource. This resource defines an HTTPS
     *     proxy. (== resource_for beta.targetHttpsProxies ==) (== resource_for v1.targetHttpsProxies
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi
    public final Operation insertTargetHttpsProxy(String project, TargetHttpsProxy targetHttpsProxyResource) {
        // Build the HTTP request wrapper and delegate to the request-based overload.
        InsertTargetHttpsProxyHttpRequest request =
                InsertTargetHttpsProxyHttpRequest.newBuilder()
                        .setProject(project)
                        .setTargetHttpsProxyResource(targetHttpsProxyResource)
                        .build();
        return insertTargetHttpsProxy(request);
    }
}
public class AStatisticsSupporter {
    /**
     * Computes the five-number summary (min, max, 1st quartile, median, 3rd quartile)
     * for each requested column. Non-numeric columns yield all-NaN summaries.
     *
     * TODO: refactor the code to move the engine-specific computation to the corresponding ddf-on-x
     *
     * @param columnNames the columns to summarize
     * @return one FiveNumSummary per requested column, in the same order
     * @throws DDFException if the engine is unsupported or the query fails
     */
    @Override
    public FiveNumSummary[] getFiveNumSummary(List<String> columnNames) throws DDFException {
        FiveNumSummary[] fivenums = new FiveNumSummary[columnNames.size()];
        List<String> numericColumns = new ArrayList<String>();
        // Filter non-numeric columns
        for (String columnName : columnNames) {
            if (ColumnType.isNumeric(this.getDDF().getColumn(columnName).getType())) {
                numericColumns.add(columnName);
            }
        }
        // rs holds 5 flat values per numeric column, engine-dependent layout.
        String[] rs = null;
        if (numericColumns.size() > 0) {
            if (this.getDDF().getEngineType().equals(DDFManager.EngineType.SPARK)) {
                // Build one Hive expression per column and run them in a single SELECT.
                List<String> specs = Lists.newArrayList();
                for (String columnName : columnNames) {
                    String query = fiveNumHiveFunction(columnName);
                    if (query != null && query.length() > 0) {
                        specs.add(query);
                    }
                }
                String command = String.format("SELECT %s FROM %%s",
                        StringUtils.join(specs.toArray(new String[0]), ','));
                mLog.info(">>>> command = " + command);
                // a fivenumsummary of an Int/Long column is in the format
                // "[min, max, 1st_quantile, median, 3rd_quantile]" — each value can be a NULL.
                // a fivenumsummary of a Double/Float column is in the format
                // "min \t max \t [1st_quantile, median, 3rd_quantile]" or "min \t max \t null"s.
                // Strip brackets/spaces and normalize commas to tabs before splitting.
                rs = this.getDDF()
                        .sql(command, String.format("Unable to get fivenum summary of the given columns from table %%s"))
                        .getRows().get(0)
                        .replaceAll("\\[|\\]| ", "")
                        .replaceAll(",", "\t")
                        .split("\t| ");
            } else if (this.getDDF().getEngineType().equals(DDFManager.EngineType.POSTGRES)
                    || this.getDDF().getEngineType().equals(DDFManager.EngineType.REDSHIFT)) {
                rs = new String[numericColumns.size() * 5];
                int k = 0;
                // Need to separately compute each column's five-num values.
                // Otherwise, combining all into a query will produce incorrect result in Redshift case.
                // May be due to the nature of its window functions.
                for (String column : numericColumns) {
                    String sql = buildPostgresFiveNumSql(column, this.getDDF().getTableName());
                    String[] ret = this.getDDF().getManager().sql(sql, false).getRows().get(0).split("\t");
                    System.arraycopy(ret, 0, rs, k, 5);
                    k += 5;
                }
            } else {
                throw new DDFException("Unsupported engine");
            }
        }
        // Walk the requested columns; k indexes into rs in groups of 5 for numeric ones.
        int k = 0;
        for (int i = 0; i < columnNames.size(); i++) {
            if (!numericColumns.contains(columnNames.get(i))) {
                fivenums[i] = new FiveNumSummary(Double.NaN, Double.NaN, Double.NaN, Double.NaN, Double.NaN);
            } else {
                fivenums[i] = new FiveNumSummary(
                        parseDouble(rs[5 * k]),
                        parseDouble(rs[5 * k + 1]),
                        parseDouble(rs[5 * k + 2]),
                        parseDouble(rs[5 * k + 3]),
                        parseDouble(rs[5 * k + 4]));
                k++;
            }
        }
        return fivenums;
    }
}
public class X509CRLEntryImpl { /** * get an extension * @ param oid ObjectIdentifier of extension desired * @ returns Extension of type < extension > or null , if not found */ public Extension getExtension ( ObjectIdentifier oid ) { } }
if ( extensions == null ) return null ; // following returns null if no such OID in map // XXX consider cloning this return extensions . get ( OIDMap . getName ( oid ) ) ;
public class Arguments { /** * Gets the first value for the key . * @ param key the key to check for the value * @ return the value or { @ code null } if the key is not found or the value was { @ code null } */ public String get ( final String key ) { } }
final Collection < Argument > args = map . get ( key ) ; if ( args != null ) { return args . iterator ( ) . hasNext ( ) ? args . iterator ( ) . next ( ) . getValue ( ) : null ; } return null ;
public class PerPartitionTable {
    /**
     * Synchronized so that when a single batch is filled up, we only queue one task to
     * drain the queue. The task will drain the queue until it doesn't contain a single batch.
     *
     * @param nextRow the row to enqueue for this partition
     * @throws InterruptedException if blocked while putting into the queue
     */
    synchronized void insertRowInTable(final VoltBulkLoaderRow nextRow) throws InterruptedException {
        m_partitionRowQueue.put(nextRow);
        // Exact-equality check (not >=) ensures only the insert that crosses the batch
        // boundary schedules a drain task, so at most one task is queued per batch
        // (guaranteed because this method is synchronized).
        if (m_partitionRowQueue.size() == m_minBatchTriggerSize) {
            m_es.execute(new Runnable() {
                @Override
                public void run() {
                    try {
                        // Keep draining while at least one full batch remains.
                        while (m_partitionRowQueue.size() >= m_minBatchTriggerSize) {
                            loadTable(buildTable(), m_table);
                        }
                    } catch (Exception e) {
                        loaderLog.error("Failed to load batch", e);
                    }
                }
            });
        }
    }
}
public class EpanetWrapper { /** * Retrieves the value of a specific link ( node ? ) parameter . * @ param index the node index . * @ param code the parameter code . * @ return the value at the node . * @ throws EpanetException */ public float ENgetnodevalue ( int index , NodeParameters code ) throws EpanetException { } }
float [ ] nodeValue = new float [ 1 ] ; int error = epanet . ENgetnodevalue ( index , code . getCode ( ) , nodeValue ) ; checkError ( error ) ; return nodeValue [ 0 ] ;
public class Patterns {
    /**
     * Returns a {@link Pattern} that matches 0 or more characters satisfying {@code predicate}.
     */
    public static Pattern many(final CharPredicate predicate) {
        return new Pattern() {
            @Override
            public int match(CharSequence src, int begin, int end) {
                // Greedily consume characters satisfying the predicate starting at begin;
                // zero consumed characters is still a successful match.
                return matchMany(predicate, src, end, begin, 0);
            }

            @Override
            public String toString() {
                // e.g. "isDigit*" — Kleene-star notation for "zero or more".
                return predicate + "*";
            }
        };
    }
}
public class KeyGenUtil { /** * Generates a cache key by aggregating ( concatenating ) the output of * each of the cache key generators in the array . * @ param request The request object * @ param keyGens The array * @ return The aggregated cache key */ static public String generateKey ( HttpServletRequest request , Iterable < ICacheKeyGenerator > keyGens ) { } }
StringBuffer sb = new StringBuffer ( ) ; for ( ICacheKeyGenerator keyGen : keyGens ) { String key = keyGen . generateKey ( request ) ; if ( key != null && key . length ( ) > 0 ) { sb . append ( sb . length ( ) > 0 ? ";" : "" ) . append ( key ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ } } return sb . toString ( ) ;
public class ApiOvhCloud { /** * Create interface on an instance and attached it to a network * REST : POST / cloud / project / { serviceName } / instance / { instanceId } / interface * @ param instanceId [ required ] Instance id * @ param ip [ required ] Static ip ( Can only be defined for private networks ) * @ param networkId [ required ] Network id * @ param serviceName [ required ] Service name */ public OvhInterface project_serviceName_instance_instanceId_interface_POST ( String serviceName , String instanceId , String ip , String networkId ) throws IOException { } }
String qPath = "/cloud/project/{serviceName}/instance/{instanceId}/interface" ; StringBuilder sb = path ( qPath , serviceName , instanceId ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "ip" , ip ) ; addBody ( o , "networkId" , networkId ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhInterface . class ) ;
public class BinaryHashPartition { /** * After build phase . * @ return build spill return buffer , if have spilled , it returns the current write buffer , * because it was used all the time in build phase , so it can only be returned at this time . */ int finalizeBuildPhase ( IOManager ioAccess , FileIOChannel . Enumerator probeChannelEnumerator ) throws IOException { } }
this . finalBufferLimit = this . buildSideWriteBuffer . getCurrentPositionInSegment ( ) ; this . partitionBuffers = this . buildSideWriteBuffer . close ( ) ; if ( ! isInMemory ( ) ) { // close the channel . this . buildSideChannel . close ( ) ; this . probeSideBuffer = FileChannelUtil . createOutputView ( ioAccess , probeChannelEnumerator . next ( ) , compressionEnable , compressionCodecFactory , compressionBlockSize , memorySegmentSize ) ; return 1 ; } else { return 0 ; }
public class PasswordCipher { /** * Use the key to produce a clear text String from the cipherText . If no key * is provided , or if no type is specified , just return the text as is . */ public static String decipher ( String key , String cipherText , String cipherType ) { } }
if ( key == null || key == "" ) { return cipherText ; } if ( cipherText == null ) { return null ; } if ( cipherType == null || cipherType == "" ) { return cipherText ; } else if ( "1" . equalsIgnoreCase ( cipherType ) ) { byte [ ] keyBytes = convertKeyToByteArray ( key ) ; byte [ ] cipherBytes = Base64 . decode ( cipherText ) ; sanityCheckOnCipherBytes ( cipherText , cipherBytes ) ; byte [ ] clearTextBytes = applyCipher ( keyBytes , cipherBytes ) ; return convertByteArrayToClearText ( clearTextBytes ) ; } else { throw new IllegalArgumentException ( "Unrecognized cipher type: '" + cipherType + "'" ) ; }
public class ZoneMeta { /** * Parse a custom time zone identifier and return the normalized * custom time zone identifier for the given custom id string . * @ param id a string of the form GMT [ + - ] hh : mm , GMT [ + - ] hhmm , or * GMT [ + - ] hh . * @ return The normalized custom id string . */ public static String getCustomID ( String id ) { } }
// Parse the id into its sign/hour/minute/second fields; a failed parse yields null.
final int[] fields = new int[4];
if (!parseCustomID(id, fields)) {
    return null;
}
// fields[0] carries the sign; the remaining slots are hour, minute, second.
final boolean negative = fields[0] < 0;
return formatCustomID(fields[1], fields[2], fields[3], negative);
public class Job { /** * Signal exceptional cancellation of this job . * @ param msg cancellation message explaining reason for cancelation */ public void cancel ( final String msg ) { } }
// A null message marks an ordinary cancellation; any message marks the job failed.
final JobState targetState;
if (msg == null) {
    targetState = JobState.CANCELLED;
} else {
    targetState = JobState.FAILED;
}
cancel(msg, targetState);
public class AnimaCache { /** * User - > users * User - > t _ users * @ param className * @ param prefix * @ return */ public static String getTableName ( String className , String prefix ) { } }
// Underline the class name, optionally attach the prefix, then pluralize:
// User -> users, (prefix "t") User -> t_users.
final String underlined = AnimaUtils.toUnderline(className);
final String singularName;
if (prefix == null || prefix.trim().isEmpty()) {
    singularName = underlined;
} else {
    singularName = prefix + "_" + underlined;
}
return English.plural(singularName, 2);
public class CoverageMonitor { /** * Records the given file as a dependency after some filtering . * @ param f * File to record as a dependency */ public static void addFileURL ( File f ) { } }
// Skip files the filter rejects; record the rest by their external URL form.
final String path = f.getAbsolutePath();
if (filterFile(path)) {
    return;
}
try {
    recordURL(f.toURI().toURL().toExternalForm());
} catch (MalformedURLException e) {
    // Never expected: the URL is derived from an existing file's URI.
}
public class PartitionManagerImpl { /** * Auxiliary function to transform a PartitionEntry object into a plain map representation . */ private void add ( Map < String , List < String > > partition , PartitionEntry entry ) { } }
// Append the entry's trigger id to its tenant bucket, creating the bucket on first use.
final String tenant = entry.getTenantId();
List<String> triggers = partition.get(tenant);
if (triggers == null) {
    triggers = new ArrayList<>();
    partition.put(tenant, triggers);
}
triggers.add(entry.getTriggerId());
public class TeasyExpectedConditions { /** * Trick with zero coordinates for not - displayed element works only in FF */ private static boolean isElementHiddenUnderScroll ( WebElement element ) { } }
// Only meaningful in Firefox: non-displayed elements there report (0, 0),
// so strictly positive coordinates on both axes indicate an element that is
// merely scrolled out of view rather than not displayed.
if (!ExecutionUtils.isFF()) {
    return false;
}
return element.getLocation().getX() > 0 && element.getLocation().getY() > 0;
public class AbstractSampleRandomGroupsController { /** * Do some magic to turn request parameters into a context object */ private ProctorContext getProctorContext ( final HttpServletRequest request ) throws IllegalAccessException , InstantiationException { } }
// Reflectively instantiate a fresh context object of the configured class.
final ProctorContext proctorContext = contextClass.newInstance();
// Wrap it so request parameters can be copied onto bean properties with
// Spring's type conversion.
final BeanWrapper beanWrapper = new BeanWrapperImpl(proctorContext);
for (final PropertyDescriptor descriptor : beanWrapper.getPropertyDescriptors()) {
    final String propertyName = descriptor.getName();
    if (!"class".equals(propertyName)) { // ignore class property which every object has
        final String parameterValue = request.getParameter(propertyName);
        if (parameterValue != null) {
            // BeanWrapper converts the request's String value to the property's declared type.
            beanWrapper.setPropertyValue(propertyName, parameterValue);
        }
    }
}
return proctorContext;
public class BpmnParse { /** * Parses the start events of a certain level in the process ( process , * subprocess or another scope ) . * @ param parentElement * The ' parent ' element that contains the start events ( process , * subprocess ) . * @ param scope * The { @ link ScopeImpl } to which the start events must be added . */ public void parseStartEvents ( Element parentElement , ScopeImpl scope ) { } }
// Collect every <startEvent> child of this scope element.
List<Element> startEventElements = parentElement.elements("startEvent");
List<ActivityImpl> startEventActivities = new ArrayList<ActivityImpl>();
for (Element startEventElement : startEventElements) {
    ActivityImpl startEventActivity = createActivityOnScope(startEventElement, scope);
    parseAsynchronousContinuationForActivity(startEventElement, startEventActivity);
    if (scope instanceof ProcessDefinitionEntity) {
        // Top-level process: collect the activities so the initial one can be
        // selected after the loop.
        parseProcessDefinitionStartEvent(startEventActivity, startEventElement, parentElement, scope);
        startEventActivities.add(startEventActivity);
    } else {
        // Embedded scope (e.g. sub-process): parse against the activity scope.
        parseScopeStartEvent(startEventActivity, startEventElement, parentElement, (ActivityImpl) scope);
    }
    ensureNoIoMappingDefined(startEventElement);
    parseExecutionListenersOnScope(startEventElement, startEventActivity);
    // Give every registered parse listener a chance to react to this start event.
    for (BpmnParseListener parseListener : parseListeners) {
        parseListener.parseStartEvent(startEventElement, scope, startEventActivity);
    }
}
if (scope instanceof ProcessDefinitionEntity) {
    // For processes: pick the initial start event and wire up start form handlers.
    selectInitial(startEventActivities, (ProcessDefinitionEntity) scope, parentElement);
    parseStartFormHandlers(startEventElements, (ProcessDefinitionEntity) scope);
}
public class Row { /** * Returns an entry from a given column name as a LocalDate ( possibly null ) . Throws an IllegalArgumentException if the name is not defined * in the header definition . Throws a DateTimeParseException if the underlying data is not a date in ISO local date format ( YYYY - MM - DD ) . * @ param column the column name of the entry , not null * @ return the entry , can be null if the value is null or empty */ public LocalDate getLocalDate ( final String column ) { } }
// Resolve the column name against the header (throws if undefined),
// then delegate to the index-based accessor.
final int columnIndex = _headerDefinition.columnIndex(column);
return getLocalDate(columnIndex);
public class DateTime {

    /**
     * Parses the numeric prefix of a duration string and returns it as a number.
     * E.g. "1d" returns 1; "60m" returns 60.
     *
     * @param duration The duration to parse in the format #units, e.g. "1d" or "60m"
     * @return The interval as an integer, regardless of units.
     * @throws IllegalArgumentException if the duration is null, empty, fractional,
     *         non-positive, or its numeric prefix cannot be parsed.
     * @since 2.4
     */
    public static final int getDurationInterval(final String duration) {
        if (duration == null || duration.isEmpty()) {
            throw new IllegalArgumentException("Duration cannot be null or empty");
        }
        if (duration.contains(".")) {
            throw new IllegalArgumentException("Floating point intervals are not supported");
        }
        // Scan the numeric prefix, stopping at the units suffix or the end of
        // the string. Fix: the original ran charAt past the end for unit-less
        // input such as "60", raising StringIndexOutOfBoundsException instead
        // of a meaningful error.
        int unit = 0;
        while (unit < duration.length() && Character.isDigit(duration.charAt(unit))) {
            unit++;
        }
        final int interval;
        try {
            interval = Integer.parseInt(duration.substring(0, unit));
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Invalid duration (number): " + duration, e);
        }
        if (interval <= 0) {
            throw new IllegalArgumentException("Zero or negative duration: " + duration);
        }
        return interval;
    }
}
public class DFAState { /** * Returns a set of nfA states to where it is possible to move by nfa transitions . * @ param condition * @ return */ Set < NFAState < T > > nfaTransitsFor ( CharRange condition ) { } }
// Union of the transition targets of every NFA state in this DFA state
// for the given character range.
final Set<NFAState<T>> reachable = new NumSet<>();
for (final NFAState<T> state : nfaSet) {
    reachable.addAll(state.transit(condition));
}
return reachable;
public class Vector2d { /** * / * ( non - Javadoc ) * @ see org . joml . Vector2dc # mul ( org . joml . Vector2dc , org . joml . Vector2d ) */ public Vector2d mul ( Vector2dc v , Vector2d dest ) { } }
dest . x = x * v . x ( ) ; dest . y = y * v . y ( ) ; return dest ;
public class Group { /** * Returns the group ' s hierarchy */ private Vector get_hierarchy ( ) { } }
// Recursively builds a nested Vector mirroring the group tree: device
// elements are added directly, while sub-groups contribute their own
// hierarchy Vector as a nested element. Synchronized to get a consistent
// snapshot of `elements` while iterating.
synchronized (this) {
    final Vector h = new Vector();
    final Iterator it = elements.iterator();
    while (it.hasNext()) {
        final GroupElement e = (GroupElement) it.next();
        if (e instanceof GroupDeviceElement) {
            // leaf: a concrete device entry
            h.add(e);
        } else {
            // nested group: recurse and add its hierarchy as a sub-vector
            h.add(((Group) e).get_hierarchy());
        }
    }
    return h;
}
public class LdapIdentityStore { /** * Get the groups for the caller * @ param context The { @ link DirContext } to use when performing the search . * @ param callerDn The caller ' s distinguished name . * @ return The set of groups the caller is a member of . */ private Set < String > getGroups ( DirContext context , String callerDn ) { } }
// Choose the lookup strategy: without an explicit group search base/filter,
// fall back to reading the caller's membership attribute; otherwise run a
// member search under the configured base.
final String searchBase = idStoreDefinition.getGroupSearchBase();
final String searchFilter = idStoreDefinition.getGroupSearchFilter();
if (searchBase.isEmpty() || searchFilter.isEmpty()) {
    return getGroupsByMembership(context, callerDn);
}
return getGroupsByMember(context, callerDn, searchBase, searchFilter);
public class ShamirSecretSharing { /** * Verify if all shares have the same prime . If they do not , then they are not from the same series and cannot * possibly be joined . * @ param shares to check * @ return true if all shares have the same prime , false if not . */ private static boolean checkSamePrimes ( List < SecretShare > shares ) { } }
// Compare every share's prime against the first one seen; any mismatch
// means the shares are not from the same series.
BigInteger reference = null;
for (final SecretShare share : shares) {
    if (reference == null) {
        reference = share.getPrime();
    } else if (!reference.equals(share.getPrime())) {
        return false;
    }
}
return true;
public class wisite_binding { /** * Use this API to fetch wisite _ binding resources of given names . */ public static wisite_binding [ ] get ( nitro_service service , String sitepath [ ] ) throws Exception { } }
if ( sitepath != null && sitepath . length > 0 ) { wisite_binding response [ ] = new wisite_binding [ sitepath . length ] ; wisite_binding obj [ ] = new wisite_binding [ sitepath . length ] ; for ( int i = 0 ; i < sitepath . length ; i ++ ) { obj [ i ] = new wisite_binding ( ) ; obj [ i ] . set_sitepath ( sitepath [ i ] ) ; response [ i ] = ( wisite_binding ) obj [ i ] . get_resource ( service ) ; } return response ; } return null ;
public class AuditSparqlProcessor { /** * Returns the Audit event type based on fedora event type and resource types . * @ param eventType event types from Fedora * @ param resourceType resource types associated with the Fedora event * @ return Audit event type , or empty if the event does not map to an audit event */ private static Optional < String > getAuditEventType ( final List < String > eventType , final List < String > resourceType ) { } }
// mapping event type / properties to audit event type if ( eventType . contains ( EVENT_NAMESPACE + "ResourceCreation" ) || eventType . contains ( AS_NAMESPACE + "Create" ) ) { if ( resourceType . contains ( REPOSITORY + "Binary" ) ) { return of ( CONTENT_ADD ) ; } else { return of ( OBJECT_ADD ) ; } } else if ( eventType . contains ( EVENT_NAMESPACE + "ResourceDeletion" ) || eventType . contains ( AS_NAMESPACE + "Delete" ) ) { if ( resourceType . contains ( REPOSITORY + "Binary" ) ) { return of ( CONTENT_REM ) ; } else { return of ( OBJECT_REM ) ; } } else if ( eventType . contains ( EVENT_NAMESPACE + "ResourceModification" ) || eventType . contains ( AS_NAMESPACE + "Update" ) ) { if ( resourceType . contains ( REPOSITORY + "Binary" ) ) { return of ( CONTENT_MOD ) ; } else { return of ( METADATA_MOD ) ; } } return empty ( ) ;
public class PropertiesTableModel { /** * { @ inheritDoc } */ @ Override public String getColumnName ( final int columnIndex ) { } }
// Map the column index onto the enum and return its display label;
// unknown columns fall back to an empty header.
switch (PropertiesColumns.values()[columnIndex]) {
    case KEY:
        return "Key";
    case VALUE:
        return "Value";
    default:
        return "";
}
public class EuclidianDistance {

    /**
     * Computes the Euclidean distance between two samples of equal length.
     * Index pairs in which either value is NaN are skipped, i.e. they
     * contribute nothing to the distance.
     *
     * @param sample1 the first sample of {@code double} values
     * @param sample2 the second sample of {@code double} values
     * @return the distance between {@code sample1} and {@code sample2}
     * @throws IllegalArgumentException if the two samples contain different
     *         amounts of values, or if they are empty
     */
    public double getDistance(double[] sample1, double[] sample2) throws IllegalArgumentException {
        final int n = sample1.length;
        // Fix: the original threw a single "must have the same length" message
        // for both mismatched lengths and empty input; report them separately.
        if (n != sample2.length) {
            throw new IllegalArgumentException("Input arrays must have the same length.");
        }
        if (n < 1) {
            throw new IllegalArgumentException("Input arrays must not be empty.");
        }
        double sumOfSquares = 0;
        for (int i = 0; i < n; i++) {
            if (Double.isNaN(sample1[i]) || Double.isNaN(sample2[i])) {
                continue; // skip pairs with missing (NaN) values
            }
            final double diff = sample1[i] - sample2[i];
            sumOfSquares += diff * diff;
        }
        return Math.sqrt(sumOfSquares);
    }
}
public class DefaultGroovyMethods { /** * Iterates through the Array calling the given closure condition for each item but stopping once the first non - null * result is found and returning that result . If all results are null , null is returned . * @ param self an Array * @ param condition a closure that returns a non - null value to indicate that processing should stop and the value should be returned * @ return the first non - null result from calling the closure , or null * @ since 2.5.0 */ public static < S , T > T findResult ( S [ ] self , @ ClosureParams ( FirstParam . Component . class ) Closure < T > condition ) { } }
// Delegate to the Iterator-based overload: iterates the array, returning the
// first non-null result of the condition closure, or null if every result is null.
return findResult(new ArrayIterator<S>(self), condition);
public class CdnManager { /** * 刷新文件外链和目录 , 外链每次不超过100个 , 目录每次不超过10个 * 刷新目录需要额外开通权限 , 可以联系七牛技术支持处理 * 参考文档 : < a href = " http : / / developer . qiniu . com / fusion / api / cache - refresh " > 缓存刷新 < / a > * @ param urls 待刷新文件外链列表 * @ param dirs 待刷新目录列表 * @ return 刷新请求的回复 */ public CdnResult . RefreshResult refreshUrlsAndDirs ( String [ ] urls , String [ ] dirs ) throws QiniuException { } }
// check params if ( urls != null && urls . length > MAX_API_REFRESH_URL_COUNT ) { throw new QiniuException ( new Exception ( "url count exceeds the max refresh limit per request" ) ) ; } if ( dirs != null && dirs . length > MAX_API_REFRESH_DIR_COUNT ) { throw new QiniuException ( new Exception ( "dir count exceeds the max refresh limit per request" ) ) ; } HashMap < String , String [ ] > req = new HashMap < > ( ) ; if ( urls != null ) { req . put ( "urls" , urls ) ; } if ( dirs != null ) { req . put ( "dirs" , dirs ) ; } byte [ ] body = Json . encode ( req ) . getBytes ( Constants . UTF_8 ) ; String url = server + "/v2/tune/refresh" ; StringMap headers = auth . authorizationV2 ( url , "POST" , body , Client . JsonMime ) ; Response response = client . post ( url , body , headers , Client . JsonMime ) ; return response . jsonToObject ( CdnResult . RefreshResult . class ) ;
public class ProxyRepo { /** * { @ inheritDoc } */ public ArtifactStore newInstance ( ) { } }
// Fail fast if the collaborator holder was never injected.
if (factoryHelper == null) {
    throw new IllegalStateException("FactoryHelper has not been set");
}
// Assemble the proxy store from the Maven collaborators supplied by the helper.
return new ProxyArtifactStore(
    factoryHelper.getRepositoryMetadataManager(),
    factoryHelper.getRemoteArtifactRepositories(),
    factoryHelper.getRemotePluginRepositories(),
    factoryHelper.getLocalRepository(),
    factoryHelper.getArtifactFactory(),
    factoryHelper.getArtifactResolver(),
    factoryHelper.getArchetypeManager(),
    factoryHelper.getLog());
public class DescribeVpcClassicLinkRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < DescribeVpcClassicLinkRequest > getDryRunRequest ( ) { } }
// Marshal this request, then flag it as a dry run so the service only
// validates permissions without performing the operation.
final Request<DescribeVpcClassicLinkRequest> marshalled =
        new DescribeVpcClassicLinkRequestMarshaller().marshall(this);
marshalled.addParameter("DryRun", Boolean.toString(true));
return marshalled;
public class SpringApplication { /** * Get the bean definition registry . * @ param context the application context * @ return the BeanDefinitionRegistry if it can be determined */ private BeanDefinitionRegistry getBeanDefinitionRegistry ( ApplicationContext context ) { } }
if ( context instanceof BeanDefinitionRegistry ) { return ( BeanDefinitionRegistry ) context ; } if ( context instanceof AbstractApplicationContext ) { return ( BeanDefinitionRegistry ) ( ( AbstractApplicationContext ) context ) . getBeanFactory ( ) ; } throw new IllegalStateException ( "Could not locate BeanDefinitionRegistry" ) ;
public class A_CmsXmlDocument { /** * Corrects the structure of this XML document . < p > * @ param cms the current OpenCms user context * @ return the file that contains the corrected XML structure * @ throws CmsXmlException if something goes wrong */ public CmsFile correctXmlStructure ( CmsObject cms ) throws CmsXmlException { } }
// Phase 1: apply XSD schema translation
Attribute schema = m_document.getRootElement().attribute(I_CmsXmlSchemaType.XSI_NAMESPACE_ATTRIBUTE_NO_SCHEMA_LOCATION);
if (schema != null) {
    String schemaLocation = schema.getValue();
    String translatedSchema = OpenCms.getResourceManager().getXsdTranslator().translateResource(schemaLocation);
    if (!schemaLocation.equals(translatedSchema)) {
        schema.setValue(translatedSchema);
    }
}
updateLocaleNodeSorting();
// Phase 2: re-process and (optionally) auto-correct the content,
// iterate over all locales
Iterator<Locale> i = m_locales.iterator();
while (i.hasNext()) {
    Locale locale = i.next();
    List<String> names = getNames(locale);
    List<I_CmsXmlContentValue> validValues = new ArrayList<I_CmsXmlContentValue>();
    // iterate over all nodes per language
    Iterator<String> j = names.iterator();
    while (j.hasNext()) {
        // this step is required for values that need a processing of their content
        // an example for this is the HTML value that does link replacement
        String name = j.next();
        I_CmsXmlContentValue value = getValue(name, locale);
        if (value.isSimpleType()) {
            String content = value.getStringValue(cms);
            value.setStringValue(cms, content);
        }
        // save valid elements for later check
        validValues.add(value);
    }
    if (isAutoCorrectionEnabled()) {
        // full correction of XML
        if (validValues.size() < 1) {
            // no valid element was in the content
            if (hasLocale(locale)) {
                // remove the old locale entirely, as there was no valid element
                removeLocale(locale);
            }
            // add a new default locale, this will also generate the default XML as required
            addLocale(cms, locale);
        } else {
            // there is at least one valid element in the content
            List<Element> roots = new ArrayList<Element>();
            List<CmsXmlContentDefinition> rootCds = new ArrayList<CmsXmlContentDefinition>();
            List<Element> validElements = new ArrayList<Element>();
            // gather all XML content definitions and their parent nodes
            Iterator<I_CmsXmlContentValue> it = validValues.iterator();
            while (it.hasNext()) {
                // collect all root elements, also for the nested content definitions
                I_CmsXmlContentValue value = it.next();
                Element element = value.getElement();
                validElements.add(element);
                if (element.supportsParent()) {
                    // get the parent XML node
                    Element root = element.getParent();
                    if ((root != null) && !roots.contains(root)) {
                        // this is a parent node we do not have already in our storage
                        CmsXmlContentDefinition rcd = value.getContentDefinition();
                        if (rcd != null) {
                            // this value has a valid XML content definition
                            roots.add(root);
                            rootCds.add(rcd);
                        } else {
                            // no valid content definition for the XML value
                            throw new CmsXmlException(Messages.get().container(
                                Messages.ERR_CORRECT_NO_CONTENT_DEF_3,
                                value.getName(),
                                value.getTypeName(),
                                value.getPath()));
                        }
                    }
                }
            }
            for (int le = 0; le < roots.size(); le++) {
                // iterate all XML content root nodes and correct each XML subtree
                Element root = roots.get(le);
                CmsXmlContentDefinition cd = rootCds.get(le);
                // step 1: first sort the nodes according to the schema, this takes care of re-ordered elements
                List<List<Element>> nodeLists = new ArrayList<List<Element>>();
                boolean isMultipleChoice = cd.getSequenceType() == CmsXmlContentDefinition.SequenceType.MULTIPLE_CHOICE;
                // if it's a multiple choice element, the child elements must not be sorted into their types,
                // but must keep their original order
                if (isMultipleChoice) {
                    List<Element> nodeList = new ArrayList<Element>();
                    List<Element> elements = CmsXmlGenericWrapper.elements(root);
                    Set<String> typeNames = cd.getSchemaTypes();
                    for (Element element : elements) {
                        // check if the node type is still in the definition
                        if (typeNames.contains(element.getName())) {
                            nodeList.add(element);
                        }
                    }
                    checkMaxOccurs(nodeList, cd.getChoiceMaxOccurs(), cd.getTypeName());
                    nodeLists.add(nodeList);
                }
                // if it's a sequence, the children are sorted according to the sequence type definition
                else {
                    for (I_CmsXmlSchemaType type : cd.getTypeSequence()) {
                        List<Element> elements = CmsXmlGenericWrapper.elements(root, type.getName());
                        checkMaxOccurs(elements, type.getMaxOccurs(), type.getTypeName());
                        nodeLists.add(elements);
                    }
                }
                // step 2: clear the list of nodes (this will remove all invalid nodes)
                List<Element> nodeList = CmsXmlGenericWrapper.elements(root);
                nodeList.clear();
                Iterator<List<Element>> in = nodeLists.iterator();
                while (in.hasNext()) {
                    // now add all valid nodes in the right order
                    List<Element> elements = in.next();
                    nodeList.addAll(elements);
                }
                // step 3: now append the missing elements according to the XML content definition
                cd.addDefaultXml(cms, this, root, locale);
            }
        }
    }
    // re-initialize document bookkeeping after this locale's corrections
    // NOTE(review): this runs once per locale, not once after the loop -- confirm intended
    initDocument();
}
// write the modified XML back to the VFS file
if (m_file != null) {
    // make sure the file object is available
    m_file.setContents(marshal());
}
return m_file;
public class LinuxResource { /** * calculate the disk usage at current filesystem * @ return disk usage , from 0.0 ~ 1.0 */ public static Double getDiskUsage ( ) { } }
// Disk usage is only sampled via `df` on Linux / macOS; other platforms report 0.
if (!OSInfo.isLinux() && !OSInfo.isMac()) {
    return 0.0;
}
try {
    // NOTE(review): assumes `df -h <duHome>` prints a header line followed by
    // one data line whose 5th whitespace-separated column is the "Use%" value
    // (e.g. "42%") -- TODO confirm this holds for every df variant in use.
    String output = SystemOperation.exec("df -h " + duHome);
    if (output != null) {
        String[] lines = output.split("[\\r\\n]+");
        if (lines.length >= 2) {
            String[] parts = lines[1].split("\\s+");
            if (parts.length >= 5) {
                String pct = parts[4];
                if (pct.endsWith("%")) {
                    // strip the trailing '%' and normalize into the 0.0 ~ 1.0 range
                    return Integer.valueOf(pct.substring(0, pct.length() - 1)) / 100.0;
                }
            }
        }
    }
} catch (Exception e) {
    // best-effort metric: log and fall through to the 0.0 default
    LOG.warn("failed to get disk usage.");
}
return 0.0;
public class SibRaManagedConnection { /** * Used to indicate that an application local transaction has been rolled * back . Notifies the connection event listeners . * @ param connection * the connection on which the transaction was started */ void localTransactionRolledBack ( final SibRaConnection connection ) { } }
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
    SibTr.entry(this, TRACE, "localTransactionRolledBack", connection);
}
// Build the rollback event once and deliver it to every registered listener.
final ConnectionEvent event = new ConnectionEvent(this, ConnectionEvent.LOCAL_TRANSACTION_ROLLEDBACK);
event.setConnectionHandle(connection);
final Iterator listeners = _eventListeners.iterator();
while (listeners.hasNext()) {
    ((ConnectionEventListener) listeners.next()).localTransactionRolledback(event);
}
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
    SibTr.exit(this, TRACE, "localTransactionRolledBack");
}
public class ApiOvhSms { /** * Attempt a new validation after moderation refusal * REST : POST / sms / { serviceName } / templatesControl / { name } / relaunchValidation * @ param description [ required ] Template description * @ param message [ required ] Message pattern to be moderated . Use " # VALUE # " format for dynamic text area * @ param serviceName [ required ] The internal name of your SMS offer * @ param name [ required ] Name of the template */ public void serviceName_templatesControl_name_relaunchValidation_POST ( String serviceName , String name , String description , String message ) throws IOException { } }
// Substitute the path parameters, build the request body, and POST.
final String qPath = "/sms/{serviceName}/templatesControl/{name}/relaunchValidation";
final StringBuilder sb = path(qPath, serviceName, name);
final HashMap<String, Object> requestBody = new HashMap<String, Object>();
addBody(requestBody, "description", description);
addBody(requestBody, "message", message);
exec(qPath, "POST", sb.toString(), requestBody);
public class Comparing {

    /**
     * Evaluates max of two comparable elements.
     * Ties favour the left argument, matching the semantics of
     * {@code BinaryOperator.maxBy} that the previous implementation used.
     *
     * @param <T> the element type
     * @param lhs the left element
     * @param rhs the right element
     * @return the greater element
     */
    public static <T extends Comparable<T>> T max(T lhs, T rhs) {
        // Simplification: the original routed through a hand-rolled
        // ComparableComparator wrapped in BinaryOperator.maxBy, allocating a
        // comparator and an operator per call for a plain natural-order compare.
        return lhs.compareTo(rhs) >= 0 ? lhs : rhs;
    }
}
public class OfflinerQueryHandler { /** * Retrieve a value saved for offline access . * @ param context context used to retrieve the content resolver . * @ param url key . * @ return retrieved value or null if no entry match the given key . */ public String get ( Context context , String url ) { } }
// Security fix: use a parameterized selection ("?" + selectionArgs) instead
// of concatenating the url into the SQL string. A url containing a quote
// previously broke the query and allowed selection injection.
final Cursor cursor = context.getContentResolver().query(
        getUri(OfflinerDBHelper.TABLE_CACHE),
        OfflinerDBHelper.PARAMS_CACHE,
        OfflinerDBHelper.REQUEST_URL + " = ?",
        new String[] { url },
        null);
String result = null;
if (cursor != null) {
    if (cursor.getCount() != 0) {
        // take the first matching row's stored result payload
        cursor.moveToFirst();
        result = cursor.getString(cursor.getColumnIndex(OfflinerDBHelper.REQUEST_RESULT));
    }
    cursor.close();
}
return result;
public class PropertyChangeSupport { /** * Fires a property change event to listeners * that have been registered to track updates of * all properties or a property with the specified name . * No event is fired if the given event ' s old and new values are equal and non - null . * @ param event the { @ code PropertyChangeEvent } to be fired */ public void firePropertyChange ( PropertyChangeEvent event ) { } }
final Object oldValue = event.getOldValue();
final Object newValue = event.getNewValue();
// Suppress the event only when both values are non-null and equal
// (null on either side always fires).
if (oldValue != null && newValue != null && oldValue.equals(newValue)) {
    return;
}
final String name = event.getPropertyName();
// Listeners registered for all properties (key null) plus those for this name.
final PropertyChangeListener[] common = this.map.get(null);
final PropertyChangeListener[] named = (name != null) ? this.map.get(name) : null;
fire(common, event);
fire(named, event);
public class DictionaryMaker { /** * 读取所有条目 * @ param path * @ return */ public static List < Item > loadAsItemList ( String path ) { } }
// Read every line of the dictionary at `path` into Item entries; returns null
// if any line fails to parse or the file cannot be read.
List<Item> itemList = new LinkedList<Item>();
BufferedReader br = null;
try {
    br = new BufferedReader(new InputStreamReader(
            IOAdapter == null ? new FileInputStream(path) : IOAdapter.open(path), "UTF-8"));
    String line;
    while ((line = br.readLine()) != null) {
        Item item = Item.create(line);
        if (item == null) {
            logger.warning("使用【" + line + "】创建Item失败");
            return null;
            // continue;
        }
        itemList.add(item);
    }
} catch (Exception e) {
    logger.warning("读取词典" + path + "发生异常" + e);
    return null;
} finally {
    // Fix: the reader was previously never closed, leaking the file handle on
    // every call (including both early-return paths above).
    if (br != null) {
        try {
            br.close();
        } catch (Exception ignored) {
            // best effort: nothing sensible to do if close fails
        }
    }
}
return itemList;
public class JQMButton { /** * Works in case of this button placed on Header , Popup , . . . */ public void setPosOnBand ( PosOnBand value ) { } }
// null clears both positioning classes; otherwise add the class for the side.
if (value == null) {
    getElement().removeClassName(STYLE_UI_BTN_RIGHT);
    getElement().removeClassName(STYLE_UI_BTN_LEFT);
    return;
}
if (value == PosOnBand.LEFT) {
    getElement().addClassName(STYLE_UI_BTN_LEFT);
} else if (value == PosOnBand.RIGHT) {
    getElement().addClassName(STYLE_UI_BTN_RIGHT);
}
public class CollisionFormulaConfig { /** * Export the current formula data to the formula node . * @ param root The root node ( must not be < code > null < / code > ) . * @ param formula The formula reference ( must not be < code > null < / code > ) . * @ throws LionEngineException If error on writing . */ public static void exports ( Xml root , CollisionFormula formula ) { } }
Check.notNull(root);
Check.notNull(formula);
// Create the formula node and serialize each component beneath it.
final Xml formulaNode = root.createChild(NODE_FORMULA);
formulaNode.writeString(ATT_NAME, formula.getName());
CollisionRangeConfig.exports(formulaNode, formula.getRange());
CollisionFunctionConfig.exports(formulaNode, formula.getFunction());
CollisionConstraintConfig.exports(formulaNode, formula.getConstraint());
public class DefaultLibraryLoader { /** * Load classes then perform load native library . */ @ Override public void load ( Callback < Void > callback , Class [ ] loadClasses ) { } }
// Verify every required class resolves before loading the native library;
// each missing class is reported to the callback.
boolean allPresent = true;
for (final Class clazz : loadClasses) {
    try {
        Class.forName(clazz.getName());
    } catch (ClassNotFoundException e) {
        callback.onFailure(e);
        allPresent = false;
    }
}
if (allPresent) {
    doLoad(callback);
}
public class ElementMatchers { /** * Matches a { @ link NamedElement } for an infix of its name . * @ param infix The expected infix of the name . * @ param < T > The type of the matched object . * @ return An element matcher for a named element ' s name ' s infix . */ public static < T extends NamedElement > ElementMatcher . Junction < T > nameContains ( String infix ) { } }
return new NameMatcher < T > ( new StringMatcher ( infix , StringMatcher . Mode . CONTAINS ) ) ;
public class BaseAttribute { /** * Check whether value is of allowed type and convert to an allowed type if possible . */ public static Object checkValue ( Object value ) { } }
// Normalize and validate an attribute value:
//  - null passes through unchanged
//  - Tag values are stored by name (easier to decode than the ordinal)
//  - Groovy GStrings are materialized into plain Strings
//  - a nested BaseAttribute is unwrapped (with a warning) to its value
//  - anything not assignable to a SUPPORTED_VALUE_TYPES entry is rejected
if (null == value) return null;
if (value instanceof Tag) return ((Tag) value).getName(); // name instead of ordinal to make decoding easier
Object result = value;
if (result instanceof GString) result = value.toString();
if (result instanceof BaseAttribute) {
    if (log.isLoggable(Level.WARNING))
        log.warning("An Attribute may not itself contain an attribute as a value. Assuming you forgot to call getValue().");
    // recurse: the wrapped value may itself need conversion/validation
    result = checkValue((((BaseAttribute) value).getValue()));
}
// accept only values assignable to one of the supported types
boolean ok = false;
for (Class type : SUPPORTED_VALUE_TYPES) {
    if (type.isAssignableFrom(result.getClass())) {
        ok = true;
        break;
    }
}
if (!ok) {
    throw new IllegalArgumentException("Attribute values of this type are not allowed: " + result.getClass().getSimpleName());
}
return result;
public class ForwardCurve { /** * Create a forward curve from given times and given forwards . * @ param name The name of this curve . * @ param referenceDate The reference date for this code , i . e . , the date which defines t = 0. * @ param paymentOffsetCode The maturity of the index modeled by this curve . * @ param paymentBusinessdayCalendar The business day calendar used for adjusting the payment date . * @ param paymentDateRollConvention The date roll convention used for adjusting the payment date . * @ param interpolationMethod The interpolation method used for the curve . * @ param extrapolationMethod The extrapolation method used for the curve . * @ param interpolationEntity The entity interpolated / extrapolated . * @ param interpolationEntityForward Interpolation entity used for forward rate interpolation . * @ param discountCurveName The name of a discount curve associated with this index ( associated with it ' s funding or collateralization ) , if any . * @ param model The model to be used to fetch the discount curve , if needed . * @ param times A vector of given time points . * @ param givenForwards A vector of given forwards ( corresponding to the given time points ) . * @ return A new ForwardCurve object . */ public static ForwardCurve createForwardCurveFromForwards ( String name , LocalDate referenceDate , String paymentOffsetCode , BusinessdayCalendarInterface paymentBusinessdayCalendar , BusinessdayCalendarInterface . DateRollConvention paymentDateRollConvention , InterpolationMethod interpolationMethod , ExtrapolationMethod extrapolationMethod , InterpolationEntity interpolationEntity , InterpolationEntityForward interpolationEntityForward , String discountCurveName , AnalyticModelInterface model , double [ ] times , double [ ] givenForwards ) { } }
// Construct the empty curve with the given conventions, then populate it
// point by point from the (time, forward) pairs.
final ForwardCurve forwardCurve = new ForwardCurve(name, referenceDate, paymentOffsetCode,
        paymentBusinessdayCalendar, paymentDateRollConvention, interpolationMethod,
        extrapolationMethod, interpolationEntity, interpolationEntityForward, discountCurveName);
for (int i = 0; i < times.length; i++) {
    forwardCurve.addForward(model, times[i], givenForwards[i], false);
}
return forwardCurve;
public class Packer { /** * Get the sprite a given location on the current sheet * @ param x The x coordinate to look for the sprite * @ param y The y coordinate to look for the sprite * @ return The sprite found at the given location or null if no sprite can be found */ public Sprite getSpriteAt ( int x , int y ) { } }
for ( int i = 0 ; i < sprites . size ( ) ; i ++ ) { if ( ( ( Sprite ) sprites . get ( i ) ) . contains ( x , y ) ) { return ( ( Sprite ) sprites . get ( i ) ) ; } } return null ;
public class EvaluatorImpl { /** * Make a NumericValue from a standard wrapper subclass of Number */ public static int getType ( Number val ) { } }
if ( val instanceof Integer ) { return INT ; } else if ( val instanceof Long ) { return LONG ; } else if ( val instanceof Short ) { return SHORT ; } else if ( val instanceof Byte ) { return BYTE ; } else if ( val instanceof Double ) { return DOUBLE ; } else if ( val instanceof Float ) { return FLOAT ; } else throw new IllegalArgumentException ( ) ;
public class srecLexer {
    /**
     * ANTLR-generated lexer rule for the NEWLINE token.
     *
     * <p>Recognizes one or more line terminators, each being an optional '\r'
     * followed by a mandatory '\n' and any run of trailing spaces, i.e. the
     * grammar fragment {@code (('\r')? '\n' (' ')*)+}. If the input does not
     * start with at least one such terminator, an {@link EarlyExitException}
     * is raised (loop 10 requires a minimum of one iteration).</p>
     *
     * <p>Generated code — do not hand-edit the control flow.</p>
     */
    public final void mNEWLINE() throws RecognitionException {
        try {
            int _type = NEWLINE;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // (('\r')? '\n' (' ')*)+ : outer one-or-more loop over line breaks
            {
                int cnt10 = 0;
                loop10: do {
                    int alt10 = 2;
                    // One-symbol lookahead decides whether another line break follows.
                    int LA10_0 = input.LA(1);
                    if ((LA10_0 == '\n' || LA10_0 == '\r')) {
                        alt10 = 1;
                    }
                    switch (alt10) {
                        case 1:
                            // ('\r')? '\n' (' ')* : a single (possibly CRLF) line break
                            {
                                // Optional carriage return preceding the line feed.
                                int alt8 = 2;
                                int LA8_0 = input.LA(1);
                                if ((LA8_0 == '\r')) {
                                    alt8 = 1;
                                }
                                switch (alt8) {
                                    case 1:
                                        {
                                            match('\r');
                                        }
                                        break;
                                }
                                match('\n');
                                // Consume any run of spaces after the newline.
                                loop9: do {
                                    int alt9 = 2;
                                    int LA9_0 = input.LA(1);
                                    if ((LA9_0 == ' ')) {
                                        alt9 = 1;
                                    }
                                    switch (alt9) {
                                        case 1:
                                            {
                                                match(' ');
                                            }
                                            break;
                                        default:
                                            break loop9;
                                    }
                                } while (true);
                            }
                            break;
                        default:
                            // No further line break: legal only after at least one iteration.
                            if (cnt10 >= 1) break loop10;
                            EarlyExitException eee = new EarlyExitException(10, input);
                            throw eee;
                    }
                    cnt10++;
                } while (true);
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
        }
    }
}
public class xen_smx_gsbvpx_image { /** * < pre > * Use this operation to delete smx _ gsb XVA file . * < / pre > */ public static xen_smx_gsbvpx_image delete ( nitro_service client , xen_smx_gsbvpx_image resource ) throws Exception { } }
resource . validate ( "delete" ) ; return ( ( xen_smx_gsbvpx_image [ ] ) resource . delete_resource ( client ) ) [ 0 ] ;
public class SLF4JLoggerImpl { /** * Log an exception ( throwable ) at the ERROR level with an accompanying message . If the exception is null , then this method * calls { @ link org . modeshape . common . logging . Logger # error ( org . modeshape . common . i18n . I18nResource , Object . . . ) } . * @ param t the exception ( throwable ) to log * @ param message the message accompanying the exception * @ param params the parameter values that are to replace the variables in the format string */ @ Override public void error ( Throwable t , I18nResource message , Object ... params ) { } }
if ( ! isErrorEnabled ( ) ) return ; if ( t == null ) { error ( message , params ) ; return ; } if ( message == null ) { logger . error ( null , t ) ; return ; } logger . error ( message . text ( getLoggingLocale ( ) , params ) , t ) ;
public class BaseDataAdaptiveTableLayoutAdapter { /** * Switch 2 columns headers with data * @ param columnIndex column header from * @ param columnToIndex column header to */ void switchTwoColumnHeaders ( int columnIndex , int columnToIndex ) { } }
Object cellData = getColumnHeaders ( ) [ columnToIndex ] ; getColumnHeaders ( ) [ columnToIndex ] = getColumnHeaders ( ) [ columnIndex ] ; getColumnHeaders ( ) [ columnIndex ] = cellData ;
public class PlanAssembler {
    /**
     * Add a limit, pushed-down if possible, and return the new root.
     *
     * @param root top of the original plan
     * @return new plan's root node
     */
    private AbstractPlanNode handleSelectLimitOperator(AbstractPlanNode root) {
        // The coordinator's top limit graph fragment for a MP plan.
        // If planning "order by ... limit", getNextSelectPlan()
        // will have already added an order by to the coordinator frag.
        // This is the only limit node in a SP plan.
        LimitPlanNode topLimit = m_parsedSelect.getLimitNodeTop();
        assert (topLimit != null);
        /*
         * TODO: allow push down limit with distinct (select distinct C from T limit 5),
         * DISTINCT in aggregates and DISTINCT PUSH DOWN with partition column included.
         */
        AbstractPlanNode sendNode = null;
        // Whether or not we can push the limit node down.
        boolean canPushDown = !m_parsedSelect.hasDistinctWithGroupBy();
        if (canPushDown) {
            // Find a SEND node below which a duplicate limit could live;
            // null means the plan shape does not permit push-down.
            sendNode = checkLimitPushDownViability(root);
            if (sendNode == null) {
                canPushDown = false;
            } else {
                canPushDown = m_parsedSelect.getCanPushdownLimit();
            }
        }
        if (m_parsedSelect.m_mvFixInfo.needed()) {
            // Do not push down limit for mv based distributed query.
            canPushDown = false;
        }
        /*
         * Push down the limit plan node when possible even if offset is set. If
         * the plan is for a partitioned table, do the push down. Otherwise,
         * there is no need to do the push down work, the limit plan node will
         * be run in the partition.
         */
        if (canPushDown) {
            /*
             * For partitioned table, the pushed-down limit plan node has a limit based
             * on the combined limit and offset, which may require an expression if either of these
             * was not a hard-coded constant and didn't get parameterized.
             * The top level limit plan node remains the same, with the original limit and offset values.
             */
            LimitPlanNode distLimit = m_parsedSelect.getLimitNodeDist();
            // Disconnect the distributed parts of the plan below the SEND node.
            AbstractPlanNode distributedPlan = sendNode.getChild(0);
            distributedPlan.clearParents();
            sendNode.clearChildren();
            // If the distributed limit must be performed on ordered input,
            // ensure the order of the data on each partition.
            if (m_parsedSelect.hasOrderByColumns()) {
                distributedPlan = handleOrderBy(m_parsedSelect, distributedPlan);
            }
            if (isInlineLimitPlanNodePossible(distributedPlan)) {
                // Inline the distributed limit.
                distributedPlan.addInlinePlanNode(distLimit);
                sendNode.addAndLinkChild(distributedPlan);
            } else {
                distLimit.addAndLinkChild(distributedPlan);
                // Add the distributed work back to the plan.
                sendNode.addAndLinkChild(distLimit);
            }
        }
        // In future, inline LIMIT for join, Receive.
        // Then we do not need to distinguish the order by node.
        return inlineLimitOperator(root, topLimit);
    }
}
public class GbmMojoModel {
    /**
     * Corresponds to `hex.tree.gbm.GbmMojoModel.score0()`.
     *
     * @param row the input feature row
     * @param offset per-row offset applied when unifying the raw scores
     * @param preds output array; receives accumulated raw tree scores, then the
     *              final predictions, and is also returned
     * @return the {@code preds} array holding the final predictions
     */
    @Override
    public final double[] score0(double[] row, double offset, double[] preds) {
        // Accumulate every tree's contribution into preds, then convert the raw
        // accumulated scores into final predictions (unifyPreds applies the
        // offset; its exact transform is defined elsewhere in the hierarchy).
        super.scoreAllTrees(row, preds);
        return unifyPreds(row, offset, preds);
    }
}
public class CalendarDay { /** * Get a new instance set to the specified day * @ param date { @ linkplain LocalDate } to pull date information from . Passing null will return null * @ return CalendarDay set to the specified date */ public static CalendarDay from ( @ Nullable LocalDate date ) { } }
if ( date == null ) { return null ; } return new CalendarDay ( date ) ;
public class RegisteredResources { /** * Generates a new XidImpl to represent a new branch of this * transaction . * @ return A new XidImpl representing a new branch of this transaction . */ protected Xid generateNewBranch ( ) { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "generateNewBranch" ) ; // Create a new Xid branch final XidImpl result = new XidImpl ( _txServiceXid , ++ _branchCount ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "generateNewBranch" , result ) ; return result ;
public class ChannelFrameworkImpl {
    /**
     * Update the discrimination weight of the named channel configuration and
     * propagate the new weight to running channels built from it.
     *
     * @see com.ibm.wsspi.channelfw.ChannelFramework#updateChannelWeight(java.lang.String, int)
     * @param channelName name of the channel to update; must not be null
     * @param newWeight new discriminator weight; must be non-negative
     * @return the updated channel configuration
     * @throws InvalidChannelNameException if the name is null or unknown
     * @throws InvalidWeightException if the weight is negative
     */
    @Override
    public synchronized ChannelData updateChannelWeight(String channelName, int newWeight) throws ChannelException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "updateChannelWeight");
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "channelName=" + channelName + ", newWeight=" + newWeight);
        }
        // Validate inputs before touching any framework state.
        if (null == channelName) {
            throw new InvalidChannelNameException("Input channel name is null");
        }
        if (newWeight < 0) {
            throw new InvalidWeightException("Invalid input weight, " + newWeight);
        }
        // Find the existing channel config.
        ChannelDataImpl channelData = (ChannelDataImpl) channelDataMap.get(channelName);
        if (null == channelData) {
            throw new InvalidChannelNameException("Unable to find input channel, " + channelName);
        }
        // Set the new weight in the framework.
        channelData.setDiscriminatorWeight(newWeight);
        // Update each running channel that is using this parent channel data object.
        updateRunningChannels(channelData);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "updateChannelWeight");
        }
        return channelData;
    }
}
public class Server { /** * Stops the server gracefully . * Do not call in action thread to avoid deadlock ( start a new thread or use { @ link # stopAtShutdown } ) . */ public void stop ( ) { } }
Log . info ( serverName ( ) + " gracefully stopping..." ) ; for ( EventLoopGroup g : eventLoopGroups ) { g . shutdownGracefully ( ) . awaitUninterruptibly ( ) ; } Log . info ( serverName ( ) + " gracefully stopped" ) ;
public class DefaultCosHttpClient { /** * 获得异常发生时的返回信息 */ private String getExceptionMsg ( HttpRequest httpRequest , String exceptionStr ) { } }
String errMsg = new StringBuilder ( "HttpRequest:" ) . append ( httpRequest . toString ( ) ) . append ( "\nException:" ) . append ( exceptionStr ) . toString ( ) ; LOG . error ( errMsg ) ; return errMsg ;
public class StarRegexPathElement { /** * @ param key : String key that needs to be escaped before compiling into regex . * @ return : Metachar escaped key . * Regex has some special meaning for the metachars [ . ^ $ | * + ? ( ) [ { \ ] . If any of these metachars is present in the pattern key that was passed , it needs to be escaped so that * it can be matched against literal . */ private static String escapeMetacharsIfAny ( String key ) { } }
char [ ] keyChars = key . toCharArray ( ) ; // String . replace replaces all instances of the char sequence . So , it would try to escape the occurrence as many times as the occurrence frequency . // For ex : if a key as 2 ' 5star . rating . 1 ' , it would escape it twice resulting in 5star / / . rating / / . 1. // So , we keep an list of already seen characters . Set < Character > charsAlreadySeen = new HashSet < > ( ) ; for ( char keychar : keyChars ) { switch ( keychar ) { case '(' : case '[' : case '{' : case '\\' : case '^' : case '$' : case '|' : case ')' : case '?' : case '+' : case '.' : if ( ! charsAlreadySeen . contains ( keychar ) ) { key = key . replace ( String . valueOf ( keychar ) , "\\" + keychar ) ; charsAlreadySeen . add ( keychar ) ; } break ; default : break ; } } return key ;
public class AddonDependencyEntry { /** * Create a new { @ link AddonDependencyEntry } with the given attributes . */ public static AddonDependencyEntry create ( String name , VersionRange range ) { } }
return create ( name , range , false , false ) ;
public class FrozenProperties { /** * Set a header style property using its name as the key . Please ensure the style name and value * are appropriately configured or it may result in unexpected behavior . * @ param styleName the style name as seen here { @ link Style # STYLE _ Z _ INDEX } for example . * @ param value the string value required for the given style property . */ public FrozenProperties setHeaderStyleProperty ( StyleName styleName , String value ) { } }
headerStyleProps . put ( styleName , value ) ; return this ;
public class Matrices { /** * Creates a minus function that subtracts given { @ code value } from it ' s argument . * @ param arg a value to be subtracted from function ' s argument * @ return a closure that does { @ code _ - _ } */ public static MatrixFunction asMinusFunction ( final double arg ) { } }
return new MatrixFunction ( ) { @ Override public double evaluate ( int i , int j , double value ) { return value - arg ; } } ;
public class ObjectWritable { /** * Retrieve byte [ ] for given name . This should be done only for * class and method names . the return value represents length and * name as a byte array . If the name is not present , cache it , if * the map max capacity is not exceeded . */ private static byte [ ] getByteNameWithCaching ( String entityName ) { } }
byte [ ] name = cachedByteClassNames . get ( entityName ) ; if ( name == null ) { name = prepareCachedNameBytes ( entityName ) ; // if the cache max capacity is not exceeded , cache the name if ( cachedByteClassNames . size ( ) < CACHE_MAX_SIZE ) { cachedByteClassNames . put ( entityName , name ) ; } } // this should never be null return name ;
public class Matrix4x3d {
    /**
     * Store this matrix in transposed float order into the given ByteBuffer,
     * starting at the given absolute byte index.
     *
     * (non-Javadoc)
     * @see org.joml.Matrix4x3dc#getTransposedFloats(int, java.nio.ByteBuffer)
     *
     * @param index absolute byte offset within the buffer at which to start writing
     * @param buffer the destination buffer
     * @return the supplied {@code buffer}, to allow call chaining
     */
    public ByteBuffer getTransposedFloats(int index, ByteBuffer buffer) {
        // Delegate the actual element layout to MemUtil, which writes the matrix
        // elements as floats in transposed order at the given index.
        // NOTE(review): per JOML convention the buffer position is presumably
        // left unchanged by the absolute-index write — confirm in MemUtil.
        MemUtil.INSTANCE.putfTransposed(this, index, buffer);
        return buffer;
    }
}
public class lbmetrictable_metric_binding { /** * Use this API to count the filtered set of lbmetrictable _ metric _ binding resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static long count_filtered ( nitro_service service , String metrictable , String filter ) throws Exception { } }
lbmetrictable_metric_binding obj = new lbmetrictable_metric_binding ( ) ; obj . set_metrictable ( metrictable ) ; options option = new options ( ) ; option . set_count ( true ) ; option . set_filter ( filter ) ; lbmetrictable_metric_binding [ ] response = ( lbmetrictable_metric_binding [ ] ) obj . getfiltered ( service , option ) ; if ( response != null ) { return response [ 0 ] . __count ; } return 0 ;
public class MessagingSecurityUtility { /** * Create AuthenticationData Object from the UserName and Password passed * @ param userName * @ param password * @ return */ public static AuthenticationData createAuthenticationData ( String userName , String password ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , CLASS_NAME + "createAuthenticationData" , new Object [ ] { userName , "Password Not Traced" } ) ; } AuthenticationData authData = new WSAuthenticationData ( ) ; if ( userName == null ) userName = "" ; if ( password == null ) password = "" ; authData . set ( AuthenticationData . USERNAME , userName ) ; authData . set ( AuthenticationData . PASSWORD , new ProtectedString ( password . toCharArray ( ) ) ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exit ( tc , CLASS_NAME + "createAuthenticationData" , authData ) ; } return authData ;
public class ByteArrayList { /** * Adds all the elements in the given array to the array list . * @ param values The values to add to the array list . */ public void add ( byte [ ] values ) { } }
ensureCapacity ( size + values . length ) ; for ( byte element : values ) { this . add ( element ) ; }
public class SyncPointInfo {
    /**
     * <code>optional string syncPointUri = 1;</code>
     *
     * Protobuf-generated accessor: the backing field holds either a String or a
     * ByteString. A ByteString-backed value is decoded as UTF-8 on access and,
     * when the bytes are valid UTF-8, cached back into the field so subsequent
     * calls return the String directly.
     */
    public java.lang.String getSyncPointUri() {
        java.lang.Object ref = syncPointUri_;
        if (ref instanceof java.lang.String) {
            // Already decoded (or originally set as a String).
            return (java.lang.String) ref;
        } else {
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            if (bs.isValidUtf8()) {
                // Cache the decoded form only when it round-trips as valid UTF-8.
                syncPointUri_ = s;
            }
            return s;
        }
    }
}
public class TimestampUtils { /** * Parse a string and return a LocalTime representing its value . * @ param s The ISO formated time string to parse . * @ return null if s is null or a LocalTime of the parsed string s . * @ throws SQLException if there is a problem parsing s . */ public LocalTime toLocalTime ( String s ) throws SQLException { } }
if ( s == null ) { return null ; } if ( s . equals ( "24:00:00" ) ) { return LocalTime . MAX ; } try { return LocalTime . parse ( s ) ; } catch ( DateTimeParseException nfe ) { throw new PSQLException ( GT . tr ( "Bad value for type timestamp/date/time: {1}" , s ) , PSQLState . BAD_DATETIME_FORMAT , nfe ) ; }
public class AddressDivisionGrouping {
    /**
     * Apply an increment to an address section, handling single values, ranges,
     * and increments that land on or pass the top of a range.
     *
     * This does not handle overflow; overflow should be checked before calling this.
     *
     * @param section the section to increment
     * @param increment the signed increment amount
     * @param bigIncrement the same increment as a BigInteger
     * @param addrCreator creator used to build the resulting section
     * @param lowerProducer supplies the lowest section in the range
     * @param upperProducer supplies the highest section in the range
     * @param prefixLength prefix length to apply to the result, or null
     * @return the incremented section
     */
    protected static <R extends AddressSection, S extends AddressSegment> R increment(R section, long increment, BigInteger bigIncrement, AddressCreator<?, R, ?, S> addrCreator, Supplier<R> lowerProducer, Supplier<R> upperProducer, Integer prefixLength) {
        if (!section.isMultiple()) {
            // Single-valued section: plain addition suffices.
            return add(section, bigIncrement, addrCreator, prefixLength);
        }
        boolean isDecrement = increment <= 0;
        if (isDecrement) {
            // A non-positive increment is applied to the lowest address of the range.
            return add(lowerProducer.get(), bigIncrement, addrCreator, prefixLength);
        }
        BigInteger count = section.getCount();
        BigInteger incrementPlus1 = bigIncrement.add(BigInteger.ONE);
        int countCompare = count.compareTo(incrementPlus1);
        if (countCompare <= 0) {
            if (countCompare == 0) {
                // increment == count - 1: lands exactly on the highest address.
                return upperProducer.get();
            }
            // increment passes the top of the range: add the remainder beyond it.
            return add(upperProducer.get(), incrementPlus1.subtract(count), addrCreator, prefixLength);
        }
        // increment stays strictly inside the range: step through it.
        return incrementRange(section, increment, addrCreator, lowerProducer, prefixLength);
    }
}
public class CmsFunctionRenderer { /** * Helper method for cached reading of resources under specific , fixed paths . < p > * @ param cms the current CMS context * @ param path the path to read * @ return the resource which has been read */ private static CmsResource getDefaultResource ( CmsObject cms , String path ) { } }
CmsResource resource = ( CmsResource ) CmsVfsMemoryObjectCache . getVfsMemoryObjectCache ( ) . getCachedObject ( cms , path ) ; if ( resource == null ) { try { resource = cms . readResource ( path ) ; CmsVfsMemoryObjectCache . getVfsMemoryObjectCache ( ) . putCachedObject ( cms , path , resource ) ; } catch ( CmsException e ) { LOG . warn ( e . getLocalizedMessage ( ) , e ) ; } } return resource ;
public class SibTr { /** * If debug level tracing is enabled then trace a byte array using formatted * output with offsets . Duplicate output lines are suppressed to save space . * @ param tc the non - null < code > TraceComponent < / code > the event is associated * with . * @ param data the byte array to be traced */ public static void bytes ( TraceComponent tc , byte [ ] data ) { } }
int length = 0 ; if ( data != null ) length = data . length ; bytes ( null , tc , data , 0 , length , "" ) ;
public class OrmReader { /** * COMPLEXITY : ON */ private static < T > T statementToObject ( final PreparedStatement stmt , final T target , final Object ... args ) throws SQLException { } }
populateStatementParameters ( stmt , args ) ; try ( final ResultSet resultSet = stmt . executeQuery ( ) ) { if ( resultSet . next ( ) ) { return resultSetToObject ( resultSet , target ) ; } return null ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } finally { stmt . close ( ) ; }
public class AbstractAmazonDynamoDBAsync { /** * Simplified method form for invoking the UpdateTable operation . * @ see # updateTableAsync ( UpdateTableRequest ) */ @ Override public java . util . concurrent . Future < UpdateTableResult > updateTableAsync ( String tableName , ProvisionedThroughput provisionedThroughput ) { } }
return updateTableAsync ( new UpdateTableRequest ( ) . withTableName ( tableName ) . withProvisionedThroughput ( provisionedThroughput ) ) ;
public class systemglobal_binding { /** * Use this API to fetch a systemglobal _ binding resource . */ public static systemglobal_binding get ( nitro_service service ) throws Exception { } }
systemglobal_binding obj = new systemglobal_binding ( ) ; systemglobal_binding response = ( systemglobal_binding ) obj . get_resource ( service ) ; return response ;
public class GrammarInfo { /** * Extracts the declared package name from the specified grammar file . * @ param grammar * The contents of the grammar file , must not be < code > null < / code > . * @ return The declared package name or an empty string if not found . */ private String findPackageName ( final String grammar ) { } }
final String packageDeclaration = "package\\s+([^\\s.;]+(\\.[^\\s.;]+)*)\\s*;" ; final Matcher matcher = Pattern . compile ( packageDeclaration ) . matcher ( grammar ) ; if ( matcher . find ( ) ) { return matcher . group ( 1 ) ; } return "" ;
public class PatchedBigQueryTableRowIterator {
    /**
     * Executes the specified query and returns a reference to the temporary BigQuery table created
     * to hold the results.
     *
     * @return reference to the temporary destination table holding the query results
     * @throws IOException if the query fails.
     * @throws InterruptedException if interrupted while backing off between API calls.
     */
    private TableReference executeQueryAndWaitForCompletion() throws IOException, InterruptedException {
        checkState(projectId != null, "Unable to execute a query without a configured project id");
        checkState(queryConfig != null, "Unable to execute a query without a configured query");
        // Dry run query to get source table location.
        Job dryRunJob = new Job().setConfiguration(new JobConfiguration().setQuery(queryConfig).setDryRun(true));
        JobStatistics jobStats = executeWithBackOff(client.jobs().insert(projectId, dryRunJob), String.format("Error when trying to dry run query %s.", queryConfig.toPrettyString())).getStatistics();
        // Let BigQuery pick the default location if the query does not read any tables.
        String location = null;
        @Nullable List<TableReference> tables = jobStats.getQuery().getReferencedTables();
        if (tables != null && !tables.isEmpty()) {
            // Use the location of the first referenced table for the temp dataset.
            Table table = getTable(tables.get(0));
            location = table.getLocation();
        }
        // Create a temporary dataset to store results.
        // Starting dataset name with an "_" so that it is hidden.
        Random rnd = new Random(System.currentTimeMillis());
        temporaryDatasetId = "_beam_temporary_dataset_" + rnd.nextInt(1000000);
        temporaryTableId = "beam_temporary_table_" + rnd.nextInt(1000000);
        createDataset(temporaryDatasetId, location);
        // Build the real query job, directing its output into the temporary table.
        Job job = new Job();
        JobConfiguration config = new JobConfiguration();
        config.setQuery(queryConfig);
        job.setConfiguration(config);
        TableReference destinationTable = new TableReference();
        destinationTable.setProjectId(projectId);
        destinationTable.setDatasetId(temporaryDatasetId);
        destinationTable.setTableId(temporaryTableId);
        queryConfig.setDestinationTable(destinationTable);
        queryConfig.setAllowLargeResults(true);
        Job queryJob = executeWithBackOff(client.jobs().insert(projectId, job), String.format("Error when trying to execute the job for query %s.", queryConfig.toPrettyString()));
        JobReference jobId = queryJob.getJobReference();
        // Poll the job until it reaches the DONE state.
        while (true) {
            Job pollJob = executeWithBackOff(client.jobs().get(projectId, jobId.getJobId()), String.format("Error when trying to get status of the job for query %s.", queryConfig.toPrettyString()));
            JobStatus status = pollJob.getStatus();
            if (status.getState().equals("DONE")) {
                // Job is DONE, but did not necessarily succeed.
                ErrorProto error = status.getErrorResult();
                if (error == null) {
                    return pollJob.getConfiguration().getQuery().getDestinationTable();
                } else {
                    // There will be no temporary table to delete, so null out the reference.
                    temporaryTableId = null;
                    throw new IOException(String.format("Executing query %s failed: %s", queryConfig.toPrettyString(), error.getMessage()));
                }
            }
            Uninterruptibles.sleepUninterruptibly(QUERY_COMPLETION_POLL_TIME.getMillis(), TimeUnit.MILLISECONDS);
        }
    }
}
public class TaskManagerTabPanel {
    /**
     * Builds and launches one experiment task per (algorithm, stream) pair
     * configured in the two tables, writing each task's output under the
     * results folder (one sub-folder per stream, one file per algorithm), and
     * starts a watcher thread that refreshes the summary/plot/analysis tabs and
     * re-enables the Run button once every launched task reports completion.
     */
    public void runTask() {
        // One task slot per (algorithm, stream) combination.
        MainTask tasks[] = new MainTask[jTableAlgorithms.getModel().getRowCount() * jTableStreams.getModel().getRowCount()];
        int taskCount = 0;
        String dir = "";
        try {
            // Parse the task template typed into the task text field.
            this.currentTask = (MainTask) ClassOption.cliStringToObject(this.jTextFieldTask.getText(), MainTask.class, null);
        } catch (Exception ex) {
            Logger.getLogger(TaskManagerTabPanel.class.getName()).log(Level.SEVERE, null, ex);
        }
        MainTask auxTask = (MainTask) this.currentTask.copy();
        dir += this.resultsPath;
        File f = new File(dir);
        if (f.exists()) {
            // Results folder already exists: ask before wiping previous results.
            Object[] options = { "Yes", "No" };
            String cancel = "NO";
            int resp = JOptionPane.showOptionDialog(this, "The selected folder is not empty. This action may overwrite " + "previous experiment results. Do you want to continue?", "Warning", JOptionPane.OK_CANCEL_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, cancel);
            if (resp == JOptionPane.OK_OPTION) {
                ReadFile.deleteDrectory(f);
            } else {
                JOptionPane.showMessageDialog(this, "Please specify another directory", "Message", JOptionPane.INFORMATION_MESSAGE);
                return;
            }
        }
        f.mkdir();
        String algNames = "";
        String streamNames = "";
        for (int i = 0; i < jTableAlgorithms.getModel().getRowCount(); i++) {
            // Column 0 holds the CLI definition, column 1 the display/file name.
            String alg = jTableAlgorithms.getModel().getValueAt(i, 0).toString();
            String algFile = jTableAlgorithms.getModel().getValueAt(i, 1).toString();
            algNames += algFile;
            if (i != jTableAlgorithms.getModel().getRowCount() - 1) {
                algNames += ",";
            }
            for (int j = 0; j < jTableStreams.getModel().getRowCount(); j++) {
                String stream = jTableStreams.getModel().getValueAt(j, 0).toString();
                String streamFile = jTableStreams.getModel().getValueAt(j, 1).toString();
                streamNames += streamFile.split(" ")[0];
                if (j != jTableStreams.getModel().getRowCount() - 1) {
                    streamNames += ",";
                }
                if (i == 0) {
                    // Create the per-stream output folder on the first algorithm pass only.
                    String sfile = FilenameUtils.separatorsToSystem(dir + "\\\\" + streamFile);
                    f = new File(sfile);
                    f.mkdir();
                }
                // Assemble the CLI string: learner (-l), stream (-s) and dump file (-d).
                String task = " -l ";
                if (alg.split(" ") != null) {
                    task += "(" + alg + ") -s (" + stream + ")" + " -d (" + dir + File.separator + streamFile.split(" ")[0] + File.separator + algFile + ".txt" + ")";
                } else {
                    // NOTE(review): String.split never returns null, so this branch is unreachable.
                    task += alg + " -s (" + stream + ")" + " -d (" + dir + File.separator + streamFile.split(" ")[0] + File.separator + algFile + ".txt" + ")";
                }
                auxTask.getOptions().setViaCLIString(task);
                try {
                    // Snapshot the configured template as an independent task instance.
                    tasks[taskCount] = (MainTask) auxTask.copy();
                } catch (Exception ex) {
                    Logger.getLogger(TaskManagerTabPanel.class.getName()).log(Level.SEVERE, null, ex);
                }
                taskCount++;
            }
        }
        this.jButtonRun.setEnabled(false);
        Buffer buffer = new Buffer(tasks);
        // Number of worker threads, taken from the UI and bounded by the task count.
        int proc = 1;
        if (!this.jTextFieldProcess.getText().equals("")) {
            proc = Integer.parseInt(this.jTextFieldProcess.getText());
        }
        if (proc > tasks.length) {
            proc = tasks.length;
        }
        for (int i = 0; i < proc; i++) {
            ExpTaskThread thread = new ExpTaskThread(buffer);
            thread.start();
            this.taskList.add(0, thread);
            this.taskTableModel.fireTableDataChanged();
            this.taskTable.setRowSelectionInterval(0, 0);
        }
        // Watcher thread: busy-waits until every launched task is complete, then
        // reloads the result views and re-enables the Run button.
        Thread obs = new Thread() {
            public void run() {
                while (true) {
                    int count = 0;
                    for (ExpTaskThread thread : TaskManagerTabPanel.this.taskList) {
                        if (thread.isCompleted == true) {
                            count++;
                        }
                    }
                    if (count == TaskManagerTabPanel.this.taskList.size()) {
                        TaskManagerTabPanel.this.summary.readData(resultsPath);
                        TaskManagerTabPanel.this.plot.readData(resultsPath);
                        TaskManagerTabPanel.this.analizeTab.readData(resultsPath);
                        TaskManagerTabPanel.this.jButtonRun.setEnabled(true);
                        break;
                    }
                }
            }
        };
        obs.start();
    }
}