signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class NullAwayBenchmarkHarness { /** * Some recommendations for this mode . * < ul > * < li > Disable all other checks but NullAway by passing { @ code - XepDisableAllChecks * - Xep : NullAway : WARN } after other EP options * < li > If you want to just benchmark the baseline without NullAway , only pass { @ code * - XepDisableAllChecks } ( you ' ll have to do this in a different run ) * < / ul > */ private static void justRun ( String [ ] args ) { } }
List < String > javacArgs = new ArrayList < > ( Arrays . asList ( args ) ) ; String nullawayJar = getJarFileForClass ( NullAway . class ) . getFile ( ) ; // add NullAway jar to existing processor path if found boolean foundProcessorPath = false ; for ( int i = 0 ; i < javacArgs . size ( ) ; i ++ ) { if ( javacArgs . get ( i ) . equals ( "-processorpath" ) ) { foundProcessorPath = true ; String procPath = javacArgs . get ( i + 1 ) ; procPath = procPath + System . getProperties ( ) . getProperty ( "path.separator" ) + nullawayJar ; javacArgs . set ( i + 1 , procPath ) ; break ; } } if ( ! foundProcessorPath ) { javacArgs . add ( "-processorpath" ) ; javacArgs . add ( nullawayJar ) ; } System . out . println ( "Running" ) ; runCompile ( javacArgs , 3 , 8 ) ;
public class DPTXlator { /** * Translates the < code > value < / code > according to the set datapoint ID . * If , and only if , < code > value < / code > can successfully be translated , it gets * stored by the translator , replacing any old items . Textual commands contained in * < code > value < / code > are treated case insensitive . < br > * The < code > value < / code > string might have its unit of measure appended ( units are * case sensitive ) . * @ param value value represented as string for translation , case insensitive * @ throws KNXFormatException if < code > value < / code > can ' t be translated due to wrong * formatted content , or if < code > value < / code > doesn ' t fit into KNX data type */ public void setValue ( String value ) throws KNXFormatException { } }
final short [ ] buf = new short [ typeSize > 0 ? typeSize : 1 ] ; toDPT ( value , buf , 0 ) ; data = buf ;
public class VirtualNetworkGatewayConnectionsInner { /** * The Put VirtualNetworkGatewayConnectionSharedKey operation sets the virtual network gateway connection shared key for passed virtual network gateway connection in the specified resource group through Network resource provider . * @ param resourceGroupName The name of the resource group . * @ param virtualNetworkGatewayConnectionName The virtual network gateway connection name . * @ param parameters Parameters supplied to the Begin Set Virtual Network Gateway connection Shared key operation throughNetwork resource provider . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ConnectionSharedKeyInner object if successful . */ public ConnectionSharedKeyInner beginSetSharedKey ( String resourceGroupName , String virtualNetworkGatewayConnectionName , ConnectionSharedKeyInner parameters ) { } }
return beginSetSharedKeyWithServiceResponseAsync ( resourceGroupName , virtualNetworkGatewayConnectionName , parameters ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Engine { /** * Reset the local idle timers , now that we have received some data . * If we have set an idle timeout the client must send some data at least that often , * we double the timeout before checking . */ private void resetReceiveIdleTimer ( Event event ) { } }
final String methodName = "resetReceiveIdleTimer" ; logger . entry ( this , methodName , event ) ; if ( receiveScheduledFuture != null ) { receiveScheduledFuture . cancel ( false ) ; } final Transport transport = event . getTransport ( ) ; if ( transport != null ) { final int localIdleTimeOut = transport . getIdleTimeout ( ) ; if ( localIdleTimeOut > 0 ) { Runnable receiveTimeout = new Runnable ( ) { @ Override public void run ( ) { final String methodName = "run" ; logger . entry ( this , methodName ) ; transport . process ( ) ; transport . tick ( System . currentTimeMillis ( ) ) ; logger . exit ( methodName ) ; } } ; receiveScheduledFuture = scheduler . schedule ( receiveTimeout , localIdleTimeOut , TimeUnit . MILLISECONDS ) ; } } logger . exit ( this , methodName ) ;
public class AUTH { /** * Handles a GMS header * @ param gms _ hdr * @ param msg * @ return true if the message should be passed up , or else false */ protected boolean handleAuthHeader ( GMS . GmsHeader gms_hdr , AuthHeader auth_hdr , Message msg ) { } }
if ( needsAuthentication ( gms_hdr ) ) { if ( this . auth_token . authenticate ( auth_hdr . getToken ( ) , msg ) ) return true ; // authentication passed , send message up the stack else { log . warn ( "%s: failed to validate AuthHeader (token: %s) from %s; dropping message and sending " + "rejection message" , local_addr , auth_token . getClass ( ) . getSimpleName ( ) , msg . src ( ) ) ; sendRejectionMessage ( gms_hdr . getType ( ) , msg . getSrc ( ) , "authentication failed" ) ; return false ; } } return true ;
public class DomainUtil { /** * Based on the current request represented by the HttpExchange construct a complete URL for the supplied path . * @ param exchange - The current HttpExchange * @ param path - The path to include in the constructed URL * @ return The constructed URL */ public static String constructUrl ( final HttpServerExchange exchange , final String path ) { } }
final HeaderMap headers = exchange . getRequestHeaders ( ) ; String host = headers . getFirst ( HOST ) ; String protocol = exchange . getConnection ( ) . getSslSessionInfo ( ) != null ? "https" : "http" ; return protocol + "://" + host + path ;
public class HelpDoclet {
    /**
     * Doclet implementations (subclasses) should return a GSONWorkUnit-derived object if the
     * GSON objects for the DocumentedFeature need to contain custom values.
     *
     * @param workUnit the work unit being serialized (unused by this base implementation;
     *        subclasses may consult it)
     * @param indexByGroupMaps index maps grouped by feature group (unused here)
     * @param featureMaps per-feature property maps (unused here)
     * @return a GSONWorkUnit-derived object; this base implementation returns a plain
     *         {@code GSONWorkUnit}
     */
    protected GSONWorkUnit createGSONWorkUnit(final DocWorkUnit workUnit, final List<Map<String, String>> indexByGroupMaps, final List<Map<String, String>> featureMaps) {
        // Base behavior: no customization, just a default work unit.
        return new GSONWorkUnit();
    }
}
public class MinMaxBinaryArrayDoubleEndedHeap {
    /**
     * Given a node at a maximum level, find its child or grandchild with the
     * maximum key. This method should not be called for a node which has no
     * children.
     *
     * @param k a node at a maximum level
     * @return the child or grandchild with a maximum key, or undefined if there
     *         are no children
     */
    @SuppressWarnings("unchecked")
    private int maxChildOrGrandchild(int k) {
        // Grandchildren of k occupy indices 4k .. 4k+3; children occupy 2k and 2k+1.
        // NOTE: the ++gc side effects below are order-dependent — each comparison
        // advances gc to the next grandchild index.
        int gc = 4 * k;
        int maxgc;
        K gcValue;
        // Case 1: all 4 grandchildren exist — the maximum must be among them
        // (children are dominated by their own children on a max level).
        if (gc + 3 <= size) {
            gcValue = array[gc];
            maxgc = gc;
            if (((Comparable<? super K>) array[++gc]).compareTo(gcValue) > 0) {
                gcValue = array[gc];
                maxgc = gc;
            }
            if (((Comparable<? super K>) array[++gc]).compareTo(gcValue) > 0) {
                gcValue = array[gc];
                maxgc = gc;
            }
            if (((Comparable<? super K>) array[++gc]).compareTo(gcValue) > 0) {
                maxgc = gc;
            }
            return maxgc;
        }
        // Fewer than 4 grandchildren: dispatch on how many exist (size - gc).
        switch (size - gc) {
        case 2:
            // 3 grandchildren, two children; the child 2k+1 has no children of its
            // own, but all its grandchildren slots are beyond `size`, so only the
            // grandchildren present need scanning.
            gcValue = array[gc];
            maxgc = gc;
            if (((Comparable<? super K>) array[++gc]).compareTo(gcValue) > 0) {
                gcValue = array[gc];
                maxgc = gc;
            }
            if (((Comparable<? super K>) array[++gc]).compareTo(gcValue) > 0) {
                maxgc = gc;
            }
            return maxgc;
        case 1:
            // 2 grandchildren, maybe two children; child 2k+1 (if present) has no
            // children, so it competes directly with the grandchildren.
            gcValue = array[gc];
            maxgc = gc;
            if (((Comparable<? super K>) array[++gc]).compareTo(gcValue) > 0) {
                gcValue = array[gc];
                maxgc = gc;
            }
            if (2 * k + 1 <= size && ((Comparable<? super K>) array[2 * k + 1]).compareTo(gcValue) > 0) {
                maxgc = 2 * k + 1;
            }
            return maxgc;
        case 0:
            // 1 grandchild, maybe two children; again only child 2k+1 can compete.
            gcValue = array[gc];
            maxgc = gc;
            if (2 * k + 1 <= size && ((Comparable<? super K>) array[2 * k + 1]).compareTo(gcValue) > 0) {
                maxgc = 2 * k + 1;
            }
            return maxgc;
        }
        // 0 grandchildren: the maximum is one of the (at most two) children.
        maxgc = 2 * k;
        gcValue = array[maxgc];
        if (2 * k + 1 <= size && ((Comparable<? super K>) array[2 * k + 1]).compareTo(gcValue) > 0) {
            maxgc = 2 * k + 1;
        }
        return maxgc;
    }
}
public class SeleniumProxyHandler { protected HttpTunnel newHttpTunnel ( HttpRequest request , HttpResponse response , InetAddress iaddr , int port , int timeoutMS ) throws IOException { } }
try { Socket socket = new Socket ( iaddr , port ) ; socket . setSoTimeout ( timeoutMS ) ; socket . setTcpNoDelay ( true ) ; return new HttpTunnel ( socket , null , null ) ; } catch ( IOException e ) { log . log ( Level . FINE , "Exception thrown" , e ) ; response . sendError ( HttpResponse . __400_Bad_Request ) ; return null ; }
public class Utils { /** * Throws a IllegalArgumentException if given String is empty * @ param str the parameter to check * @ param message the parameter to check * @ return the provided parameter */ public static String checkNotEmpty ( String str , String message ) { } }
if ( checkNotNull ( str ) . isEmpty ( ) ) { throw new IllegalArgumentException ( message ) ; } return str ;
public class ERELoading { /** * = = = = Fillers and transforming them to APF entity / value / time = = = = */ private EREFiller toFiller ( final Element xml , final String docid ) { } }
final String id = generateID ( XMLUtils . requiredAttribute ( xml , "id" ) , docid ) ; final String type = XMLUtils . requiredAttribute ( xml , "type" ) ; final int extentStart = XMLUtils . requiredIntegerAttribute ( xml , "offset" ) ; final int extentEnd = extentStart + XMLUtils . requiredIntegerAttribute ( xml , "length" ) - 1 ; final String text = xml . getTextContent ( ) ; final ERESpan span = ERESpan . from ( extentStart , extentEnd , text ) ; final EREFiller ereFiller ; if ( xml . hasAttribute ( NORMALIZED_TIME_ATTR ) ) { ereFiller = EREFiller . fromTime ( id , type , xml . getAttribute ( NORMALIZED_TIME_ATTR ) , span ) ; } else { ereFiller = EREFiller . from ( id , type , span ) ; } idMap . put ( id , ereFiller ) ; return ereFiller ;
public class SqlREPL { /** * メッセージの表示 * @ throws IOException IO例外 */ private void showMessage ( final Terminal terminal , final String path ) { } }
String messageFilePath = this . getClass ( ) . getPackage ( ) . getName ( ) . replace ( "." , "/" ) + path ; try ( BufferedReader reader = new BufferedReader ( new InputStreamReader ( Thread . currentThread ( ) . getContextClassLoader ( ) . getResourceAsStream ( messageFilePath ) , Charset . forName ( "UTF-8" ) ) ) ) { reader . lines ( ) . forEach ( s -> { try { terminal . writer ( ) . println ( s ) ; } catch ( Exception ex ) { // ここで例外が出てもメッセージ表示が正しく出ないだけなので 、 エラーを握りつぶす } } ) ; } catch ( IOException ex ) { // ここで例外が出てもメッセージ表示が正しく出ないだけなので 、 エラーを握りつぶす } terminal . flush ( ) ;
public class PelopsClient {
    /**
     * Persists an entity (and its relations) to Cassandra.
     *
     * <p>Two paths exist: a CQL3 path delegating to {@code cqlClient}, and a Thrift path
     * that converts the entity to {@code ThriftRow}s and writes (counter/super/plain)
     * columns via a Pelops {@code Mutator}.
     *
     * (non-Javadoc)
     * @see com.impetus.kundera.client.ClientBase#onPersist(com.impetus.kundera.metadata.model.EntityMetadata,
     *      java.lang.Object, java.lang.Object, java.util.List)
     */
    @Override
    protected void onPersist(EntityMetadata metadata, Object entity, Object id, List<RelationHolder> rlHolders) {
        if (!isOpen()) {
            throw new PersistenceException("PelopsClient is closed.");
        }
        // check for counter column: merge (update) on counter columns is unsupported
        if (isUpdate && metadata.isCounterColumnType()) {
            throw new UnsupportedOperationException("Invalid operation! Merge is not possible over counter column.");
        }
        // NOTE(review): insert_Query is never assigned, so the error logs below always
        // report a null query — looks like leftover scaffolding; verify before removing.
        String insert_Query = null;
        if (isCql3Enabled(metadata)) {
            // CQL3 path: delegate to the CQL client with the per-table TTL (if any).
            Cassandra.Client client = getRawClient(metadata.getSchema());
            try {
                cqlClient.persist(metadata, entity, client, rlHolders, getTtlValues().get(metadata.getTableName()));
            } catch (InvalidRequestException e) {
                log.error("Error during persist while executing query {}, Caused by: .", insert_Query, e);
                throw new KunderaException(e);
            } catch (TException e) {
                log.error("Error during persist while executing query {}, Caused by: .", insert_Query, e);
                throw new KunderaException(e);
            } catch (UnsupportedEncodingException e) {
                log.error("Error during persist while executing query {}, Caused by: .", insert_Query, e);
                throw new KunderaException(e);
            }
        } else {
            // Thrift path: convert the entity into one ThriftRow per column family.
            Collection<ThriftRow> tfRows = null;
            try {
                String columnFamily = metadata.getTableName();
                tfRows = dataHandler.toThriftRow(entity, id, metadata, columnFamily, getTtlValues().get(columnFamily));
            } catch (Exception e) {
                log.error("Error during persist, Caused by: .", e);
                throw new KunderaException(e);
            }
            for (ThriftRow tf : tfRows) {
                // Relations are only attached to the entity's own column family row.
                if (tf.getColumnFamilyName().equals(metadata.getTableName())) {
                    addRelationsToThriftRow(metadata, tf, rlHolders);
                }
                Mutator mutator = clientFactory.getMutator(pool);
                if (metadata.isCounterColumnType()) {
                    // Counter column family: write counter columns and counter super columns.
                    if (log.isInfoEnabled()) {
                        log.info("Persisting counter column family record for row key {}", tf.getId());
                    }
                    List<CounterColumn> thriftCounterColumns = tf.getCounterColumns();
                    List<CounterSuperColumn> thriftCounterSuperColumns = tf.getCounterSuperColumns();
                    if (thriftCounterColumns != null && !thriftCounterColumns.isEmpty()) {
                        mutator.writeCounterColumns(tf.getColumnFamilyName(),
                                Bytes.fromByteBuffer(CassandraUtilities.toBytes(tf.getId(), tf.getId().getClass())),
                                Arrays.asList(tf.getCounterColumns().toArray(new CounterColumn[0])));
                    }
                    if (thriftCounterSuperColumns != null && !thriftCounterSuperColumns.isEmpty()) {
                        for (CounterSuperColumn sc : thriftCounterSuperColumns) {
                            mutator.writeSubCounterColumns(tf.getColumnFamilyName(),
                                    Bytes.fromByteBuffer(CassandraUtilities.toBytes(tf.getId(), tf.getId().getClass())),
                                    Bytes.fromByteArray(sc.getName()), sc.getColumns());
                        }
                    }
                } else {
                    // Regular column family: write plain columns and super columns.
                    List<Column> thriftColumns = tf.getColumns();
                    List<SuperColumn> thriftSuperColumns = tf.getSuperColumns();
                    if (thriftColumns != null && !thriftColumns.isEmpty()) {
                        mutator.writeColumns(tf.getColumnFamilyName(),
                                Bytes.fromByteBuffer(CassandraUtilities.toBytes(tf.getId(), tf.getId().getClass())),
                                tf.getColumns());
                    }
                    if (thriftSuperColumns != null && !thriftSuperColumns.isEmpty()) {
                        for (SuperColumn sc : thriftSuperColumns) {
                            if (log.isInfoEnabled()) {
                                log.info("Persisting super column family record for row key {}", tf.getId());
                            }
                            mutator.writeSubColumns(tf.getColumnFamilyName(),
                                    Bytes.fromByteBuffer(CassandraUtilities.toBytes(tf.getId(), tf.getId().getClass())),
                                    Bytes.fromByteArray(sc.getName()), sc.getColumns());
                        }
                    }
                }
                // Flush this row's mutations at the configured consistency level.
                mutator.execute(getConsistencyLevel());
            }
            tfRows = null;
            // TTLs set per-request must not leak into subsequent operations.
            if (isTtlPerRequest()) {
                getTtlValues().clear();
            }
        }
    }
}
public class ColorDrawable { /** * Sets the drawable ' s color value . This action will clobber the results of prior calls to * { @ link # setAlpha ( int ) } on this object , which side - affected the underlying color . * @ param color The color to draw . */ public void setColor ( int color ) { } }
if ( mState . mBaseColor != color || mState . mUseColor != color ) { invalidateSelf ( ) ; mState . mBaseColor = mState . mUseColor = color ; }
public class CmsLinkValidationInternalTable { /** * Reloads the table . < p > */ void reload ( ) { } }
List < CmsResource > broken = m_linkValidator . failedResources ( m_resourcesToCheck ) ; if ( broken . size ( ) > 0 ) { setVisible ( true ) ; m_introComponent . setVisible ( false ) ; m_nullComponent . setVisible ( false ) ; } else { setVisible ( false ) ; m_introComponent . setVisible ( false ) ; m_nullComponent . setVisible ( true ) ; } fillTable ( getRootCms ( ) , broken ) ;
public class IndustryApi {
    /**
     * List character industry jobs (asynchronously). List industry jobs placed by a character
     * --- This route is cached for up to 300 seconds. SSO Scope:
     * esi-industry.read_character_jobs.v1
     *
     * @param characterId An EVE character ID (required)
     * @param datasource The server name you would like data from (optional, default to
     *        tranquility)
     * @param ifNoneMatch ETag from a previous request. A 304 will be returned if this matches
     *        the current ETag (optional)
     * @param includeCompleted Whether to retrieve completed character industry jobs. Only
     *        includes jobs from the past 90 days (optional)
     * @param token Access token to use if unable to set a header (optional)
     * @param callback The callback to be executed when the API call finishes
     * @return The request call
     * @throws ApiException If fail to process the API call, e.g. serializing the request body
     *         object
     */
    public com.squareup.okhttp.Call getCharactersCharacterIdIndustryJobsAsync(Integer characterId, String datasource, String ifNoneMatch, Boolean includeCompleted, String token, final ApiCallback<List<CharacterIndustryJobsResponse>> callback) throws ApiException {
        // Validate the parameters and build the HTTP call.
        com.squareup.okhttp.Call call = getCharactersCharacterIdIndustryJobsValidateBeforeCall(characterId, datasource, ifNoneMatch, includeCompleted, token, callback);
        // The response body is deserialized to List<CharacterIndustryJobsResponse>.
        Type localVarReturnType = new TypeToken<List<CharacterIndustryJobsResponse>>() {
        }.getType();
        apiClient.executeAsync(call, localVarReturnType, callback);
        return call;
    }
}
public class Waiter {
    /**
     * Waits for a text to be shown.
     *
     * @param text the text that needs to be shown, specified as a regular expression.
     * @param expectedMinimumNumberOfMatches the minimum number of matches of text that must be
     *        shown. {@code 0} means any number of matches
     * @param timeout the amount of time in milliseconds to wait
     * @param scroll {@code true} if scrolling should be performed
     * @param onlyVisible {@code true} if only visible text views should be waited for
     * @param hardStoppage {@code true} if search is to be stopped when timeout expires
     * @return the matching {@code TextView} if found before the timeout — presumably
     *         {@code null} otherwise; confirm against the generic overload's contract
     *         (the original doc incorrectly described a boolean return)
     */
    public TextView waitForText(String text, int expectedMinimumNumberOfMatches, long timeout, boolean scroll, boolean onlyVisible, boolean hardStoppage) {
        // Delegates to the generic overload, fixing the view class to TextView.
        return waitForText(TextView.class, text, expectedMinimumNumberOfMatches, timeout, scroll, onlyVisible, hardStoppage);
    }
}
public class DRConsumerDrIdTracker { /** * Merge the given tracker with the current tracker . Ranges can * overlap . After the merge , the current tracker will be truncated to the * larger safe point . * @ param tracker */ public void mergeTracker ( DRConsumerDrIdTracker tracker ) { } }
final long newSafePoint = Math . max ( tracker . getSafePointDrId ( ) , getSafePointDrId ( ) ) ; m_map . addAll ( tracker . m_map ) ; truncate ( newSafePoint ) ; m_lastSpUniqueId = Math . max ( m_lastSpUniqueId , tracker . m_lastSpUniqueId ) ; m_lastMpUniqueId = Math . max ( m_lastMpUniqueId , tracker . m_lastMpUniqueId ) ;
public class SetTimeBasedAutoScalingRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param setTimeBasedAutoScalingRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller the fields are written to
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(SetTimeBasedAutoScalingRequest setTimeBasedAutoScalingRequest, ProtocolMarshaller protocolMarshaller) {
        if (setTimeBasedAutoScalingRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each getter is bound to its protocol marshalling location.
            protocolMarshaller.marshall(setTimeBasedAutoScalingRequest.getInstanceId(), INSTANCEID_BINDING);
            protocolMarshaller.marshall(setTimeBasedAutoScalingRequest.getAutoScalingSchedule(), AUTOSCALINGSCHEDULE_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class FNPImpl {
    /**
     * Sets the value of the given EMF feature; the RG feature replaces its whole list,
     * any other feature is delegated to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
        case AfplibPackage.FNP__RG:
            // Replace the entire RG list with the supplied collection.
            getRg().clear();
            getRg().addAll((Collection<? extends FNPRG>) newValue);
            return;
        }
        super.eSet(featureID, newValue);
    }
}
public class HttpResponseMessageImpl { /** * Clear this message for re - use . */ @ Override public void clear ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Clearing this response: " + this ) ; } super . clear ( ) ; this . myStatusCode = StatusCodes . OK ; this . myReason = null ; this . myReasonBytes = null ;
public class ValueEnforcer { /** * Check if * < code > nValue & ge ; nLowerBoundInclusive & amp ; & amp ; nValue & le ; nUpperBoundInclusive < / code > * @ param aValue * Value * @ param sName * Name * @ param aLowerBoundInclusive * Lower bound * @ param aUpperBoundInclusive * Upper bound * @ return The value */ public static BigInteger isBetweenInclusive ( final BigInteger aValue , final String sName , @ Nonnull final BigInteger aLowerBoundInclusive , @ Nonnull final BigInteger aUpperBoundInclusive ) { } }
if ( isEnabled ( ) ) return isBetweenInclusive ( aValue , ( ) -> sName , aLowerBoundInclusive , aUpperBoundInclusive ) ; return aValue ;
public class MESubscription { /** * Returns the value of the topic . * @ return The topic . */ final String getTopic ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getTopic" ) ; SibTr . exit ( tc , "getTopic" , _topic ) ; } return _topic ;
public class ComponentFinder { /** * Gets the page if the request handler is instance of IPageRequestHandler . * @ param requestHandler * The { @ link IRequestHandler } to get the page . * @ return The page or null if not found . */ public static Page getPage ( final IRequestHandler requestHandler ) { } }
if ( requestHandler instanceof IPageRequestHandler ) { final IPageRequestHandler pageRequestHandler = ( IPageRequestHandler ) requestHandler ; return ( Page ) pageRequestHandler . getPage ( ) ; } return null ;
public class SyncRecordMessageFilterHandler {
    /**
     * On select, synchronize the records.
     *
     * @param field the field that changed
     * @param iChangeType the type of record change (ADD/UPDATE/DELETE/...)
     * @param bDisplayOption display option passed through to the superclass
     * @return the error code from the superclass record change
     */
    public int doRecordChange(FieldInfo field, int iChangeType, boolean bDisplayOption) {
        // Read a valid record
        int iErrorCode = super.doRecordChange(field, iChangeType, bDisplayOption);
        // Initialize the record
        if (iErrorCode == DBConstants.NORMAL_RETURN) {
            if ((iChangeType == DBConstants.ADD_TYPE) || (iChangeType == DBConstants.UPDATE_TYPE) || (iChangeType == DBConstants.DELETE_TYPE))
                this.updateListener(null); // Stop listening
            // NOTE(review): this inner check can only succeed if AFTER_REQUERY_TYPE equals
            // NORMAL_RETURN (we're already inside that branch) — it looks like dead or
            // unfinished code; the original author's commented-out SELECT_TYPE alternative
            // is preserved below. Verify intent before changing.
            if (iErrorCode == DBConstants.AFTER_REQUERY_TYPE) // ? || (iErrorCode == DBConstants.SELECT_TYPE))
            {
            }
        }
        return iErrorCode;
    }
}
public class Util {
    /**
     * Remove the hyphens from the beginning of <code>str</code> and
     * return the new String.
     *
     * @param str The string from which the hyphens should be removed; may be null.
     * @return the string without leading hyphens, or {@code null} if {@code str} was null.
     */
    static String stripLeadingHyphens(String str) {
        if (str == null) {
            return null;
        }
        // Idiom: substring(n) instead of the redundant substring(n, str.length()).
        // At most one prefix is stripped: "--" takes precedence over "-".
        if (str.startsWith("--")) {
            return str.substring(2);
        }
        if (str.startsWith("-")) {
            return str.substring(1);
        }
        return str;
    }
}
public class Util {
    /**
     * To stream.
     *
     * @param <T> the type parameter
     * @param iterator the iterator (must not be null)
     * @param size the estimated number of elements, used as the spliterator size hint
     * @param parallel whether the returned stream is parallel
     * @return an ordered stream over the iterator's elements
     * @throws NullPointerException if {@code iterator} is null
     */
    public static <T> Stream<T> toStream(final Iterator<T> iterator, final int size, final boolean parallel) {
        // The original only carried a non-JDK @Nonnull annotation; enforce the
        // contract at runtime instead so misuse fails fast with a clear message.
        Objects.requireNonNull(iterator, "iterator");
        return StreamSupport.stream(Spliterators.spliterator(iterator, size, Spliterator.ORDERED), parallel);
    }
}
public class PatternKit { /** * Verify that the suffix is a picture format . * @ param suffix filename suffix * @ return verify that success returns true , and the failure returns false . */ public static boolean isImage ( String suffix ) { } }
if ( null != suffix && ! "" . equals ( suffix ) && suffix . contains ( "." ) ) { String regex = "(.*?)(?i)(jpg|jpeg|png|gif|bmp|webp)" ; return isMatch ( regex , suffix ) ; } return false ;
public class ParameterizedTextureType {
    /**
     * Gets the value of the genericApplicationPropertyOfParameterizedTexture property.
     *
     * <p>This accessor method returns a reference to the live list, not a snapshot.
     * Therefore any modification you make to the returned list will be present inside the
     * JAXB object. This is why there is not a <CODE>set</CODE> method for the
     * genericApplicationPropertyOfParameterizedTexture property.
     *
     * <p>For example, to add a new item, do as follows:
     * <pre>
     * get_GenericApplicationPropertyOfParameterizedTexture().add(newItem);
     * </pre>
     *
     * <p>Objects of the following type(s) are allowed in the list:
     * {@link JAXBElement}{@code <}{@link Object}{@code >}
     */
    public List<JAXBElement<Object>> get_GenericApplicationPropertyOfParameterizedTexture() {
        // JAXB-style lazy initialization: the backing list is created on first access.
        if (_GenericApplicationPropertyOfParameterizedTexture == null) {
            _GenericApplicationPropertyOfParameterizedTexture = new ArrayList<JAXBElement<Object>>();
        }
        return this._GenericApplicationPropertyOfParameterizedTexture;
    }
}
public class HttpResponseStatus { /** * Parses the specified HTTP status line into a { @ link HttpResponseStatus } . The expected formats of the line are : * < ul > * < li > { @ code statusCode } ( e . g . 200 ) < / li > * < li > { @ code statusCode } { @ code reasonPhrase } ( e . g . 404 Not Found ) < / li > * < / ul > * @ throws IllegalArgumentException if the specified status line is malformed */ public static HttpResponseStatus parseLine ( CharSequence line ) { } }
return ( line instanceof AsciiString ) ? parseLine ( ( AsciiString ) line ) : parseLine ( line . toString ( ) ) ;
public class CmsVfsTab { /** * Collects the structure ids belonging to open tree entries . < p > * @ return the structure ids for the open tree entries */ Set < CmsUUID > getOpenElementIds ( ) { } }
Set < CmsUUID > ids = new HashSet < CmsUUID > ( ) ; for ( CmsLazyTreeItem item : m_treeItems ) { CmsVfsEntryBean entry = item . getData ( ) ; if ( item . isOpen ( ) ) { ids . add ( entry . getStructureId ( ) ) ; } } return ids ;
public class OneDReader {
  /**
   * We're going to examine rows from the middle outward, searching alternately above and below
   * the middle, and farther out each time. rowStep is the number of rows between each
   * successive attempt above and below the middle. So we'd scan row middle, then
   * middle - rowStep, then middle + rowStep, then middle - (2 * rowStep), etc.
   * rowStep is bigger as the image is taller, but is always at least 1. We've somewhat
   * arbitrarily decided that moving up and down by about 1/16 of the image is pretty good;
   * we try more of the image if "trying harder".
   *
   * @param image The image to decode
   * @param hints Any hints that were requested
   * @return The contents of the decoded barcode
   * @throws NotFoundException Any spontaneous errors which occur
   */
  private Result doDecode(BinaryBitmap image, Map<DecodeHintType, ?> hints) throws NotFoundException {
    int width = image.getWidth();
    int height = image.getHeight();
    BitArray row = new BitArray(width);
    boolean tryHarder = hints != null && hints.containsKey(DecodeHintType.TRY_HARDER);
    // Step is ~1/256 of the height when trying harder, ~1/32 otherwise; never below 1.
    int rowStep = Math.max(1, height >> (tryHarder ? 8 : 5));
    int maxLines;
    if (tryHarder) {
      maxLines = height; // Look at the whole image, not just the center
    } else {
      maxLines = 15; // 15 rows spaced 1/32 apart is roughly the middle half of the image
    }
    int middle = height / 2;
    for (int x = 0; x < maxLines; x++) {
      // Scanning from the middle out. Determine which row we're looking at next:
      int rowStepsAboveOrBelow = (x + 1) / 2;
      boolean isAbove = (x & 0x01) == 0; // i.e. is x even?
      int rowNumber = middle + rowStep * (isAbove ? rowStepsAboveOrBelow : -rowStepsAboveOrBelow);
      if (rowNumber < 0 || rowNumber >= height) {
        // Oops, if we run off the top or bottom, stop
        break;
      }
      // Estimate black point for this row and load it:
      try {
        row = image.getBlackRow(rowNumber, row);
      } catch (NotFoundException ignored) {
        continue;
      }
      // While we have the image data in a BitArray, it's fairly cheap to reverse it in place
      // to handle decoding upside down barcodes.
      for (int attempt = 0; attempt < 2; attempt++) {
        if (attempt == 1) { // trying again?
          row.reverse(); // reverse the row and continue
          // This means we will only ever draw result points *once* in the life of this method
          // since we want to avoid drawing the wrong points after flipping the row, and,
          // don't want to clutter with noise from every single row scan -- just the scans
          // that start on the center line.
          if (hints != null && hints.containsKey(DecodeHintType.NEED_RESULT_POINT_CALLBACK)) {
            Map<DecodeHintType, Object> newHints = new EnumMap<>(DecodeHintType.class);
            newHints.putAll(hints);
            newHints.remove(DecodeHintType.NEED_RESULT_POINT_CALLBACK);
            hints = newHints;
          }
        }
        try {
          // Look for a barcode
          Result result = decodeRow(rowNumber, row, hints);
          // We found our barcode
          if (attempt == 1) {
            // But it was upside down, so note that
            result.putMetadata(ResultMetadataType.ORIENTATION, 180);
            // And remember to flip the result points horizontally.
            ResultPoint[] points = result.getResultPoints();
            if (points != null) {
              points[0] = new ResultPoint(width - points[0].getX() - 1, points[0].getY());
              points[1] = new ResultPoint(width - points[1].getX() - 1, points[1].getY());
            }
          }
          return result;
        } catch (ReaderException re) {
          // continue -- just couldn't decode this row
        }
      }
    }
    throw NotFoundException.getNotFoundInstance();
  }
}
public class OffsetTime { /** * Returns a copy of this { @ code OffsetTime } with the specified offset ensuring * that the result has the same local time . * This method returns an object with the same { @ code LocalTime } and the specified { @ code ZoneOffset } . * No calculation is needed or performed . * For example , if this time represents { @ code 10:30 + 02:00 } and the offset specified is * { @ code + 03:00 } , then this method will return { @ code 10:30 + 03:00 } . * To take into account the difference between the offsets , and adjust the time fields , * use { @ link # withOffsetSameInstant } . * This instance is immutable and unaffected by this method call . * @ param offset the zone offset to change to , not null * @ return an { @ code OffsetTime } based on this time with the requested offset , not null */ public OffsetTime withOffsetSameLocal ( ZoneOffset offset ) { } }
return offset != null && offset . equals ( this . offset ) ? this : new OffsetTime ( time , offset ) ;
public class ConsistentColor {
    /**
     * Convert an angle in the CbCr plane to values cb, cr in the YCbCr color space.
     *
     * @see <a href="https://xmpp.org/extensions/xep-0392.html#algorithm-cbcr">§5.3: CbCr generation</a>
     * @param angle angle in the CbCr plane.
     * @return value pair cb, cr
     */
    private static double[] angleToCbCr(double angle) {
        final double cb = Math.cos(angle);
        final double cr = Math.sin(angle);
        // Scale so the larger of |cb|, |cr| becomes exactly 0.5
        // (same as the original if/else choosing 0.5/|cr| or 0.5/|cb|).
        final double scale = 0.5 / Math.max(Math.abs(cb), Math.abs(cr));
        return new double[] { cb * scale, cr * scale };
    }
}
public class RecordSet { /** * Get the number of items already received from the server . * @ return Nsumber of received items */ public int getNumberAvailable ( ) { } }
int result = 0 ; for ( int i = 0 ; i < data . size ( ) ; i ++ ) { if ( data . get ( i ) != null ) { result += 1 ; } } return result ;
public class Value { /** * One of ICE , HDFS , S3 , GCS , NFS or TCP , according to where this Value is persisted . * @ return Short String of the persitance name */ public static String nameOfPersist ( int x ) { } }
switch ( x ) { case ICE : return "ICE" ; case HDFS : return "HDFS" ; case S3 : return "S3" ; case NFS : return "NFS" ; case TCP : return "TCP" ; case GCS : return "GCS" ; default : return null ; }
public class GitRepositoryState { /** * = $ { git . build . version } */ public static GitRepositoryState get ( ) { } }
if ( gitRepositoryState == null ) { Properties properties = new Properties ( ) ; try { properties . load ( GitRepositoryState . class . getClassLoader ( ) . getResourceAsStream ( "git.properties" ) ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } gitRepositoryState = new GitRepositoryState ( properties ) ; } return gitRepositoryState ;
public class Annotation { /** * Compares the { @ code # start _ time } of this annotation to the given note * @ return 1 if the local start time is greater , - 1 if it ' s less or 0 if * equal */ @ Override public int compareTo ( Annotation note ) { } }
return start_time > note . start_time ? 1 : start_time < note . start_time ? - 1 : 0 ;
public class HttpDispatcherChannel { /** * @ see com . ibm . wsspi . channelfw . Channel # init ( ) */ @ Override @ Trivial public void init ( ) throws ChannelException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Init channel: " + this ) ; }
public class Context { /** * initialize Locale ( fr , en ) . */ private void initializeLocale ( ) { } }
final String locale = getProperty ( LOCALE , applicationProperties ) ; if ( locale != null && ! "" . equals ( locale ) ) { final String [ ] localeParts = locale . split ( "_" ) ; if ( localeParts . length == 2 ) { currentLocale = new Locale ( localeParts [ 0 ] , localeParts [ 1 ] ) ; } else { currentLocale = new Locale ( localeParts [ 0 ] ) ; } } else { currentLocale = Locale . getDefault ( ) ; } logger . info ( Messages . getMessage ( CONTEXT_LOCALE_USED ) , currentLocale ) ;
public class SocketHubReceiver { /** * Fire connectors . * @ param isReconnect true if reconnect . */ private synchronized void fireConnector ( final boolean isReconnect ) { } }
if ( active && connector == null ) { getLogger ( ) . debug ( "Starting a new connector thread." ) ; connector = new Connector ( isReconnect ) ; connector . setDaemon ( true ) ; connector . setPriority ( Thread . MIN_PRIORITY ) ; connector . start ( ) ; }
public class EmailIntentSender { /** * Creates the message subject * @ param context a context * @ return the message subject */ @ NonNull protected String buildSubject ( @ NonNull Context context ) { } }
final String subject = mailConfig . subject ( ) ; if ( subject != null ) { return subject ; } return context . getPackageName ( ) + " Crash Report" ;
public class ImmutableMultitable {
    /**
     * Guaranteed to throw an exception and leave the table unmodified.
     *
     * @throws UnsupportedOperationException always
     * @deprecated Unsupported operation.
     */
    @Deprecated
    @Override
    public boolean remove(@Nullable final Object rowKey, @Nullable final Object columnKey, final Object value) {
        // Immutable table: every mutating operation is rejected unconditionally.
        throw new UnsupportedOperationException();
    }
}
public class EJSHome {
    /**
     * Gets the set of wrappers for binding this home to naming.
     * Only the views this bean actually declares are populated: the remote
     * (resp. local) entry is null when the matching home impl class is absent
     * from the bean metadata.
     */
    public final HomeWrapperSet getWrapperSet() // d648522, d739542
            throws CSIException, RemoteException {
        EJSWrapperCommon w = getWrapper();
        Remote remote = beanMetaData.homeRemoteImplClass == null ? null : w.getRemoteWrapper();
        Object local = beanMetaData.homeLocalImplClass == null ? null : w.getLocalObject();
        return new HomeBindingInfo(remote, local);
    }
}
public class BaseNDArrayFactory {
    /**
     * Random normal using the current time stamp as the seed.
     *
     * @param rows    the number of rows in the matrix
     * @param columns the number of columns in the matrix
     * @return a rows x columns array of normally distributed samples
     */
    @Override
    public INDArray randn(long rows, long columns) {
        // Delegate to the shape-based overload, seeding with the wall clock.
        return randn(new long[] {rows, columns}, System.currentTimeMillis());
    }
}
public class Publisher { /** * Sends a notification for a feed located at " topic " . The feed MUST contain rel = " hub " . * @ param topic URL for the feed * @ param feed The feed itself * @ throws NotificationException Any failure */ public void sendUpdateNotification ( final String topic , final SyndFeed feed ) throws NotificationException { } }
for ( final SyndLink link : feed . getLinks ( ) ) { if ( "hub" . equals ( link . getRel ( ) ) ) { sendUpdateNotification ( link . getRel ( ) , topic ) ; return ; } } throw new NotificationException ( "Hub link not found." ) ;
public class ParseDateTag {
    /**
     * Evaluates expressions as necessary.
     *
     * Note: we don't check for type mismatches here; we assume the expression
     * evaluator will return the expected type (by virtue of knowledge we give it
     * about what that type is). A ClassCastException here is truly unexpected,
     * so we let it propagate up.
     */
    private void evaluateExpressions() throws JspException {
        // String-valued attributes all follow the same pattern; see evaluateString().
        if (value_ != null) {
            value = evaluateString("value", value_);
        }
        if (type_ != null) {
            type = evaluateString("type", type_);
        }
        if (dateStyle_ != null) {
            dateStyle = evaluateString("dateStyle", dateStyle_);
        }
        if (timeStyle_ != null) {
            timeStyle = evaluateString("timeStyle", timeStyle_);
        }
        if (pattern_ != null) {
            pattern = evaluateString("pattern", pattern_);
        }
        // 'timeZone' is kept as a raw Object; downstream code interprets it.
        if (timeZone_ != null) {
            timeZone = ExpressionEvaluatorManager.evaluate("timeZone", timeZone_, Object.class, this, pageContext);
        }
        // 'parseLocale' is evaluated as an Object then converted to a Locale;
        // a null conversion leaves the previous parseLocale untouched.
        if (parseLocale_ != null) {
            Object obj = ExpressionEvaluatorManager.evaluate("parseLocale", parseLocale_, Object.class, this, pageContext);
            Locale locale = LocaleUtil.parseLocaleAttributeValue(obj);
            if (locale != null) {
                this.parseLocale = locale;
            }
        }
    }

    /** Evaluates a single String-typed attribute expression against the page context. */
    private String evaluateString(String attrName, String expr) throws JspException {
        return (String) ExpressionEvaluatorManager.evaluate(attrName, expr, String.class, this, pageContext);
    }
}
public class MessageMD5ChecksumHandler { /** * Throw an exception if the MD5 checksums included in the ReceiveMessageResult do not match the * client - side calculation on the received messages . */ private static void receiveMessageResultMd5Check ( ReceiveMessageResult receiveMessageResult ) { } }
if ( receiveMessageResult . getMessages ( ) != null ) { for ( Message messageReceived : receiveMessageResult . getMessages ( ) ) { String messageBody = messageReceived . getBody ( ) ; String bodyMd5Returned = messageReceived . getMD5OfBody ( ) ; String clientSideBodyMd5 = calculateMessageBodyMd5 ( messageBody ) ; if ( ! clientSideBodyMd5 . equals ( bodyMd5Returned ) ) { throw new AmazonClientException ( String . format ( MD5_MISMATCH_ERROR_MESSAGE , MESSAGE_BODY , clientSideBodyMd5 , bodyMd5Returned ) ) ; } Map < String , MessageAttributeValue > messageAttr = messageReceived . getMessageAttributes ( ) ; if ( messageAttr != null && ! messageAttr . isEmpty ( ) ) { String attrMd5Returned = messageReceived . getMD5OfMessageAttributes ( ) ; String clientSideAttrMd5 = calculateMessageAttributesMd5 ( messageAttr ) ; if ( ! clientSideAttrMd5 . equals ( attrMd5Returned ) ) { throw new AmazonClientException ( String . format ( MD5_MISMATCH_ERROR_MESSAGE , MESSAGE_ATTRIBUTES , clientSideAttrMd5 , attrMd5Returned ) ) ; } } } }
public class ConsistencyCheckerController { /** * Returns the end date given the start date end the period . The end date is * startDate + period , but only if endDate is at least one period ago . That * is , we always leave the last incomplete period unprocessed . * Override this to control the job generation algorithm */ public Date getEndDate ( Date startDate , long period ) { } }
long now = getNow ( ) . getTime ( ) ; long endDate = startDate . getTime ( ) + period ; if ( now - period > endDate ) { return new Date ( endDate ) ; } else { return null ; }
public class CPDAvailabilityEstimatePersistenceImpl {
    /**
     * Returns an ordered range of all the cpd availability estimates where
     * commerceAvailabilityEstimateId = &#63;. Useful when paginating results:
     * returns at most <code>end - start</code> instances; <code>start</code> and
     * <code>end</code> are result-set indexes, not primary keys. Passing
     * {@link QueryUtil#ALL_POS} for both returns the full result set.
     *
     * @param commerceAvailabilityEstimateId the commerce availability estimate ID
     * @param start the lower bound of the range of cpd availability estimates
     * @param end the upper bound of the range of cpd availability estimates (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally <code>null</code>)
     * @return the ordered range of matching cpd availability estimates
     */
    @Override
    public List<CPDAvailabilityEstimate> findByCommerceAvailabilityEstimateId(long commerceAvailabilityEstimateId, int start, int end, OrderByComparator<CPDAvailabilityEstimate> orderByComparator) {
        // Delegates to the master overload; the trailing 'true' presumably enables
        // the finder cache — confirm against the five-argument overload.
        return findByCommerceAvailabilityEstimateId(commerceAvailabilityEstimateId, start, end, orderByComparator, true);
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link SingleOperationRefType}{@code >}.
     *
     * @param value Java instance representing the xml element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link SingleOperationRefType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "usesSingleOperation")
    public JAXBElement<SingleOperationRefType> createUsesSingleOperation(SingleOperationRefType value) {
        // Scope argument is null: the element is declared globally, not nested in a type.
        return new JAXBElement<SingleOperationRefType>(_UsesSingleOperation_QNAME, SingleOperationRefType.class, null, value);
    }
}
public class StorageProviderBase {
    /**
     * Sets the properties of this space. Note that this method is intentionally
     * not exposed to users, as it is not meant to be used for user properties,
     * but only for system-level properties. The names and values need to be
     * kept short, and the overall number of properties needs to be tightly
     * limited, or there will be issues due to provider-specific limitation.
     * This method allows for Space Access control details to be updated at the
     * same time as space properties.
     *
     * @param spaceId id of the space to update
     * @param spaceProperties system-level properties to store (mutated: ACL entries are merged in)
     * @param spaceACLs ACLs to pack into the property set
     * @throws StorageException if the space cannot be found after all retries
     */
    public void setNewSpaceProperties(String spaceId,
                                      Map<String, String> spaceProperties,
                                      Map<String, AclType> spaceACLs) {
        // Add ACLs to the properties list
        spaceProperties.putAll(packACLs(spaceACLs));

        boolean success = false;
        int maxLoops = 6;
        // Retry while the space is not yet visible (NotFoundException), up to
        // maxLoops attempts. NOTE(review): there is no delay between attempts —
        // confirm whether a backoff was intended here.
        for (int loops = 0; !success && loops < maxLoops; loops++) {
            try {
                doSetSpaceProperties(spaceId, spaceProperties);
                success = true;
            } catch (NotFoundException e) {
                success = false;
            }
        }

        if (!success) {
            throw new StorageException("Properties for space " + spaceId +
                                       " could not be created. " +
                                       "The space cannot be found.");
        }
    }
}
public class EjbProcessApplication { /** * determine the ee application name based on information obtained from JNDI . */ protected String lookupEeApplicationName ( ) { } }
try { InitialContext initialContext = new InitialContext ( ) ; String appName = ( String ) initialContext . lookup ( JAVA_APP_APP_NAME_PATH ) ; String moduleName = ( String ) initialContext . lookup ( MODULE_NAME_PATH ) ; // make sure that if an EAR carries multiple PAs , they are correctly // identified by appName + moduleName if ( moduleName != null && ! moduleName . equals ( appName ) ) { return appName + "/" + moduleName ; } else { return appName ; } } catch ( NamingException e ) { throw LOG . ejbPaCannotAutodetectName ( e ) ; }
public class StorageUtil {
    /**
     * Store loaded data to an xml file.
     *
     * @param doc  the document to store
     * @param file the target file
     * @throws IOException if writing fails
     */
    public void store(Document doc, File file) throws IOException {
        // Adapt the File into a Resource and delegate to the Resource-based overload.
        store(doc, ResourceUtil.toResource(file));
    }
}
public class IonTokenReader {
    /**
     * Scans a timestamp after reading <code>yyyy-</code>.
     * We can be a little lenient here since the result will be reparsed and
     * validated more thoroughly by {@link Timestamp#valueOf(CharSequence)}.
     * Uses two labeled pseudo-loops ({@code endofdate}, {@code check4timezone})
     * purely as jump targets to escape deeply nested conditionals.
     *
     * @param c the last character scanned; must be <code>'-'</code>.
     * @return {@link Type#constTime}
     */
    Type scanTimestamp(int c) throws IOException {
        endofdate:
        for (;;) { // fake for loop to create a label we can jump out of,
                   // because 4 or 5 levels of nested if's is just ugly

            // at this point we will have read leading digits and exactly 1 dash
            // in other words, we'll have read the year

            if (c == 'T') {
                // yearT is a valid timestamp value
                value.append((char) c);
                c = this.read(); // because we'll unread it before we return
                break endofdate;
            }
            if (c != '-') {
                // not a dash or a T after the year - so this is a bad value
                throw new IllegalStateException("invalid timestamp, expecting a dash here at " + this.position());
            }

            // append the dash and then read the month field
            value.append((char) c); // so append it, because we haven't already
            c = readDigits(2, "month");
            if (c == 'T') {
                // year-monthT is a valid timestamp value
                value.append((char) c);
                c = this.read(); // because we'll unread it before we return
                break endofdate;
            }
            if (c != '-') {
                // if the month isn't followed by a dash or a T it's an invalid month
                throw new IonException("invalid timestamp, expecting month at " + this.position());
            }

            // append the dash and read the day (or day-of-month) field
            value.append((char) c);
            c = readDigits(2, "day of month");
            if (c == 'T') {
                check4timezone:
                for (;;) { // another fake label/for = goto
                    // attach the 'T' to the value we're collecting
                    value.append((char) c);
                    // we're going to "watch" how many digits we read in the hours
                    // field. It's 0 that's actually ok, since we can end at the
                    // 'T' we just read
                    int length_before_reading_hours = value.length();
                    // so read the hours
                    c = readDigits(2, "hours");
                    if (length_before_reading_hours == value.length()) {
                        // FIXME I don't think there should be a timezone here
                        break check4timezone;
                    }
                    if (c != ':') {
                        throw new IonException("invalid timestamp, expecting hours at " + this.position());
                    }
                    value.append((char) c);
                    // so read the minutes
                    c = readDigits(2, "minutes");
                    if (c != ':') {
                        // a sign or 'Z' here starts the timezone suffix
                        if (c == '-' || c == '+' || c == 'Z') {
                            break check4timezone;
                        }
                        break endofdate;
                    }
                    value.append((char) c);
                    // so read the seconds
                    c = readDigits(2, "seconds");
                    if (c != '.') {
                        if (c == '-' || c == '+' || c == 'Z') {
                            break check4timezone;
                        }
                        break endofdate;
                    }
                    value.append((char) c);
                    // so read the fractional seconds
                    c = readDigits(32, "fractional seconds");
                    break check4timezone;
                } // check4timezone

                // now check to see if it's a timezone offset we're looking at
                if (c == '-' || c == '+') {
                    value.append((char) c);
                    // so read the timezone offset
                    c = readDigits(2, "timezone offset");
                    if (c != ':')
                        break endofdate;
                    value.append((char) c);
                    c = readDigits(2, "timezone offset");
                } else if (c == 'Z') {
                    value.append((char) c);
                    c = this.read(); // because we'll unread it before we return
                }
            }
            break endofdate;
        } // endofdate

        checkAndUnreadNumericStopper(c);
        return Type.constTime;
    }
}
public class FastSet {
    /**
     * {@inheritDoc}
     */
    @Override
    public boolean containsAny(IntSet c) {
        // Convention: a null/empty argument, or the set itself, is reported as true.
        if (c == null || c.isEmpty() || c == this) {
            return true;
        }
        if (isEmpty()) {
            return false;
        }

        final FastSet other = convert(c);
        final int[] localWords = words; // faster
        final int[] localOtherWords = other.words; // faster
        // Any common set bit within the overlapping word range proves a shared element;
        // words beyond min(firstEmptyWord) cannot intersect.
        for (int i = Math.min(firstEmptyWord, other.firstEmptyWord) - 1; i >= 0; i--) {
            if ((localWords[i] & localOtherWords[i]) != 0) {
                return true;
            }
        }
        return false;
    }
}
public class LineColumnReader { /** * Skips characters . * @ param toSkip the number of characters to skip * @ return The number of characters actually skipped */ @ Override public long skip ( long toSkip ) throws IOException { } }
for ( long i = 0 ; i < toSkip ; i ++ ) { int intRead = read ( ) ; if ( intRead == - 1 ) return i ; } return toSkip ;
public class AbstractBlockBasedDataStore {
    /**
     * Retrieves a stored entry by walking its block chain and concatenating the
     * block contents into a single byte array.
     *
     * (non-Javadoc)
     * @see net.timewalker.ffmq4.utils.store.ChainedDataStore#retrieve(int)
     */
    @Override
    public final Object retrieve(int handle) throws DataStoreException {
        if (SAFE_MODE) checkHandle(handle);

        // Compute total size
        int totalSize = computeSize(handle);

        // Retrieve all blocks
        byte[] data = new byte[totalSize];
        int offset = 0;
        int current = handle;
        while (current != -1) {
            int blockLen = allocatedSize[current];

            // Read block from map file
            readDataBlock(data, offset, blockLen, current);
            offset += blockLen;

            // Stop after copying the block flagged as the chain terminator.
            if ((flags[current] & FLAG_END_BLOCK) > 0)
                break;
            current = nextBlock[current];
        }
        // Running off the end of the chain without seeing the end flag means the
        // store is corrupt for this handle.
        if (current == -1)
            throw new DataStoreException("Can't find end block for " + handle);

        return data;
    }
}
public class Collectors {
    /**
     * Use occurrences to save the count of largest objects if
     * {@code areAllSmallestSame = true} (e.g. {@code Number/String/...}) and
     * return a list by repeating the smallest object {@code n} times.
     *
     * @param atMostSize maximum number of elements to collect
     * @param areAllSmallestSame whether all smallest elements are interchangeable
     * @return a collector gathering the smallest elements by natural order
     * @see Collectors#minAll(Comparator, int, boolean)
     */
    @SuppressWarnings("rawtypes")
    public static <T extends Comparable> Collector<T, ?, List<T>> minAll(final int atMostSize, final boolean areAllSmallestSame) {
        // Natural-ordering variant; delegates to the comparator-based overload.
        return minAll(Fn.naturalOrder(), atMostSize, areAllSmallestSame);
    }
}
public class MDAGNode {
    /**
     * Gets the nodes visited along a string's transition path.<br>
     * Retrieves the nodes in the _transition path starting from this node
     * corresponding to a given String.
     *
     * @param str a String corresponding to a _transition path starting from this node
     * @return a Stack of MDAGNodes containing the nodes in the _transition path
     *         denoted by {@code str}, in the order they are encountered in during transitioning
     */
    public Stack<MDAGNode> getTransitionPathNodes(String str) {
        Stack<MDAGNode> nodeStack = new Stack<MDAGNode>();

        MDAGNode currentNode = this;
        int numberOfChars = str.length();

        // Iteratively _transition through the MDAG using the chars in str,
        // putting each encountered node in nodeStack.
        // NOTE(review): when a transition fails, its null result is pushed before
        // the loop stops, so the returned stack may end with a null entry.
        for (int i = 0; i < numberOfChars && currentNode != null; i++) {
            currentNode = currentNode.transition(str.charAt(i));
            nodeStack.add(currentNode);
        }

        return nodeStack;
    }
}
public class Dependencies { /** * Find an entry in the allowed list by package name . * @ param packageName * Name to find . * @ return Package or < code > null < / code > if no entry with the given name was found . */ public final Package < DependsOn > findAllowedByName ( final String packageName ) { } }
final List < Package < DependsOn > > list = getAllowed ( ) ; for ( final Package < DependsOn > pkg : list ) { if ( pkg . getName ( ) . equals ( packageName ) ) { return pkg ; } } return null ;
public class BootStrapper {
    /**
     * If initialtoken was specified, use that (split on comma).
     * Otherwise, if num_tokens == 1, pick a random token (with a warning).
     * Else choose num_tokens tokens at random.
     *
     * @param metadata current ring metadata, used to reject already-owned tokens
     * @return the tokens this node should bootstrap with
     * @throws ConfigurationException on invalid configuration or token conflicts
     */
    public static Collection<Token> getBootstrapTokens(final TokenMetadata metadata) throws ConfigurationException {
        Collection<String> initialTokens = DatabaseDescriptor.getInitialTokens();
        // if user specified tokens, use those
        if (initialTokens.size() > 0) {
            logger.debug("tokens manually specified as {}", initialTokens);
            List<Token> tokens = new ArrayList<Token>(initialTokens.size());
            for (String tokenString : initialTokens) {
                Token token = StorageService.getPartitioner().getTokenFactory().fromString(tokenString);
                // Refuse to bootstrap onto a token another live node already owns.
                if (metadata.getEndpoint(token) != null)
                    throw new ConfigurationException("Bootstrapping to existing token " + tokenString + " is not allowed (decommission/removenode the old node first).");
                tokens.add(token);
            }
            return tokens;
        }

        int numTokens = DatabaseDescriptor.getNumTokens();
        if (numTokens < 1)
            throw new ConfigurationException("num_tokens must be >= 1");

        if (numTokens == 1)
            logger.warn("Picking random token for a single vnode. You should probably add more vnodes; failing that, you should probably specify the token manually");

        return getRandomTokens(metadata, numTokens);
    }
}
public class SnapToLineEdge {
    /**
     * Fits a line defined by the two points. When fitting the line the weight of
     * the edge is used to determine how influential the point is. Multiple calls
     * might be required to get a perfect fit.
     *
     * @param a Start of line
     * @param b End of line
     * @param found (output) Fitted line to the edge
     * @return true if successful or false if it failed (fewer than 4 sample points)
     */
    public boolean refine(Point2D_F64 a, Point2D_F64 b, LineGeneral2D_F64 found) {
        // determine the local coordinate system (midpoint of a-b, scaled by half-length)
        center.x = (a.x + b.x) / 2.0;
        center.y = (a.y + b.y) / 2.0;
        localScale = a.distance(center);

        // define the line which points are going to be sampled along
        double slopeX = (b.x - a.x);
        double slopeY = (b.y - a.y);
        double r = Math.sqrt(slopeX * slopeX + slopeY * slopeY);

        // tangent of unit length that radial sample samples are going to be along
        // Two choices for tangent here. Select the one which points to the "right" of the line,
        // which is inside of the edge
        double tanX = slopeY / r;
        double tanY = -slopeX / r;

        // set up inputs into line fitting
        computePointsAndWeights(slopeX, slopeY, a.x, a.y, tanX, tanY);

        if (samplePts.size() >= 4) {
            // fit line and convert into generalized format
            if (null == FitLine_F64.polar(samplePts.toList(), weights.data, polar)) {
                throw new RuntimeException("All weights were zero, bug some place");
            }
            UtilLine2D_F64.convert(polar, found);

            // Convert line from local to global coordinates
            localToGlobal(found);
            return true;
        } else {
            // too few usable samples to fit a line
            return false;
        }
    }
}
public class Restarter { /** * Restart the running application . * @ param failureHandler a failure handler to deal with application that doesn ' t start */ public void restart ( FailureHandler failureHandler ) { } }
if ( ! this . enabled ) { this . logger . debug ( "Application restart is disabled" ) ; return ; } this . logger . debug ( "Restarting application" ) ; getLeakSafeThread ( ) . call ( ( ) -> { Restarter . this . stop ( ) ; Restarter . this . start ( failureHandler ) ; return null ; } ) ;
public class CmsXmlUtils { /** * Returns the last Xpath element from the provided path , * without the index value . < p > * Examples : < br > * < code > title < / code > is left untouched < br > * < code > title [ 1 ] < / code > becomes < code > title < / code > < br > * < code > title / subtitle < / code > becomes < code > subtitle < / code > < br > * < code > title [ 1 ] / subtitle [ 1 ] < / code > becomes < code > subtitle < / code > < p > * @ param path the path to get the last Xpath element from * @ return the last Xpath element from the provided path */ public static String getLastXpathElement ( String path ) { } }
int pos = path . lastIndexOf ( '/' ) ; if ( pos >= 0 ) { path = path . substring ( pos + 1 ) ; } return CmsXmlUtils . removeXpathIndex ( path ) ;
public class JKJdbcUtil { /** * Ping . * @ param driver the driver * @ param url the url * @ param user the user * @ param password the password */ public static void ping ( String driver , String url , String user , String password ) { } }
try { Class . forName ( driver ) ; Connection connection = DriverManager . getConnection ( url , user , password ) ; connection . close ( ) ; } catch ( Exception e ) { JKExceptionUtil . handle ( e ) ; }
public class MutableSymbolTable {
    /**
     * Remap a set of oldIndex -&gt; newIndex such that this whole thing is done as
     * one operation and you don't have to worry about the ordering to consider
     * to be sure you don't lose any symbols. Each indexPair is &lt;oldId, newId&gt;.
     *
     * @param listOfOldToNew pairs of (old id, new id) to remap as one operation
     */
    public void remapAll(List<IndexPair> listOfOldToNew) {
        // First pass: resolve every old id to its symbol BEFORE touching the maps,
        // so overlapping old/new ids cannot clobber each other mid-remap.
        List<String> symbols = Lists.newArrayListWithCapacity(listOfOldToNew.size());
        int max = -1;
        for (int i = 0; i < listOfOldToNew.size(); i++) {
            IndexPair indexPair = listOfOldToNew.get(i);
            symbols.add(invert().keyForId(indexPair.getLeft()));
            max = Math.max(max, indexPair.getRight());
        }
        // Keep the id generator ahead of every id we just assigned.
        if (max >= nextId) {
            nextId = max + 1;
        }
        // now actually remap them
        for (int i = 0; i < listOfOldToNew.size(); i++) {
            IndexPair pair = listOfOldToNew.get(i);
            String symbol = symbols.get(i);
            idToSymbol.remove(pair.getLeft());
            symbolToId.remove(symbol);
            idToSymbol.put(pair.getRight(), symbol);
            symbolToId.put(symbol, pair.getRight());
        }
    }
}
public class SlotManager { /** * Unregisters the task manager identified by the given instance id and its associated slots * from the slot manager . * @ param instanceId identifying the task manager to unregister * @ return True if there existed a registered task manager with the given instance id */ public boolean unregisterTaskManager ( InstanceID instanceId ) { } }
checkInit ( ) ; LOG . debug ( "Unregister TaskManager {} from the SlotManager." , instanceId ) ; TaskManagerRegistration taskManagerRegistration = taskManagerRegistrations . remove ( instanceId ) ; if ( null != taskManagerRegistration ) { internalUnregisterTaskManager ( taskManagerRegistration ) ; return true ; } else { LOG . debug ( "There is no task manager registered with instance ID {}. Ignoring this message." , instanceId ) ; return false ; }
public class MathUtil { /** * Return the squared distance between the given points . */ public static int distanceSq ( int x0 , int y0 , int x1 , int y1 ) { } }
return ( ( x1 - x0 ) * ( x1 - x0 ) ) + ( ( y1 - y0 ) * ( y1 - y0 ) ) ;
public class AbstractKQueueChannel {
    /**
     * Returns an off-heap copy of the specified {@link ByteBuf}, and releases the
     * specified holder. The caller must ensure that the holder releases the
     * original {@link ByteBuf} when the holder is released by this method.
     */
    protected final ByteBuf newDirectBuffer(Object holder, ByteBuf buf) {
        final int readableBytes = buf.readableBytes();
        if (readableBytes == 0) {
            // Nothing to copy: release the holder and hand back the shared empty buffer.
            ReferenceCountUtil.release(holder);
            return Unpooled.EMPTY_BUFFER;
        }

        final ByteBufAllocator alloc = alloc();
        if (alloc.isDirectBufferPooled()) {
            // Pooled direct buffers are cheap; allocate from the pool.
            return newDirectBuffer0(holder, buf, alloc, readableBytes);
        }

        // No pooling: prefer the thread-local direct buffer when available,
        // otherwise fall back to a fresh allocation.
        final ByteBuf directBuf = ByteBufUtil.threadLocalDirectBuffer();
        if (directBuf == null) {
            return newDirectBuffer0(holder, buf, alloc, readableBytes);
        }

        directBuf.writeBytes(buf, buf.readerIndex(), readableBytes);
        ReferenceCountUtil.safeRelease(holder);
        return directBuf;
    }
}
public class AndroidSentryClientFactory { /** * Check whether the application has been granted a certain permission . * @ param permission Permission as a string * @ return true if permissions is granted */ private boolean checkPermission ( String permission ) { } }
int res = ctx . checkCallingOrSelfPermission ( permission ) ; return ( res == PackageManager . PERMISSION_GRANTED ) ;
public class ServiceQuery { /** * Returns the total number of inner landscape points in the ad group bid landscape page . */ private int getTotalLandscapePointsInPage ( AdGroupBidLandscapePage page ) { } }
int totalLandscapePointsInPage = 0 ; for ( AdGroupBidLandscape adGroupBidLandscape : page . getEntries ( ) ) { totalLandscapePointsInPage += adGroupBidLandscape . getLandscapePoints ( ) . size ( ) ; } return totalLandscapePointsInPage ;
public class ExecutionStartedEventDetailsMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param executionStartedEventDetails the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write the fields into
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(ExecutionStartedEventDetails executionStartedEventDetails, ProtocolMarshaller protocolMarshaller) {
        if (executionStartedEventDetails == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(executionStartedEventDetails.getInput(), INPUT_BINDING);
            protocolMarshaller.marshall(executionStartedEventDetails.getRoleArn(), ROLEARN_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class CommerceAddressPersistenceImpl {
    /**
     * Returns the last commerce address in the ordered set where commerceRegionId = &#63;.
     *
     * @param commerceRegionId the commerce region ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce address
     * @throws NoSuchAddressException if a matching commerce address could not be found
     */
    @Override
    public CommerceAddress findByCommerceRegionId_Last(long commerceRegionId, OrderByComparator<CommerceAddress> orderByComparator) throws NoSuchAddressException {
        CommerceAddress commerceAddress = fetchByCommerceRegionId_Last(commerceRegionId, orderByComparator);

        if (commerceAddress != null) {
            return commerceAddress;
        }

        // Not found: build the "no such entity" message, e.g. "...{commerceRegionId=42}".
        StringBundler msg = new StringBundler(4);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("commerceRegionId=");
        msg.append(commerceRegionId);
        msg.append("}");

        throw new NoSuchAddressException(msg.toString());
    }
}
public class KeysInner {
    /**
     * Retrieve the automation keys for an account.
     *
     * @param resourceGroupName Name of an Azure Resource group.
     * @param automationAccountName The name of the automation account.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the KeyListResultInner object if successful.
     */
    public KeyListResultInner listByAutomationAccount(String resourceGroupName, String automationAccountName) {
        // Blocking wrapper over the async variant; unwraps the single response body.
        return listByAutomationAccountWithServiceResponseAsync(resourceGroupName, automationAccountName).toBlocking().single().body();
    }
}
public class ParallelOracleBuilders { /** * Creates a { @ link StaticParallelOracleBuilder } using the provided collection of membership oracles . The resulting * parallel oracle will always use a { @ link PoolPolicy # FIXED } pool policy and spawn a separate thread for each of * the provided oracles ( so that the oracles do not need to care about synchronization if they don ' t share state ) . * @ param oracles * the oracle instances to distribute the queries to * @ param < I > * input symbol type * @ param < D > * output domain type * @ return the preconfigured oracle builder */ @ Nonnull public static < I , D > StaticParallelOracleBuilder < I , D > newStaticParallelOracle ( Collection < ? extends MembershipOracle < I , D > > oracles ) { } }
return new StaticParallelOracleBuilder < > ( oracles ) ;
public class CmsRestoreDeletedDialog { /** * Submits the dialog . < p > */ void submit ( ) { } }
List < CmsUUID > selectedIds = getSelectedIds ( ) ; List < CmsUUID > updated = Lists . newArrayList ( ) ; CmsObject cms = m_dialogContext . getCms ( ) ; try { for ( CmsUUID selectedId : selectedIds ) { cms . restoreDeletedResource ( selectedId ) ; updated . add ( selectedId ) ; } m_dialogContext . finish ( updated ) ; } catch ( CmsException e ) { m_dialogContext . error ( e ) ; }
public class Sets { /** * This method returns true if set A is a superset of set B * i . e . it answers the question if A contains all items from B * @ param setA set A * @ param setB set B * @ param < T > type * @ return { @ code boolean } true if A is a superset of B */ public static < T > boolean isSuperset ( Set < T > setA , Set < T > setB ) { } }
return setA . containsAll ( setB ) ;
public class DefaultCookie { /** * Validate a cookie attribute value , throws a { @ link IllegalArgumentException } otherwise . * Only intended to be used by { @ link io . netty . handler . codec . http . DefaultCookie } . * @ param name attribute name * @ param value attribute value * @ return the trimmed , validated attribute value * @ deprecated CookieUtil is package private , will be removed once old Cookie API is dropped */ @ Deprecated protected String validateValue ( String name , String value ) { } }
return validateAttributeValue ( name , value ) ;
public class FilePickerFragment { /** * This the method that gets notified when permission is granted / denied . By default , * a granted request will result in a refresh of the list . * @ param requestCode the code you requested * @ param permissions array of permissions you requested . empty if process was cancelled . * @ param grantResults results for requests . empty if process was cancelled . */ @ Override public void onRequestPermissionsResult ( int requestCode , @ NonNull String [ ] permissions , @ NonNull int [ ] grantResults ) { } }
// If arrays are empty , then process was cancelled if ( permissions . length == 0 ) { // Treat this as a cancel press if ( mListener != null ) { mListener . onCancelled ( ) ; } } else { // if ( requestCode = = PERMISSIONS _ REQUEST _ WRITE _ EXTERNAL _ STORAGE ) { if ( PackageManager . PERMISSION_GRANTED == grantResults [ 0 ] ) { // Do refresh if ( mRequestedPath != null ) { refresh ( mRequestedPath ) ; } } else { Toast . makeText ( getContext ( ) , R . string . nnf_permission_external_write_denied , Toast . LENGTH_SHORT ) . show ( ) ; // Treat this as a cancel press if ( mListener != null ) { mListener . onCancelled ( ) ; } } }
import java.util.*;

public class Main {

    /**
     * Calculates the difference between the maximum and minimum value in a list.
     *
     * @param array list of integers (must be non-empty; an empty list makes
     *              {@link Collections#max} throw {@link NoSuchElementException})
     * @return the difference between the largest and smallest value in the list
     *
     * Examples:
     * >>> computeDifference([1, 2, 3, 4])  -> 3
     * >>> computeDifference([4, 5, 12])    -> 8
     * >>> computeDifference([9, 2, 3])     -> 7
     */
    public static int computeDifference(List<Integer> array) {
        int largest = Collections.max(array);
        int smallest = Collections.min(array);
        return largest - smallest;
    }
}
public class RethinkDBClient { /** * ( non - Javadoc ) * @ see com . impetus . kundera . client . ClientBase # delete ( java . lang . Object , * java . lang . Object ) */ @ Override protected void delete ( Object entity , Object pKey ) { } }
EntityMetadata entityMetadata = KunderaMetadataManager . getEntityMetadata ( kunderaMetadata , entity . getClass ( ) ) ; r . db ( entityMetadata . getSchema ( ) ) . table ( entityMetadata . getTableName ( ) ) . get ( pKey ) . delete ( ) . run ( connection ) ;
public class ErrorPage { /** * Use of the WAR class loader is correct . */ @ SuppressWarnings ( "rawtypes" ) public Class getException ( ) { } }
try { return Class . forName ( errorParam , true , Thread . currentThread ( ) . getContextClassLoader ( ) ) . newInstance ( ) . getClass ( ) ; } catch ( Exception e ) { return null ; }
public class InitiatorMailbox {

    /**
     * Produce the repair log. This is idempotent.
     *
     * If the request is tied to a dead host whose connection has not fully closed
     * yet, the message is re-queued (possibly repeatedly, on a 10 ms delay) until
     * the messenger confirms repair can complete; only then is the repair log
     * actually produced and sent back to the requester.
     */
    private void handleLogRequest(VoltMessage message) {
        Iv2RepairLogRequestMessage req = (Iv2RepairLogRequestMessage) message;
        // It is possible for a dead host to queue messages after a repair request is processed
        // so make sure this can't happen by re-queuing this message after we know the dead host is gone.
        // Since we are not checking validateForeignHostId on the PicoNetwork thread, it is possible for
        // the PicoNetwork thread to validateForeignHostId and queue a message behind this repair message.
        // Further, we lose visibility to the ForeignHost as soon as HostMessenger marks the host invalid
        // even though the PicoNetwork thread could still be alive, so we proceed skeptically and
        // re-check until the connection is known to be gone.
        int deadHostId = req.getDeadHostId();
        // Integer.MAX_VALUE acts as the "no dead host involved" sentinel.
        if (deadHostId != Integer.MAX_VALUE) {
            if (m_messenger.canCompleteRepair(deadHostId)) {
                // Make sure we are the last in the task queue when we know the ForeignHost is gone
                req.disableDeadHostCheck();
                deliver(message);
            } else {
                // Throttled warning: only every 100th retry after the first 100.
                if (req.getRepairRetryCount() > 100 && req.getRepairRetryCount() % 100 == 0) {
                    hostLog.warn("Repair Request for dead host " + deadHostId + " has not been processed yet because connection has not closed");
                }
                Runnable retryRepair = new Runnable() {
                    @Override
                    public void run() {
                        InitiatorMailbox.this.deliver(message);
                    }
                };
                VoltDB.instance().scheduleWork(retryRepair, 10, -1, TimeUnit.MILLISECONDS);
                // the repair message will be resubmitted shortly when the ForeignHosts to the dead host have been removed
            }
            return;
        }
        // Normal path: stream the repair log contents back to the requester.
        List<Iv2RepairLogResponseMessage> logs = m_repairLog.contents(req.getRequestId(), req.isMPIRequest());
        if (req.isMPIRequest()) {
            m_scheduler.cleanupTransactionBacklogOnRepair();
        }
        for (Iv2RepairLogResponseMessage log : logs) {
            send(message.m_sourceHSId, log);
        }
    }

}
public class CorpusAdministration { /** * Extract the zipped ANNIS corpus files to an output directory . * @ param outDir The ouput directory . * @ param zip ZIP - file to extract . * @ return A list of root directories where the tab - files are located if * found , null otherwise . */ private List < File > unzipCorpus ( File outDir , ZipFile zip ) { } }
List < File > rootDirs = new ArrayList < > ( ) ; Enumeration < ? extends ZipEntry > zipEnum = zip . entries ( ) ; while ( zipEnum . hasMoreElements ( ) ) { ZipEntry e = zipEnum . nextElement ( ) ; File outFile = new File ( outDir , e . getName ( ) . replaceAll ( "\\/" , "/" ) ) ; if ( e . isDirectory ( ) ) { if ( ! outFile . mkdirs ( ) ) { log . warn ( "Could not create output directory " + outFile . getAbsolutePath ( ) ) ; } } // end if directory else { if ( "corpus.tab" . equals ( outFile . getName ( ) ) || "corpus.annis" . equals ( outFile . getName ( ) ) ) { rootDirs . add ( outFile . getParentFile ( ) ) ; } if ( ! outFile . getParentFile ( ) . isDirectory ( ) ) { if ( ! outFile . getParentFile ( ) . mkdirs ( ) ) { { log . warn ( "Could not create output directory for file " + outFile . getAbsolutePath ( ) ) ; } } } try ( FileOutputStream outStream = new FileOutputStream ( outFile ) ; ) { ByteStreams . copy ( zip . getInputStream ( e ) , outStream ) ; } catch ( FileNotFoundException ex ) { log . error ( null , ex ) ; } catch ( IOException ex ) { log . error ( null , ex ) ; } } // end else is file } // end for each entry in zip file return rootDirs ;
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcFurnitureTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class JsonIOUtil { /** * Serializes the { @ code message } into a byte array using the given { @ code schema } . */ public static < T > byte [ ] toByteArray ( T message , Schema < T > schema , boolean numeric ) { } }
final ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; try { writeTo ( baos , message , schema , numeric ) ; } catch ( IOException e ) { throw new RuntimeException ( "Serializing to a byte array threw an IOException " + "(should never happen)." , e ) ; } return baos . toByteArray ( ) ;
public class WebPExtension { /** * Determine if the GeoPackage has the extension * @ param tableName * table name * @ return true if has extension */ public boolean has ( String tableName ) { } }
boolean exists = has ( EXTENSION_NAME , tableName , TileTable . COLUMN_TILE_DATA ) ; return exists ;
public class CalendarPicker { /** * / * [ deutsch ] * < p > Erzeugt einen neuen { @ code CalendarPicker } f & uuml ; r das persische Kalendersystem ( jalali ) . < / p > * @ param locale the language and country configuration * @ param todaySupplier determines the current calendar date * @ return CalendarPicker */ public static CalendarPicker < PersianCalendar > persian ( Locale locale , Supplier < PersianCalendar > todaySupplier ) { } }
return CalendarPicker . create ( PersianCalendar . axis ( ) , new FXCalendarSystemPersian ( ) , locale , todaySupplier ) ;
public class Predicates { /** * Returns a predicate that evaluates to { @ code true } if its * argument does not satisfy { @ code p } . */ public static < T > Predicate < T > not ( final Predicate < ? super T > p ) { } }
return new Predicate < T > ( ) { @ Override public boolean apply ( T obj ) { return ! p . apply ( obj ) ; } } ;
public class Environment { /** * Performs POST to supplied url of result of applying template with model . * @ param url url to post to . * @ param templateName name of template to use . * @ param model model for template . * @ param result result to populate with response . * @ param headers headers to add . * @ param contentType contentType for request . */ public void doHttpPost ( String url , String templateName , Object model , HttpResponse result , Map < String , Object > headers , String contentType ) { } }
String request = processTemplate ( templateName , model ) ; result . setRequest ( request ) ; doHttpPost ( url , result , headers , contentType ) ;
public class WebUtils { /** * Gets the logout requests from flow scope . * @ param context the context * @ return the logout requests */ public static List < SingleLogoutRequest > getLogoutRequests ( final RequestContext context ) { } }
return ( List < SingleLogoutRequest > ) context . getFlowScope ( ) . get ( PARAMETER_LOGOUT_REQUESTS ) ;
public class HandlerEvaluator { /** * Runs the evaluator in the given class according to the valid fields * extracted from the template ( via attribute ui : field ) . * @ param writer * the writer used to output the results * @ param fieldManager * the field manager instance * @ param uiOwner * the name of the class evaluated here that owns the template */ public void run ( IndentedWriter writer , FieldManager fieldManager , String uiOwner ) throws UnableToCompleteException { } }
// Iterate through all methods defined in the class . for ( JMethod method : ownerClass . getUiHandlers ( ) ) { // Evaluate the method . String boundMethod = method . getName ( ) ; if ( method . isPrivate ( ) ) { logger . die ( "Method '%s' cannot be private." , boundMethod ) ; } // Retrieves both event and handler types . JParameter [ ] parameters = method . getParameters ( ) ; if ( parameters . length != 1 ) { logger . die ( "Method '%s' must have a single event parameter defined." , boundMethod ) ; } JClassType eventType = parameters [ 0 ] . getType ( ) . isClass ( ) ; if ( eventType == null ) { logger . die ( "Parameter type is not a class." ) ; } JClassType handlerType = getHandlerForEvent ( eventType ) ; if ( handlerType == null ) { logger . die ( "Parameter '%s' is not an event (subclass of GwtEvent)." , eventType . getName ( ) ) ; } // Cool to add the handler in the output . String handlerVarName = HANDLER_BASE_NAME + ( ++ varCounter ) ; writeHandler ( writer , uiOwner , handlerVarName , handlerType , eventType , boundMethod ) ; // Adds the handler created above . UiHandler annotation = method . getAnnotation ( UiHandler . class ) ; for ( String objectName : annotation . value ( ) ) { // Is the field object valid ? FieldWriter fieldWriter = fieldManager . lookup ( objectName ) ; if ( fieldWriter == null ) { logger . die ( ( "Method '%s' can not be bound. You probably missed ui:field='%s' " + "in the template." ) , boundMethod , objectName ) ; } // Retrieves the " add handler " method in the object . JMethod addHandlerMethodType = getAddHandlerMethodForObject ( fieldWriter . getInstantiableType ( ) , handlerType ) ; logger . getTreeLogger ( ) . log ( Type . INFO , "-------------" ) ; logger . getTreeLogger ( ) . log ( Type . INFO , "eventType = " + eventType ) ; logger . getTreeLogger ( ) . log ( Type . INFO , "DomEvent = " + oracle . findType ( DomEvent . class . getCanonicalName ( ) ) ) ; logger . getTreeLogger ( ) . log ( Type . 
INFO , "gwtEvent = " + oracle . findType ( GwtEvent . class . getCanonicalName ( ) ) ) ; logger . getTreeLogger ( ) . log ( Type . INFO , "to DomEvent ?" + eventType . isAssignableTo ( oracle . findType ( DomEvent . class . getCanonicalName ( ) ) ) ) ; logger . getTreeLogger ( ) . log ( Type . INFO , "to GwtEvent ?" + eventType . isAssignableTo ( oracle . findType ( GwtEvent . class . getCanonicalName ( ) ) ) ) ; if ( addHandlerMethodType == null ) { if ( eventType . isAssignableTo ( oracle . findType ( DomEvent . class . getCanonicalName ( ) ) ) ) { writeAddHandler ( writer , fieldManager , handlerVarName + ", " + eventType . getPackage ( ) . getName ( ) + "." + eventType . getName ( ) + ".getType()" , "addDomHandler" , objectName ) ; logger . getTreeLogger ( ) . log ( Type . INFO , "addDomHandler" ) ; } else { writeAddHandler ( writer , fieldManager , handlerVarName + ", " + eventType . getPackage ( ) . getName ( ) + "." + eventType . getName ( ) + ".getType()" , "addHandler" , objectName ) ; logger . getTreeLogger ( ) . log ( Type . INFO , "super.addHandler" ) ; } } else { // logger . die ( " Field ' % s ' does not have an ' add % s ' method associated . " , // objectName , handlerType . getName ( ) ) ; // Cool to tie the handler into the object . writeAddHandler ( writer , fieldManager , handlerVarName , addHandlerMethodType . getName ( ) , objectName ) ; } } }
public class NativeOpExecutioner {

    /**
     * This method executes a previously built batch of aggregate ops.
     *
     * All per-op metadata (argument counts, indexing args, int arrays, real args,
     * argument/shape pointers) is packed into one flat native buffer at fixed
     * offsets derived from the batch's per-op maxima, then handed to the native
     * loop in a single call.
     *
     * @param batch the batch to execute
     */
    @Override
    public <T extends Aggregate> void exec(Batch<T> batch) {
        // profilingHookIn(batch);

        IntPointer pointer = (IntPointer) getPointer(batch);

        // Five int "header" slots per op: #args, #shapes, #index args, #real args, #int arrays.
        int maxTypes = 5;

        int maxIntArrays = batch.getSample().maxIntArrays();
        int maxArraySize = batch.getSample().maxIntArraySize();

        // Section offsets into the flat buffer. The real-argument section is measured
        // in elements of the current data type, hence the /2 adjustments converting
        // between int-sized and double-sized slots depending on Nd4j.dataType().
        int indexPos = maxTypes * Batch.getBatchLimit();
        int intArraysPos = indexPos + (batch.getSample().maxIndexArguments() * Batch.getBatchLimit());
        int realPos = (intArraysPos + (maxIntArrays * maxArraySize * Batch.getBatchLimit()))
            / (Nd4j.dataType() == DataType.DOUBLE ? 2 : 1);
        int argsPos = (realPos + ((batch.getSample().maxRealArguments() * Batch.getBatchLimit())))
            / (Nd4j.dataType() == DataType.DOUBLE ? 1 : 2);
        int shapesPos = argsPos + (batch.getSample().maxArguments() * Batch.getBatchLimit());

        // Data type is taken from the first argument of the first op; all ops in a
        // batch are assumed to share it (NOTE(review): not verified here — confirm).
        DataType dataType = null;
        for (int i = 0; i < batch.getNumAggregates(); i++) {
            T op = batch.getAggregates().get(i);
            if (i == 0)
                dataType = op.getArguments().get(0).dataType();

            // put num arguments
            int idx = i * maxTypes;
            pointer.put(idx, op.getArguments().size());
            pointer.put(idx + 1, op.getShapes().size());
            pointer.put(idx + 2, op.getIndexingArguments().size());
            pointer.put(idx + 3, op.getRealArguments().size());
            pointer.put(idx + 4, op.getIntArrayArguments().size());

            // putting indexing arguments
            for (int e = 0; e < op.getIndexingArguments().size(); e++) {
                idx = indexPos + i * batch.getSample().maxIndexArguments();
                pointer.put(idx + e, op.getIndexingArguments().get(e));
            }

            // putting intArray values (each op gets maxIntArrays slots of maxArraySize ints)
            int bsize = maxIntArrays * maxArraySize;
            for (int e = 0; e < op.getIntArrayArguments().size(); e++) {
                int step = (i * bsize) + (e * maxArraySize);
                if (op.getIntArrayArguments().get(e) != null)
                    for (int x = 0; x < op.getIntArrayArguments().get(e).length; x++) {
                        idx = intArraysPos + step + x;
                        pointer.put(idx, op.getIntArrayArguments().get(e)[x]);
                    }
            }

            // TODO: variable datatype should be handled here
            // putting real arguments — reinterpret the same buffer as float/double storage.
            switch (dataType) {
                case FLOAT:
                    FloatPointer fPtr = new FloatPointer(pointer);
                    for (int e = 0; e < op.getRealArguments().size(); e++) {
                        idx = realPos + i * op.maxRealArguments();
                        fPtr.put(idx + e, op.getRealArguments().get(e).floatValue());
                    }
                    break;
                case DOUBLE:
                    DoublePointer dPtr = new DoublePointer(pointer);
                    for (int e = 0; e < op.getRealArguments().size(); e++) {
                        idx = realPos + (i * op.maxRealArguments());
                        dPtr.put(idx + e, op.getRealArguments().get(e).doubleValue());
                    }
                    break;
                default:
                    throw new ND4JIllegalArgumentException("Only FLOAT and DOUBLE datatypes are supported");
            }

            if (extraz.get() == null)
                extraz.set(new PointerPointer(32));

            // putting arguments pointers — reinterpret the buffer as pointer storage.
            PointerPointer ptrPtr = new PointerPointer(pointer);
            // extraz.get().put(pointer);
            for (int e = 0; e < op.getArguments().size(); e++) {
                idx = argsPos + i * batch.getSample().maxArguments();
                if (op.getArguments().get(e) != null) {
                    ptrPtr.put(idx + e, op.getArguments().get(e).data().addressPointer());
                }
            }

            // putting shape pointers
            for (int e = 0; e < op.getShapes().size(); e++) {
                idx = shapesPos + i * batch.getSample().maxShapes();
                if (op.getShapes().get(e) != null)
                    ptrPtr.put(idx + e, op.getShapes().get(e).addressPointer());
            }
        }

        // Single native call executes the whole packed batch.
        loop.execAggregateBatch(null, batch.getNumAggregates(), batch.opNum(),
            batch.getSample().maxArguments(), batch.getSample().maxShapes(),
            batch.getSample().maxIntArrays(), batch.getSample().maxIntArraySize(),
            batch.getSample().maxIndexArguments(), batch.getSample().maxRealArguments(),
            pointer, FlatBuffersMapper.getDataTypeAsByte(dataType));
    }

}
public class XML {

    /**
     * Pre-order depth-first search for the first node named {@code elementName}
     * in the subtree rooted at {@code node}. Comment and text roots are rejected
     * outright; text children are skipped while scanning.
     *
     * @param elementName node name to look for
     * @param node subtree root (may be null)
     * @return the first matching node, or null if none exists
     */
    public static Node findElementByName(String elementName, Node node) {
        if (node == null
            || node.getNodeType() == Node.COMMENT_NODE
            || node.getNodeType() == Node.TEXT_NODE) {
            return null;
        }
        if (elementName.equals(node.getNodeName())) {
            return node;
        }

        NodeList children = node.getChildNodes();
        if (children == null) {
            return null;
        }

        for (int i = 0, count = children.getLength(); i < count; i++) {
            Node child = children.item(i);
            if (child.getNodeType() == Node.TEXT_NODE) {
                continue;
            }
            // Check the child itself before descending into its subtree (pre-order).
            if (elementName.equals(child.getNodeName())) {
                return child;
            }
            NodeList grandChildren = child.getChildNodes();
            if (grandChildren == null) {
                continue;
            }
            for (int j = 0, grandCount = grandChildren.getLength(); j < grandCount; j++) {
                Node match = findElementByName(elementName, grandChildren.item(j));
                if (match != null) {
                    return match;
                }
            }
        }
        return null;
    }

}
public class MemorySegment { /** * Bulk copy method . Copies { @ code numBytes } bytes from this memory segment , starting at position * { @ code offset } to the target memory segment . The bytes will be put into the target segment * starting at position { @ code targetOffset } . * @ param offset The position where the bytes are started to be read from in this memory segment . * @ param target The memory segment to copy the bytes to . * @ param targetOffset The position in the target memory segment to copy the chunk to . * @ param numBytes The number of bytes to copy . * @ throws IndexOutOfBoundsException If either of the offsets is invalid , or the source segment does not * contain the given number of bytes ( starting from offset ) , or the target segment does * not have enough space for the bytes ( counting from targetOffset ) . */ public final void copyTo ( int offset , MemorySegment target , int targetOffset , int numBytes ) { } }
// system arraycopy does the boundary checks anyways , no need to check extra System . arraycopy ( this . memory , offset , target . memory , targetOffset , numBytes ) ;
public class SSLFactoryJsse { /** * Returns the certificate file . */ private Path keyStoreFile ( ) { } }
String fileName = _config . get ( _prefix + ".ssl.key-store" ) ; if ( fileName == null ) { return null ; } return Vfs . path ( fileName ) ;
public class VisOdomDualTrackPnP { /** * Updates the relative position of all points so that the current frame is the reference frame . Mathematically * this is not needed , but should help keep numbers from getting too large . */ private void changePoseToReference ( ) { } }
Se3_F64 keyToCurr = currToKey . invert ( null ) ; List < PointTrack > all = trackerLeft . getAllTracks ( null ) ; for ( PointTrack t : all ) { LeftTrackInfo p = t . getCookie ( ) ; SePointOps_F64 . transform ( keyToCurr , p . location . location , p . location . location ) ; } concatMotion ( ) ;
public class EnumParse { /** * Retrieve the enum of a given type from a given raw value . Enums must implement the * { @ link com . punchthrough . bean . sdk . internal . utility . EnumParse . ParsableEnum } interface to ensure they have * a { @ link com . punchthrough . bean . sdk . internal . utility . EnumParse . ParsableEnum # getRawValue ( ) } method . * @ param enumClass The class of the enum type being parsed , e . g . < code > BeanState . class < / code > * @ param value The raw byte value of the enum to be retrieved * @ param < T > The enum type being parsed * @ return The enum value with the given raw value * @ throws com . punchthrough . bean . sdk . internal . exception . NoEnumFoundException if the given enum type has no enum value with a raw value * matching the given value */ public static < T extends Enum & ParsableEnum > T enumWithRawValue ( Class < T > enumClass , byte value ) throws NoEnumFoundException { } }
return enumWithRawValue ( enumClass , ( int ) value ) ;
public class GenericShapeProvider { /** * { @ inheritDoc } * @ param o { @ inheritDoc } * @ return { @ inheritDoc } * @ throws NotAvailableException { @ inheritDoc } */ @ Override public Shape getShape ( final Object o ) throws NotAvailableException { } }
if ( o instanceof GlyphIcons ) { return GLYPH_ICON_SHAPE_PROVIDER . getShape ( ( GlyphIcons ) o ) ; } if ( o instanceof Shape ) { return SVG_PATH_SHAPE_PROVIDER . getShape ( ( SVGPath ) o ) ; } throw new NotAvailableException ( ContextType . USE_ID_AS_CONTEXT , "ShapeProvider" , new InvalidStateException ( "Shape description " + o + " is not supported!" ) ) ;