signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Cells { /** * Returns the Cell ( associated to the default table ) whose name is < i > cellName < / i > , or null if this Cells object * contains no cell whose name is cellName . * @ param cellName the name of the Cell we want to retrieve from this Cells object . * @ return the Cell whose name is cellName contained in this Cells object . null if no cell named cellName is * present . */ public Cell getCellByName ( String cellName ) { } }
Set < String > keys = cells . keySet ( ) ; for ( String key : keys ) { List < Cell > cellList = cells . get ( key ) ; for ( Cell c : cellList ) { if ( c . getCellName ( ) . equals ( cellName ) ) { return c ; } } } return null ;
public class BytesHashMap { /** * Append an value into the hash map ' s record area . * @ return An BinaryRow mapping to the memory segments in the map ' s record area belonging to * the newly appended value . * @ throws EOFException if the map can ' t allocate much more memory . */ public BinaryRow append ( LookupInfo info , BinaryRow value ) throws IOException { } }
try { if ( numElements >= growthThreshold ) { growAndRehash ( ) ; // update info ' s bucketSegmentIndex and bucketOffset lookup ( info . key ) ; } BinaryRow toAppend = hashSetMode ? reusedValue : value ; long pointerToAppended = recordArea . appendRecord ( info . key , toAppend ) ; bucketSegments . get ( info . bucketSegmentIndex ) . putLong ( info . bucketOffset , pointerToAppended ) ; bucketSegments . get ( info . bucketSegmentIndex ) . putInt ( info . bucketOffset + ELEMENT_POINT_LENGTH , info . keyHashCode ) ; numElements ++ ; recordArea . setReadPosition ( pointerToAppended ) ; recordArea . skipKey ( ) ; return recordArea . readValue ( reusedValue ) ; } catch ( EOFException e ) { numSpillFiles ++ ; spillInBytes += recordArea . segments . size ( ) * ( ( long ) segmentSize ) ; throw e ; }
public class SocksProxyConstants { /** * Return the string associated with the specified reply code . * @ param code the reply code * @ return the reply string */ public static String getReplyCodeAsString ( byte code ) { } }
switch ( code ) { // v4 & v4a codes case V4_REPLY_REQUEST_GRANTED : return "Request granted" ; case V4_REPLY_REQUEST_REJECTED_OR_FAILED : return "Request rejected or failed" ; case V4_REPLY_REQUEST_FAILED_NO_IDENTD : return "Request failed because client is not running identd (or not reachable from the server)" ; case V4_REPLY_REQUEST_FAILED_ID_NOT_CONFIRMED : return "Request failed because client's identd could not confirm the user ID string in the request" ; // v5 codes case V5_REPLY_SUCCEEDED : return "Request succeeded" ; case V5_REPLY_GENERAL_FAILURE : return "Request failed: general SOCKS server failure" ; case V5_REPLY_NOT_ALLOWED : return "Request failed: connection not allowed by ruleset" ; case V5_REPLY_NETWORK_UNREACHABLE : return "Request failed: network unreachable" ; case V5_REPLY_HOST_UNREACHABLE : return "Request failed: host unreachable" ; case V5_REPLY_CONNECTION_REFUSED : return "Request failed: connection refused" ; case V5_REPLY_TTL_EXPIRED : return "Request failed: TTL expired" ; case V5_REPLY_COMMAND_NOT_SUPPORTED : return "Request failed: command not supported" ; case V5_REPLY_ADDRESS_TYPE_NOT_SUPPORTED : return "Request failed: address type not supported" ; default : return "Unknown reply code" ; }
public class NatureRecognition { /** * 获取一个词语的参数 * @ param word * @ return */ public String [ ] getParams ( String word ) { } }
for ( Forest forest : forests ) { if ( forest == null ) { continue ; } SmartForest < String [ ] > sf = forest ; for ( int i = 0 ; i < word . length ( ) ; i ++ ) { sf = sf . get ( word . charAt ( i ) ) ; if ( sf == null ) { return null ; } } if ( sf . getStatus ( ) > 1 ) { return sf . getParam ( ) ; } else { return null ; } } return null ;
public class EqualityComparisonBaseTreeNode { /** * but I can ' t get ANTLR to generated nested tree with added node . */ protected Predicate < Object > getEqualFilter ( ) { } }
String xpath = getXPath ( getChild ( 0 ) ) ; Tree valueNode = getChild ( 1 ) ; switch ( valueNode . getType ( ) ) { case NUMBER : Number value = ( Number ) ( ( ValueTreeNode ) valueNode ) . getValue ( ) ; return new PathValueEventFilter ( xpath , new NumericValuePredicate ( value , "=" ) ) ; case STRING : String sValue = ( String ) ( ( ValueTreeNode ) valueNode ) . getValue ( ) ; return new PathValueEventFilter ( xpath , new StringValuePredicate ( sValue ) ) ; case TRUE : return new PathValueEventFilter ( xpath , BooleanValuePredicate . TRUE ) ; case FALSE : return new PathValueEventFilter ( xpath , BooleanValuePredicate . FALSE ) ; case NULL : return new PathValueEventFilter ( xpath , NullValuePredicate . INSTANCE ) ; case XPATH_FUN_NAME : String aPath = ( String ) ( ( ValueTreeNode ) valueNode ) . getValue ( ) ; return new PathValueEventFilter ( xpath , new XPathValuePredicate ( aPath , xpath ) ) ; case TIME_MILLIS_FUN_NAME : TimeMillisValueTreeNode timeNode = ( TimeMillisValueTreeNode ) valueNode ; return new PathValueEventFilter ( xpath , new TimeMillisValuePredicate ( timeNode . getValueFormat ( ) , timeNode . getValue ( ) , "=" ) ) ; case TIME_STRING_FUN_NAME : TimeStringValueTreeNode timeStringNode = ( TimeStringValueTreeNode ) valueNode ; return new PathValueEventFilter ( xpath , new TimeStringValuePredicate ( timeStringNode . getValueTimeFormat ( ) , timeStringNode . getInputTimeFormat ( ) , timeStringNode . getValue ( ) , "=" ) ) ; default : throw new UnexpectedTokenException ( valueNode , "Number" , "String" , "TRUE" , "FALSE" ) ; }
public class AmazonEC2Client { /** * Modifies the specified VPC attachment . * @ param modifyTransitGatewayVpcAttachmentRequest * @ return Result of the ModifyTransitGatewayVpcAttachment operation returned by the service . * @ sample AmazonEC2 . ModifyTransitGatewayVpcAttachment * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / ModifyTransitGatewayVpcAttachment " * target = " _ top " > AWS API Documentation < / a > */ @ Override public ModifyTransitGatewayVpcAttachmentResult modifyTransitGatewayVpcAttachment ( ModifyTransitGatewayVpcAttachmentRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeModifyTransitGatewayVpcAttachment ( request ) ;
public class IDESizeImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setIDESZ ( Integer newIDESZ ) { } }
Integer oldIDESZ = idesz ; idesz = newIDESZ ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . IDE_SIZE__IDESZ , oldIDESZ , idesz ) ) ;
public class Compiler { /** * Compile a literal string value . * @ param opPos The current position in the m _ opMap array . * @ return reference to { @ link org . apache . xpath . objects . XString } instance . * @ throws TransformerException if a error occurs creating the Expression . */ protected Expression literal ( int opPos ) { } }
opPos = getFirstChildPos ( opPos ) ; return ( XString ) getTokenQueue ( ) . elementAt ( getOp ( opPos ) ) ;
public class JdkCompiler { /** * httl . properties : java . specification . version = 1.7 */ public void setCompileVersion ( String version ) { } }
if ( StringUtils . isNotEmpty ( version ) && ! version . equals ( ClassUtils . getJavaVersion ( ) ) ) { options . add ( "-target" ) ; options . add ( version ) ; lintOptions . add ( "-target" ) ; lintOptions . add ( version ) ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link String } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.drugbank.ca" , name = "allele" , scope = SnpAdverseDrugReactionType . class ) public JAXBElement < String > createSnpAdverseDrugReactionTypeAllele ( String value ) { } }
return new JAXBElement < String > ( _SnpAdverseDrugReactionTypeAllele_QNAME , String . class , SnpAdverseDrugReactionType . class , value ) ;
public class Event { /** * Causes the invoking thread to wait until the processing of the * event has been completed ( see { @ link # isDone ( ) } ) or given timeout * has expired and returns the list of results ( which may be empty * if the event ' s result type is { @ link Void } ) . * @ return the results * @ see java . util . concurrent . Future # get ( long , java . util . concurrent . TimeUnit ) */ public List < T > results ( long timeout , TimeUnit unit ) throws InterruptedException , TimeoutException { } }
synchronized ( this ) { if ( completed ) { return results == null ? Collections . emptyList ( ) : Collections . unmodifiableList ( results ) ; } wait ( unit . toMillis ( timeout ) ) ; } if ( completed ) { return results == null ? Collections . emptyList ( ) : Collections . unmodifiableList ( results ) ; } throw new TimeoutException ( ) ;
public class MessageToEventMapper { /** * Handle content length . * @ param event * the event */ private void handleContentLength ( Event event ) { } }
if ( event . getContent ( ) == null ) { return ; } if ( maxContentLength == - 1 || event . getContent ( ) . length ( ) <= maxContentLength ) { return ; } if ( maxContentLength < CUT_START_TAG . length ( ) + CUT_END_TAG . length ( ) ) { event . setContent ( "" ) ; event . setContentCut ( true ) ; return ; } int contentLength = maxContentLength - CUT_START_TAG . length ( ) - CUT_END_TAG . length ( ) ; event . setContent ( CUT_START_TAG + event . getContent ( ) . substring ( 0 , contentLength ) + CUT_END_TAG ) ; event . setContentCut ( true ) ;
public class Connection { /** * Returns the value of the given parameter as < code > String < / code > . * @ param textKey The name of the parameter . * @ return The value of this parameter . * @ throws OperationalTextKeyException If the given parameter cannot be found . */ public final String getSetting ( final OperationalTextKey textKey ) throws OperationalTextKeyException { } }
return configuration . getSetting ( referenceSession . getTargetName ( ) , connectionID , textKey ) ;
public class RqFormBase { /** * Create map of request parameters . * @ return Parameters map or empty map in case of error . * @ throws IOException If something fails reading or parsing body */ private Map < String , List < String > > map ( ) throws IOException { } }
if ( this . saved . isEmpty ( ) ) { this . saved . add ( this . freshMap ( ) ) ; } return this . saved . get ( 0 ) ;
public class Tokenizer { /** * Returns a string that contains a copy of a specified string * without leading whitespaces . * @ param string the string to trim leading whitespaces * @ return a string with leading whitespaces trimmed */ private static String trimLeadingWhitespace ( String string ) { } }
if ( string . isEmpty ( ) ) { return string ; } int start = 0 ; char c ; for ( int i = 0 ; i < string . length ( ) ; i ++ ) { c = string . charAt ( i ) ; if ( ! Character . isWhitespace ( c ) ) { start = i ; break ; } } if ( start == 0 ) { return string ; } return string . substring ( start ) ;
public class DiscordWebSocketAdapter { /** * Sends the resume packet . * @ param websocket The websocket the resume packet should be sent to . */ private void sendResume ( WebSocket websocket ) { } }
ObjectNode resumePacket = JsonNodeFactory . instance . objectNode ( ) . put ( "op" , GatewayOpcode . RESUME . getCode ( ) ) ; resumePacket . putObject ( "d" ) . put ( "token" , api . getPrefixedToken ( ) ) . put ( "session_id" , sessionId ) . put ( "seq" , lastSeq ) ; logger . debug ( "Sending resume packet" ) ; websocket . sendText ( resumePacket . toString ( ) ) ;
public class AccountsInner { /** * Gets the first page of Data Lake Store accounts linked to the specified Data Lake Analytics account . The response includes a link to the next page , if any . * @ param resourceGroupName The name of the Azure resource group that contains the Data Lake Analytics account . * @ param accountName The name of the Data Lake Analytics account for which to list Data Lake Store accounts . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; DataLakeStoreAccountInfoInner & gt ; object if successful . */ public PagedList < DataLakeStoreAccountInfoInner > listDataLakeStoreAccounts ( final String resourceGroupName , final String accountName ) { } }
ServiceResponse < Page < DataLakeStoreAccountInfoInner > > response = listDataLakeStoreAccountsSinglePageAsync ( resourceGroupName , accountName ) . toBlocking ( ) . single ( ) ; return new PagedList < DataLakeStoreAccountInfoInner > ( response . body ( ) ) { @ Override public Page < DataLakeStoreAccountInfoInner > nextPage ( String nextPageLink ) { return listDataLakeStoreAccountsNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class Group { /** * Issue a < code > get / / PROJECT / NET _ ID / GROUP _ ID Level < / code > to the C - Gate server . * @ see < a href = " http : / / www . clipsal . com / cis / downloads / Toolkit / CGateServerGuide _ 1_0 . pdf " > * < i > C - Gate Server Guide 4.3.44 < / i > < / a > * @ throws CGateException */ public int getLevel ( ) throws CGateException { } }
ArrayList < String > resp_array = getCGateSession ( ) . sendCommand ( "get " + getProjectAddress ( ) + "/" + getResponseAddress ( true ) + " Level" ) . toArray ( ) ; String level_str = responseToMap ( resp_array . get ( 0 ) ) . get ( "Level" ) ; return level_str == null ? 0 : Integer . valueOf ( level_str ) ;
public class ShowEvent { /** * Fires a show event on all registered handlers in the handler manager . If * no such handlers exist , this method will do nothing . * @ param source the source of the handlers */ public static void fire ( final HasShowHandlers source ) { } }
if ( TYPE != null ) { ShowEvent event = new ShowEvent ( ) ; source . fireEvent ( event ) ; }
public class RunningJobsImpl { /** * A guarded remove ( ) that throws an exception if no RunningJob is known for this id . * @ param jobIdentifier */ private synchronized void remove ( final String jobIdentifier ) { } }
final RunningJobImpl result = this . jobs . remove ( jobIdentifier ) ; if ( null == result ) { throw new RuntimeException ( "Trying to remove a RunningJob that is unknown: " + jobIdentifier ) ; }
public class Utils { /** * Copies the content from inputFile into an output stream . * @ param inputFile an input file ( must be a file and exist ) * @ param os the output stream * @ throws IOException if something went wrong */ public static void copyStream ( File inputFile , OutputStream os ) throws IOException { } }
InputStream is = new FileInputStream ( inputFile ) ; try { copyStreamUnsafelyUseWithCaution ( is , os ) ; } finally { is . close ( ) ; }
public class VerboseFormatter { /** * Append log level . * @ param message The message builder . * @ param event The log record . */ private static void appendLevel ( StringBuilder message , LogRecord event ) { } }
final String logLevel = event . getLevel ( ) . getName ( ) ; for ( int i = logLevel . length ( ) ; i < LOG_LEVEL_LENGTH ; i ++ ) { message . append ( Constant . SPACE ) ; } message . append ( logLevel ) . append ( Constant . DOUBLE_DOT ) ;
public class JDBCDatabaseMetaData { /** * Retrieves whether this database is in read - only mode . * < ! - - start release - specific documentation - - > * < div class = " ReleaseSpecificDocumentation " > * < h3 > HSQLDB - Specific Information : < / h3 > < p > * Starting with 1.7.2 , this makes * an SQL call to the new isReadOnlyDatabase function * which provides correct determination of the read - only status for * both local and remote database instances . * < / div > * < ! - - end release - specific documentation - - > * @ return < code > true < / code > if so ; < code > false < / code > otherwise * @ exception SQLException if a database access error occurs */ public boolean isReadOnly ( ) throws SQLException { } }
ResultSet rs = execute ( "CALL isReadOnlyDatabase()" ) ; rs . next ( ) ; boolean result = rs . getBoolean ( 1 ) ; rs . close ( ) ; return result ;
public class DefaultBackendSecurity { /** * Parses the beSecurity configuration file . * @ throws BackendSecurityParserException * If an error occurs in attempting to parse the beSecurity * configuration file . */ public BackendSecuritySpec parseBeSecurity ( ) throws BackendSecurityParserException { } }
try { BackendSecurityDeserializer bsd = new BackendSecurityDeserializer ( m_encoding , m_validate ) ; return bsd . deserialize ( m_beSecurityPath ) ; } catch ( Throwable th ) { throw new BackendSecurityParserException ( "[DefaultBackendSecurity] " + "An error has occured in parsing the backend security " + "configuration file located at \"" + m_beSecurityPath + "\". " + "The underlying error was a " + th . getClass ( ) . getName ( ) + "The message was \"" + th . getMessage ( ) + "\"." ) ; }
public class TimeUtils { /** * Compute the number of seconds from the Proleptic Gregorian epoch * to the given time . */ public static long secsSinceEpoch ( DateValue date ) { } }
long result = fixedFromGregorian ( date ) * SECS_PER_DAY ; if ( date instanceof TimeValue ) { TimeValue time = ( TimeValue ) date ; result += time . second ( ) + 60 * ( time . minute ( ) + 60 * time . hour ( ) ) ; } return result ;
public class HttpServer { /** * Set the request log . * @ param log RequestLog to use . */ public synchronized void setRequestLog ( RequestLog log ) { } }
if ( _requestLog != null ) removeComponent ( _requestLog ) ; _requestLog = log ; if ( _requestLog != null ) addComponent ( _requestLog ) ;
public class GridLayoutRenderer { /** * Paints the given WPanel ' s children . * @ param component the container to paint . * @ param renderContext the RenderContext to paint to . */ @ Override public void doRender ( final WComponent component , final WebXmlRenderContext renderContext ) { } }
WPanel panel = ( WPanel ) component ; XmlStringBuilder xml = renderContext . getWriter ( ) ; GridLayout layout = ( GridLayout ) panel . getLayout ( ) ; Size hgap = layout . getHorizontalGap ( ) ; String hgapString = hgap == null ? null : hgap . toString ( ) ; Size vgap = layout . getVerticalGap ( ) ; String vgapString = vgap == null ? null : vgap . toString ( ) ; int rows = layout . getRows ( ) ; int cols = layout . getCols ( ) ; xml . appendTagOpen ( "ui:gridlayout" ) ; xml . appendAttribute ( "rows" , rows > 0 ? String . valueOf ( rows ) : "0" ) ; xml . appendAttribute ( "cols" , cols > 0 ? String . valueOf ( cols ) : "0" ) ; xml . appendOptionalAttribute ( "hgap" , hgapString ) ; xml . appendOptionalAttribute ( "vgap" , vgapString ) ; xml . appendClose ( ) ; int size = panel . getChildCount ( ) ; for ( int i = 0 ; i < size ; i ++ ) { xml . appendTag ( "ui:cell" ) ; WComponent child = panel . getChildAt ( i ) ; child . paint ( renderContext ) ; xml . appendEndTag ( "ui:cell" ) ; } xml . appendEndTag ( "ui:gridlayout" ) ;
public class ListConnectorDefinitionsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListConnectorDefinitionsRequest listConnectorDefinitionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listConnectorDefinitionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listConnectorDefinitionsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listConnectorDefinitionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FeatureDependencyChecker { /** * Verfiy whether the feature is uninstallable and there is no other installed * features still require this feature . * @ param uninstallAsset feature to be uninstalled * @ param installedFeatures installed features * @ return the name / symbolic name of the feature which still requires the uninstalling feature . * Return null if there is no other features still require the uninstalling feature . */ public Collection < ProvisioningFeatureDefinition > isUninstallable ( UninstallAsset uninstallAsset , Collection < ProvisioningFeatureDefinition > installedFeatureDefinitions , Collection < String > uninstallInstallFeatures , boolean isChecking ) { } }
Collection < ProvisioningFeatureDefinition > requiredByTheseFeatures = requiresThisFeature ( uninstallAsset . getProvisioningFeatureDefinition ( ) . getSymbolicName ( ) , installedFeatureDefinitions , uninstallInstallFeatures , isChecking ) ; Collection < ProvisioningFeatureDefinition > temp = new ArrayList < ProvisioningFeatureDefinition > ( ) ; while ( requiredByTheseFeatures . size ( ) > temp . size ( ) ) { temp . addAll ( requiredByTheseFeatures ) ; for ( ProvisioningFeatureDefinition p : temp ) { InstallLogUtils . getInstallLogger ( ) . log ( Level . FINEST , "The uninstalling feature : " + uninstallAsset . getProvisioningFeatureDefinition ( ) . getSymbolicName ( ) + " is required by " + p . getSymbolicName ( ) ) ; InstallLogUtils . getInstallLogger ( ) . log ( Level . FINEST , "Determine additional dependency for feature : " + p . getSymbolicName ( ) ) ; Collection < ProvisioningFeatureDefinition > required = requiresThisFeature ( p . getSymbolicName ( ) , installedFeatureDefinitions , uninstallInstallFeatures , isChecking ) ; if ( ! ! ! required . isEmpty ( ) ) { for ( ProvisioningFeatureDefinition pp : required ) { if ( ! ! ! requiredByTheseFeatures . contains ( pp ) ) { InstallLogUtils . getInstallLogger ( ) . log ( Level . FINEST , "Found additional dependent feature : " + pp . getSymbolicName ( ) ) ; requiredByTheseFeatures . add ( pp ) ; } } } } } return requiredByTheseFeatures ;
public class QueueListenerFactory { /** * Get jobs from the specified queues . * When there are jobs in more than one of the queues , the command guarantees to return jobs in the order the queues are * specified . If COUNT allows more jobs to be returned , queues are scanned again and again in the same order popping more * elements . * The { @ link Observable } emits { @ link Job } objects as soon as a job is received from Disque . The terminal event is emitted * as soon as the { @ link rx . Subscriber subscriber } unsubscribes from the { @ link Observable } . * @ param timeout timeout to wait * @ param timeUnit timeout unit * @ param count count of jobs to return * @ return an Observable that emits { @ link Job } elements until the subscriber terminates the subscription */ public Observable < Job < K , V > > getjobs ( long timeout , TimeUnit timeUnit , long count ) { } }
return new GetJobsBuilder ( ) . getjobs ( timeout , timeUnit , count ) ;
public class SystemIntents { /** * Intent that should open either the Google Play app or if not available , the web browser on the Google Play website * @ param context The context associated to the application * @ param packageName The package name of the application to find on the market * @ return the intent for native application or an intent to redirect to the browser if google play is not installed */ public static Intent newGooglePlayIntent ( Context context , String packageName ) { } }
Intent intent = new Intent ( Intent . ACTION_VIEW , Uri . parse ( "market://details?id=" + packageName ) ) ; if ( ! IntentUtils . isIntentAvailable ( context , intent ) ) { intent = MediaIntents . newOpenWebBrowserIntent ( "https://play.google.com/store/apps/details?id=" + packageName ) ; } if ( intent != null ) { intent . addFlags ( Intent . FLAG_ACTIVITY_NO_HISTORY | Intent . FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET ) ; } return intent ;
public class Kryo { /** * Reads an object using the registered serializer . */ public < T > T readObject ( Input input , Class < T > type ) { } }
if ( input == null ) throw new IllegalArgumentException ( "input cannot be null." ) ; if ( type == null ) throw new IllegalArgumentException ( "type cannot be null." ) ; beginObject ( ) ; try { T object ; if ( references ) { int stackSize = readReferenceOrNull ( input , type , false ) ; if ( stackSize == REF ) return ( T ) readObject ; object = ( T ) getRegistration ( type ) . getSerializer ( ) . read ( this , input , type ) ; if ( stackSize == readReferenceIds . size ) reference ( object ) ; } else object = ( T ) getRegistration ( type ) . getSerializer ( ) . read ( this , input , type ) ; if ( TRACE || ( DEBUG && depth == 1 ) ) log ( "Read" , object , input . position ( ) ) ; return object ; } finally { if ( -- depth == 0 && autoReset ) reset ( ) ; }
public class JMMap { /** * Gets entry stream with filter . * @ param < K > the type parameter * @ param < V > the type parameter * @ param map the map * @ param predicate the predicate * @ return the entry stream with filter */ public static < K , V > Stream < Entry < K , V > > getEntryStreamWithFilter ( Map < K , V > map , Predicate < ? super Entry < K , V > > predicate ) { } }
return buildEntryStream ( map ) . filter ( predicate ) ;
public class TypeUtil { /** * @ return The least restrictive data type of two numeric data types . * @ see DataType # INTEGER * @ see DataType # FLOAT * @ see DataType # DOUBLE */ static public DataType getCommonDataType ( DataType left , DataType right ) { } }
if ( ( left ) . equals ( right ) ) { switch ( left ) { case DOUBLE : case FLOAT : case INTEGER : return left ; } } else if ( ( DataType . DOUBLE ) . equals ( left ) ) { if ( ( DataType . FLOAT ) . equals ( right ) || ( DataType . INTEGER ) . equals ( right ) ) { return left ; } } else if ( ( DataType . FLOAT ) . equals ( left ) ) { if ( ( DataType . DOUBLE ) . equals ( right ) ) { return right ; } else if ( ( DataType . INTEGER ) . equals ( right ) ) { return left ; } } else if ( ( DataType . INTEGER ) . equals ( left ) ) { if ( ( DataType . DOUBLE ) . equals ( right ) || ( DataType . FLOAT ) . equals ( right ) ) { return right ; } } throw new EvaluationException ( "No PMML data type for the intersection of PMML data types " + left . value ( ) + " and " + right . value ( ) ) ;
public class CommerceOrderUtil { /** * Returns all the commerce orders where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ return the matching commerce orders */ public static List < CommerceOrder > findByUuid_C ( String uuid , long companyId ) { } }
return getPersistence ( ) . findByUuid_C ( uuid , companyId ) ;
public class GeographyValue { /** * A helper function to validate the loop structure * If loop is invalid , it generates IllegalArgumentException exception */ private static < T > void diagnoseLoop ( List < T > loop , String excpMsgPrf ) throws IllegalArgumentException { } }
if ( loop == null ) { throw new IllegalArgumentException ( excpMsgPrf + "a polygon must contain at least one ring " + "(with each ring at least 4 points, including repeated closing vertex)" ) ; } // 4 vertices = 3 unique vertices for polygon + 1 end point which is same as start point if ( loop . size ( ) < 4 ) { throw new IllegalArgumentException ( excpMsgPrf + "a polygon ring must contain at least 4 points " + "(including repeated closing vertex)" ) ; } // check if the end points of the loop are equal if ( loop . get ( 0 ) . equals ( loop . get ( loop . size ( ) - 1 ) ) == false ) { throw new IllegalArgumentException ( excpMsgPrf + "closing points of ring are not equal: \"" + loop . get ( 0 ) . toString ( ) + "\" != \"" + loop . get ( loop . size ( ) - 1 ) . toString ( ) + "\"" ) ; }
public class ThreadPoolManager { /** * 新建一个受管理的单线程池 , 你不需要注定关闭线程池 , 管理器会帮你关闭的 * 注意 : 请复用 , 而不要频繁构造线程池 */ public static ExecutorService newSingleThreadPoolExecutor ( ) { } }
ExecutorService singleThreadPoolExecutor = Executors . newSingleThreadExecutor ( ) ; EXECUTORS . add ( singleThreadPoolExecutor ) ; return singleThreadPoolExecutor ;
public class MaterialComboBox { /** * Will automatically check for allowClear option to display / hide the * arrow caret . */ protected void displayArrowForAllowClearOption ( boolean displayArrow ) { } }
if ( isAllowClear ( ) ) { if ( displayArrow && getArrowIconElement ( ) != null ) { getArrowIconElement ( ) . css ( "display" , "block" ) ; } else { getArrowIconElement ( ) . css ( "display" , "none" ) ; } }
public class CPFriendlyURLEntryPersistenceImpl { /** * Returns the cp friendly url entry where groupId = & # 63 ; and classNameId = & # 63 ; and classPK = & # 63 ; and languageId = & # 63 ; and urlTitle = & # 63 ; or throws a { @ link NoSuchCPFriendlyURLEntryException } if it could not be found . * @ param groupId the group ID * @ param classNameId the class name ID * @ param classPK the class pk * @ param languageId the language ID * @ param urlTitle the url title * @ return the matching cp friendly url entry * @ throws NoSuchCPFriendlyURLEntryException if a matching cp friendly url entry could not be found */ @ Override public CPFriendlyURLEntry findByG_C_C_L_U ( long groupId , long classNameId , long classPK , String languageId , String urlTitle ) throws NoSuchCPFriendlyURLEntryException { } }
CPFriendlyURLEntry cpFriendlyURLEntry = fetchByG_C_C_L_U ( groupId , classNameId , classPK , languageId , urlTitle ) ; if ( cpFriendlyURLEntry == null ) { StringBundler msg = new StringBundler ( 12 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "groupId=" ) ; msg . append ( groupId ) ; msg . append ( ", classNameId=" ) ; msg . append ( classNameId ) ; msg . append ( ", classPK=" ) ; msg . append ( classPK ) ; msg . append ( ", languageId=" ) ; msg . append ( languageId ) ; msg . append ( ", urlTitle=" ) ; msg . append ( urlTitle ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCPFriendlyURLEntryException ( msg . toString ( ) ) ; } return cpFriendlyURLEntry ;
public class HomeResolver { /** * Method that changes any string starting with ~ to user . home property . * @ param path * to change . * @ return String with ~ changed . */ public static String resolveHomeDirectory ( String path ) { } }
if ( path . startsWith ( "~" ) ) { return path . replace ( "~" , System . getProperty ( "user.home" ) ) ; } return path ;
public class AsyncGroup { /** * Handle exceptions that callback function thrown . < br > * if there ' s already an exception thrown when this method invoked , the * handling process would be taken in current thread . < br > * Otherwise , the process would be taken at callback thread when * exception occurred . * @ param handler a function to invoke when occurred an exception < br > * for all Throwable caught from the callback function * would be packed into StyleRuntimeException * @ see StyleRuntimeException */ public void onError ( def < Void > handler ) { } }
while ( ! inProcess ) { // block Style . sleep ( 1 ) ; } synchronized ( lock ) { this . handler = handler ; if ( err != null ) { handler . apply ( err ) ; } }
public class Cache { /** * F86406 */ public void introspect ( IntrospectionWriter writer ) { } }
writer . begin ( "Cache : " + this ) ; writer . println ( "Name of Cache: " + this . ivName ) ; writer . println ( "Number of buckets: " + this . numBuckets ) ; synchronized ( this ) { writer . println ( "Number of objects currently in cache: " + numObjects ) ; writer . println ( "Number of evictions attempted (since last dump): " + numEvictionAttempts ) ; writer . println ( "Number of evictions (since last dump): " + numEvictions ) ; numEvictionAttempts = 0 ; numEvictions = 0 ; } writer . end ( ) ;
public class OkHttp { /** * Get a response to a GET request for the URL and headers . */ public Response response ( String url , String ... headers ) throws IOException { } }
return call ( url , headers ) . execute ( ) ;
public class ValueClientExample { /** * Starts the client . */ public static void main ( String [ ] args ) throws Exception { } }
if ( args . length < 1 ) throw new IllegalArgumentException ( "must supply a set of host:port tuples" ) ; // Build a list of all member addresses to which to connect . List < Address > members = new ArrayList < > ( ) ; for ( String arg : args ) { String [ ] parts = arg . split ( ":" ) ; members . add ( new Address ( parts [ 0 ] , Integer . valueOf ( parts [ 1 ] ) ) ) ; } CopycatClient client = CopycatClient . builder ( ) . withTransport ( new NettyTransport ( ) ) . withConnectionStrategy ( ConnectionStrategies . FIBONACCI_BACKOFF ) . withRecoveryStrategy ( RecoveryStrategies . RECOVER ) . withServerSelectionStrategy ( ServerSelectionStrategies . LEADER ) . withSessionTimeout ( Duration . ofSeconds ( 15 ) ) . build ( ) ; client . serializer ( ) . register ( SetCommand . class , 1 ) ; client . serializer ( ) . register ( GetQuery . class , 2 ) ; client . serializer ( ) . register ( DeleteCommand . class , 3 ) ; client . connect ( members ) . join ( ) ; recursiveSet ( client ) ; while ( client . state ( ) != CopycatClient . State . CLOSED ) { try { Thread . sleep ( 1000 ) ; } catch ( InterruptedException e ) { break ; } }
public class MessageAction { /** * Clears all previously added files * @ param finalizer * BiConsumer useful to < b > close < / b > remaining resources , * the consumer will receive the name as a string parameter and the resource as { @ code InputStream } . * @ return Updated MessageAction for chaining convenience * @ see java . io . Closeable */ @ CheckReturnValue public MessageAction clearFiles ( BiConsumer < String , InputStream > finalizer ) { } }
Checks . notNull ( finalizer , "Finalizer" ) ; for ( Iterator < Map . Entry < String , InputStream > > it = files . entrySet ( ) . iterator ( ) ; it . hasNext ( ) ; ) { Map . Entry < String , InputStream > entry = it . next ( ) ; finalizer . accept ( entry . getKey ( ) , entry . getValue ( ) ) ; it . remove ( ) ; } clearResources ( ) ; return this ;
public class MkAppTree {
    /**
     * Determines the maximum and minimum number of entries in a node.
     * Capacities are derived from the page size minus a fixed per-node overhead,
     * divided by the serialized size of one directory/leaf entry. Warns when the
     * resulting capacity is suspiciously small and fails when it is unusable.
     */
    @Override
    protected void initializeCapacities(MkAppEntry exampleLeaf) {
        int distanceSize = ByteArrayUtil.SIZE_DOUBLE; // exampleLeaf.getParentDistance().externalizableSize();

        // overhead = index(4), numEntries(4), id(4), isLeaf(0.125)
        double overhead = 12.125;
        if (getPageSize() - overhead < 0) {
            throw new RuntimeException("Node size of " + getPageSize() + " Bytes is chosen too small!");
        }

        // dirCapacity = (file.getPageSize() - overhead) / (nodeID + objectID +
        // coveringRadius + parentDistance + approx) + 1
        // Entry size: nodeID(4) + objectID(4) + coveringRadius + parentDistance
        // + (p+1) polynomial coefficients (4 bytes each) + 2.
        dirCapacity = (int) (getPageSize() - overhead) / (4 + 4 + distanceSize + distanceSize + (settings.p + 1) * 4 + 2) + 1;
        if (dirCapacity <= 1) {
            throw new RuntimeException("Node size of " + getPageSize() + " Bytes is chosen too small!");
        }
        if (dirCapacity < 10) {
            LOG.warning("Page size is choosen too small! Maximum number of entries " + "in a directory node = " + (dirCapacity - 1));
        }

        // leafCapacity = (file.getPageSize() - overhead) / (objectID +
        // parentDistance +
        // approx) + 1
        leafCapacity = (int) (getPageSize() - overhead) / (4 + distanceSize + (settings.p + 1) * 4 + 2) + 1;
        if (leafCapacity <= 1) {
            throw new RuntimeException("Node size of " + getPageSize() + " Bytes is chosen too small!");
        }
        if (leafCapacity < 10) {
            LOG.warning("Page size is choosen too small! Maximum number of entries " + "in a leaf node = " + (leafCapacity - 1));
        }

        initialized = true;

        if (LOG.isVerbose()) {
            LOG.verbose("Directory Capacity: " + (dirCapacity - 1) + "\nLeaf Capacity: " + (leafCapacity - 1));
        }
    }
}
public class InteractionWrapper {
    /**
     * Binds to participants and controllers.
     * Adds every participant of the wrapped interaction, and every Control that
     * targets this interaction, to the downstream set of this wrapper's graph.
     */
    @Override
    public void initDownstream() {
        for (Entity entity : interaction.getParticipant()) {
            addToDownstream(entity, getGraph());
        }
        for (Control control : interaction.getControlledOf()) {
            addToDownstream(control, getGraph());
        }
    }
}
public class MemoryManager {
    /**
     * Called on any OOM allocation.
     * Recomputes the desired cache level from the last-GC heap statistics, decays the
     * estimated POJO usage over time, triggers emergency cache cleaning when the cache
     * exceeds the desired level, and logs a (rate-limited) summary of the memory state.
     *
     * @param msg   short description of the triggering event, included in the log line
     * @param oom   true when this call is in response to an actual OOM allocation
     * @param bytes size of the allocation that triggered this call
     */
    static void set_goals(String msg, boolean oom, long bytes) {
        // Our best guess of free memory, as of the last GC cycle
        final long heapUsedGC = Cleaner.HEAP_USED_AT_LAST_GC;
        final long timeGC = Cleaner.TIME_AT_LAST_GC;
        final long freeHeap = MEM_MAX - heapUsedGC;
        assert freeHeap >= 0 : "I am really confused about the heap usage; MEM_MAX=" + MEM_MAX + " heapUsedGC=" + heapUsedGC;
        // Current memory held in the K/V store.
        final long cacheUsageGC = Cleaner.KV_USED_AT_LAST_GC;
        // Our best guess of POJO object usage: Heap_used minus cache used
        final long pojoUsedGC = Math.max(heapUsedGC - cacheUsageGC, 0);

        // Block allocations if:
        //   the cache is > 7/8 MEM_MAX, OR
        //   we cannot allocate an equal amount of POJOs, pojoUsedGC > freeHeap.
        // Decay POJOS_USED by 1/8th every 5 sec: assume we got hit with a single
        // large allocation which is not repeating - so we do not need to have
        // double the POJO amount.
        // Keep at least 1/8th heap for caching.
        // Emergency-clean the cache down to the blocking level.
        long d = MEM_CRITICAL; // Block-allocation level; cache can grow till this
        // Decay POJO amount
        long p = pojoUsedGC;
        long age = (System.currentTimeMillis() - timeGC); // Age since last FullGC
        age = Math.min(age, 10 * 60 * 1000); // Clip at 10mins
        while ((age -= 5000) > 0) p = p - (p >> 3); // Decay effective POJO by 1/8th every 5sec
        d -= 2 * p - bytes; // Allow for the effective POJO, and again to throttle GC rate (and allow for this allocation)
        d = Math.max(d, MEM_MAX >> 3); // Keep at least 1/8th heap
        if (Cleaner.DESIRED != -1) // Set to -1 only for OOM/Cleaner testing. Never negative normally
            Cleaner.DESIRED = d; // Desired caching level

        final long cacheUsageNow = Cleaner.Histo.cached();
        boolean skipThisLogMessageToAvoidSpammingTheLogs = false;
        String m = "";
        if (cacheUsageNow > Cleaner.DESIRED) {
            // Cache above the desired level: block allocations on OOM and clean.
            m = (CAN_ALLOC ? "Swapping! " : "blocked: ");
            if (oom) setMemLow(); // Stop allocations; trigger emergency clean
            Cleaner.kick_store_cleaner();
        } else { // Else we are not *emergency* cleaning, but may be lazily cleaning.
            setMemGood(); // Cache is below desired level; unblock allocations
            if (oom) { // But still have an OOM?
                m = "Unblock allocations; cache below desired, but also OOM: ";
                // Means the heap is full of uncached POJO's - which cannot be spilled.
                // Here we enter the zone of possibly dieing for OOM. There's no point
                // in blocking allocations, as no more memory can be freed by more
                // cache-flushing. Might as well proceed on a "best effort" basis.
                // Rate-limit this particular log line to once per minute.
                long now = System.currentTimeMillis();
                if ((now - oomLastLogTimestamp) >= SIXTY_SECONDS_IN_MILLIS) {
                    oomLastLogTimestamp = now;
                } else {
                    skipThisLogMessageToAvoidSpammingTheLogs = true;
                }
            } else {
                m = "MemGood: "; // Cache is low enough, room for POJO allocation - full steam ahead!
            }
        }
        if (skipThisLogMessageToAvoidSpammingTheLogs) {
            return;
        }

        // No logging if under memory pressure: can deadlock the cleaner thread
        String s = m + msg + ", (K/V:" + PrettyPrint.bytes(cacheUsageGC) + " + POJO:" + PrettyPrint.bytes(pojoUsedGC) + " + FREE:" + PrettyPrint.bytes(freeHeap) + " == MEM_MAX:" + PrettyPrint.bytes(MEM_MAX) + "), desiredKV=" + PrettyPrint.bytes(Cleaner.DESIRED) + (oom ? " OOM!" : " NO-OOM");
        if (CAN_ALLOC) {
            if (oom) Log.warn(s);
            else Log.debug(s);
        } else System.err.println(s);
    }
}
public class DescribeScheduledActionsResult { /** * Information about the scheduled actions . * @ param scheduledActions * Information about the scheduled actions . */ public void setScheduledActions ( java . util . Collection < ScheduledAction > scheduledActions ) { } }
if ( scheduledActions == null ) { this . scheduledActions = null ; return ; } this . scheduledActions = new java . util . ArrayList < ScheduledAction > ( scheduledActions ) ;
public class IoUtil { /** * 拷贝文件流 , 使用NIO * @ param in 输入 * @ param out 输出 * @ return 拷贝的字节数 * @ throws IORuntimeException IO异常 */ public static long copy ( FileInputStream in , FileOutputStream out ) throws IORuntimeException { } }
Assert . notNull ( in , "FileInputStream is null!" ) ; Assert . notNull ( out , "FileOutputStream is null!" ) ; final FileChannel inChannel = in . getChannel ( ) ; final FileChannel outChannel = out . getChannel ( ) ; try { return inChannel . transferTo ( 0 , inChannel . size ( ) , outChannel ) ; } catch ( IOException e ) { throw new IORuntimeException ( e ) ; }
public class EventFilterLexer {
    /**
     * $ANTLR start "GT"
     * Lexer rule for the GT token: matches the single character '>'.
     * Generated by ANTLR from EventFilter.g; prefer editing the grammar over this method.
     *
     * @throws RecognitionException if the input does not match '>'
     */
    public final void mGT() throws RecognitionException {
        try {
            int _type = GT;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // EventFilter.g:39:4: ( '>' )
            // EventFilter.g:39:6: '>'
            {
                match('>');
            }
            // Publish token type/channel to the shared lexer state.
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class SecurityContext { /** * Use securityForOperation instead * @ since 2.8.1 * @ param path path to secure * @ return list of applicable security references * @ deprecated { @ link SecurityContext # securityForOperation } */ @ Deprecated public List < SecurityReference > securityForPath ( String path ) { } }
if ( selector . test ( path ) ) { return securityReferences ; } return new ArrayList < SecurityReference > ( ) ;
public class FileSystem { /** * Replace the special characters by HTML entities . * @ param string the string to decode . * @ return decoded string or { @ code s } . */ private static String encodeHTMLEntities ( String string ) { } }
if ( string == null ) { return null ; } try { return URLEncoder . encode ( string , Charset . defaultCharset ( ) . displayName ( ) ) ; } catch ( UnsupportedEncodingException exception ) { return string ; }
public class URLEncodedUtils {
    /**
     * Encode a String using the {@link #FRAGMENT} set of characters.
     * Used by URIBuilder to encode the userinfo segment.
     *
     * @param content the string to encode, does not convert space to '+'
     * @param charset the charset to use
     * @return the encoded string
     */
    static String encFragment(final String content, final Charset charset) {
        // Delegates to the shared percent-encoder; 'false' disables space-to-'+' conversion.
        return urlencode(content, charset, FRAGMENT, false);
    }
}
public class MicroMetaDao { /** * 锟斤拷荼锟斤拷锟斤拷询锟斤拷录锟斤拷 */ public int queryObjCountByCondition ( String tableName , String condition , Object [ ] paramArray ) { } }
/* JdbcTemplate jdbcTemplate = ( JdbcTemplate ) MicroDbHolder . getDbSource ( dbName ) ; */ // String tableName = changeTableNameCase ( otableName ) ; JdbcTemplate jdbcTemplate = getMicroJdbcTemplate ( ) ; String sql = "" ; sql = "select count(1) from " + tableName + " where " + condition ; logger . debug ( sql ) ; logger . debug ( Arrays . toString ( paramArray ) ) ; Integer total = jdbcTemplate . queryForObject ( sql , Integer . class , paramArray ) ; return total ;
public class ODataJsonParser { /** * Gets the entity type name . * @ return the entity type * @ throws ODataUnmarshallingException */ private String getEntityName ( ) throws ODataUnmarshallingException { } }
String odataType = odataValues . get ( JsonConstants . TYPE ) ; if ( isNullOrEmpty ( odataType ) ) { TargetType targetType = getTargetType ( ) ; if ( targetType == null ) { throw new ODataUnmarshallingException ( "Could not find entity name" ) ; } return targetType . typeName ( ) ; } else { if ( odataType . startsWith ( "#" ) ) { odataType = odataType . substring ( 1 ) ; } return odataType ; }
public class AWSResourceGroupsClient {
    /**
     * Returns a list of ARNs of resources that are members of a specified resource group.
     *
     * @param request the ListGroupResources request
     * @return Result of the ListGroupResources operation returned by the service.
     * @throws UnauthorizedException
     *         The request has not been applied because it lacks valid authentication credentials for the target
     *         resource.
     * @throws BadRequestException
     *         The request does not comply with validation rules that are defined for the request parameters.
     * @throws ForbiddenException
     *         The caller is not authorized to make the request.
     * @throws NotFoundException
     *         One or more resources specified in the request do not exist.
     * @throws MethodNotAllowedException
     *         The request uses an HTTP method which is not allowed for the specified resource.
     * @throws TooManyRequestsException
     *         The caller has exceeded throttling limits.
     * @throws InternalServerErrorException
     *         An internal error occurred while processing the request.
     * @sample AWSResourceGroups.ListGroupResources
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/resource-groups-2017-11-27/ListGroupResources"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ListGroupResourcesResult listGroupResources(ListGroupResourcesRequest request) {
        // Run pre-execution hooks (request handlers) before dispatching the call.
        request = beforeClientExecution(request);
        return executeListGroupResources(request);
    }
}
public class Solo {
    /**
     * Long clicks the specified coordinates.
     *
     * @param x the x coordinate
     * @param y the y coordinate
     */
    public void clickLongOnScreen(float x, float y) {
        if (config.commandLogging) {
            Log.d(config.commandLoggingTag, "clickLongOnScreen(" + x + ", " + y + ")");
        }
        // time=0 and view=null presumably select the default long-press duration
        // with no associated target view — confirm against the Clicker implementation.
        clicker.clickLongOnScreen(x, y, 0, null);
    }
}
public class MergePath {
    /**
     * Adds a new path to the end of the merge path.
     * A plain path is appended to the root's path list (once). Adding a MergePath
     * that shares this root is a no-op; otherwise its sub-paths are flattened and
     * added recursively, rebased onto the merge path's own path name.
     * NOTE(review): assumes {@code _root} is itself a MergePath — confirm that
     * invariant holds for all construction paths.
     *
     * @param path the new path to search
     */
    public void addMergePath(PathImpl path) {
        if (!(path instanceof MergePath)) {
            // Need to normalize so directory paths ends with a "./"
            // XXX:
            // if (path.isDirectory())
            //   path = path.lookup("./");

            ArrayList<PathImpl> pathList = ((MergePath) _root)._pathList;

            // Avoid duplicate entries in the merge list.
            if (!pathList.contains(path))
                pathList.add(path);
        } else if (((MergePath) path)._root == _root)
            // Same root: the paths are already merged; nothing to do.
            return;
        else {
            // Flatten the foreign MergePath: rebase each of its sub-paths onto
            // its path name and add them individually.
            MergePath mergePath = (MergePath) path;
            ArrayList<PathImpl> subPaths = mergePath.getMergePaths();
            String pathName = "./" + mergePath._pathname + "/";

            for (int i = 0; i < subPaths.size(); i++) {
                PathImpl subPath = subPaths.get(i);

                addMergePath(subPath.lookup(pathName));
            }
        }
    }
}
public class GroupMatcher {
    /**
     * Create a GroupMatcher that matches groups ending with the given string.
     *
     * @param compareTo the suffix that group names must end with
     * @param <T> the key type the matcher applies to
     * @return a matcher using the {@code ENDS_WITH} string operator
     */
    public static <T extends Key<T>> GroupMatcher<T> groupEndsWith(final String compareTo) {
        return new GroupMatcher<>(compareTo, StringOperatorName.ENDS_WITH);
    }
}
public class vlan {
    /**
     * Use this API to update vlan.
     * Builds a sparse update resource carrying only the id (the lookup key),
     * aliasname and ipv6dynamicrouting; other attributes are left untouched.
     *
     * @param client   the nitro service used to issue the request
     * @param resource the vlan carrying the values to update
     * @return the response of the update operation
     * @throws Exception if the update request fails
     */
    public static base_response update(nitro_service client, vlan resource) throws Exception {
        vlan updateresource = new vlan();
        updateresource.id = resource.id;
        updateresource.aliasname = resource.aliasname;
        updateresource.ipv6dynamicrouting = resource.ipv6dynamicrouting;
        return updateresource.update_resource(client);
    }
}
public class TargetMethodFinder { /** * public void post ( TaskCacheFragmentInterface cacheFragment , Object result , Task < ? > task ) { * Pair < Method , Object > target = getMethod ( cacheFragment , getResultType ( result , task ) , task ) ; * if ( target ! = null ) { * invoke ( target , result , task ) ; */ private static Pair < Method , Object > findMethodInActivityAndFragments ( FragmentActivity activity , Class < ? > resultType , Class < ? extends TaskResult > annotation , Task < ? > task , boolean compareFragmentIndex ) { } }
Pair < Method , Object > pair = findMethodInActivity ( activity , activity . getClass ( ) , resultType , annotation , task ) ; if ( pair != null ) { return pair ; } return findMethodInFragmentManager ( FragmentHack . getFragmentManager ( activity ) , resultType , annotation , task , compareFragmentIndex ) ;
public class AnalyticFormulas {
    /**
     * Calculates the Black-Scholes option value of a call, i.e., the payoff max(S(T)-K,0) P, where S follows a log-normal process with constant log-volatility.
     * The model specific quantities are considered to be random variables, i.e.,
     * the function may calculate a per-path valuation in a single call.
     * NOTE(review): the documented special case for optionStrike &le; 0.0 is not
     * visible in this implementation — confirm it is handled by a caller or is missing.
     *
     * @param forward The forward of the underlying.
     * @param volatility The Black-Scholes volatility.
     * @param optionMaturity The option maturity T.
     * @param optionStrike The option strike. If the option strike is &le; 0.0 the method returns the value of the forward contract paying S(T)-K in T.
     * @param payoffUnit The payoff unit (e.g., the discount factor)
     * @return Returns the value of a European call option under the Black-Scholes model.
     */
    public static RandomVariable blackScholesGeneralizedOptionValue(RandomVariable forward, RandomVariable volatility, double optionMaturity, double optionStrike, RandomVariable payoffUnit) {
        if (optionMaturity < 0) {
            // Expired option: value zero, preserving the random-variable type of 'forward'.
            return forward.mult(0.0);
        } else {
            // d+ = ( log(F/K) + sigma^2 T / 2 ) / ( sigma sqrt(T) )
            RandomVariable dPlus = forward.div(optionStrike).log().add(volatility.squared().mult(0.5 * optionMaturity)).div(volatility).div(Math.sqrt(optionMaturity));
            // d- = d+ - sigma sqrt(T)
            RandomVariable dMinus = dPlus.sub(volatility.mult(Math.sqrt(optionMaturity)));
            // value = ( N(d+) F - N(d-) K ) * payoffUnit
            RandomVariable valueAnalytic = dPlus.apply(NormalDistribution::cumulativeDistribution).mult(forward).sub(dMinus.apply(NormalDistribution::cumulativeDistribution).mult(optionStrike)).mult(payoffUnit);
            return valueAnalytic;
        }
    }
}
public class FunctionSQL { /** * Evaluates and returns this Function in the context of the session . < p > */ @ Override public Object getValue ( Session session ) { } }
Object [ ] data = new Object [ nodes . length ] ; for ( int i = 0 ; i < nodes . length ; i ++ ) { Expression e = nodes [ i ] ; if ( e != null ) { data [ i ] = e . getValue ( session , e . dataType ) ; } } return getValue ( session , data ) ;
public class JPEGDecoder {
    /**
     * Starts the decode process. This will advance the JPEG stream to the start
     * of the image data. It also checks if that JPEG file can be decoded by this
     * library.
     *
     * @return true if the JPEG can be decoded.
     * @throws IOException if an IO error occurred
     * @throws IllegalStateException if decoding has already been started
     */
    public boolean startDecode() throws IOException {
        if (insideSOS) {
            throw new IllegalStateException("decode already started");
        }
        if (foundEOI) {
            // End-of-image was already reached on a previous call; nothing left to decode.
            return false;
        }

        decodeHeader();
        int m = getMarker();
        // Consume markers until start-of-scan (SOS, 0xDA) or end-of-image (EOI, 0xD9).
        while (m != 0xD9) { // EOI
            if (m == 0xDA) { // SOS
                processScanHeader();
                insideSOS = true;
                currentMCURow = 0;
                reset();
                return true;
            } else {
                processMarker(m);
            }
            m = getMarker();
        }

        foundEOI = true;
        return false;
    }
}
public class FastLimitedQueue {
    /**
     * Add an element at the tail of the queue, replacing the least recently added item if the queue is full.
     * The item is only added to the queue if it is not already contained. Runs in constant time, assuming the
     * hash function disperses the elements properly among the buckets of the underlying hash set.
     *
     * @param e element to add at the tail of the queue, if not already contained in the queue
     * @return <code>true</code> if the given item was not yet contained in the queue
     */
    public boolean add(E e) {
        // Reject duplicates up front.
        boolean isNew = !set.contains(e);
        if (isNew) {
            set.add(e);
            queue.add(e);
            // Evict oldest entries until we are back within the size limit.
            while (queue.size() > sizeLimit) {
                remove();
            }
        }
        return isNew;
    }
}
public class NaturalLanguageClassifier {
    /**
     * Create classifier.
     * Sends data to create and train a classifier and returns information about the new classifier.
     * Builds a multipart/form-data POST to {@code v1/classifiers} carrying the
     * training metadata (JSON) and training data (CSV) streams.
     *
     * @param createClassifierOptions the {@link CreateClassifierOptions} containing the options for the call
     * @return a {@link ServiceCall} with a response type of {@link Classifier}
     */
    public ServiceCall<Classifier> createClassifier(CreateClassifierOptions createClassifierOptions) {
        Validator.notNull(createClassifierOptions, "createClassifierOptions cannot be null");
        String[] pathSegments = { "v1/classifiers" };
        RequestBuilder builder = RequestBuilder.post(RequestBuilder.constructHttpUrl(getEndPoint(), pathSegments));
        // Attach SDK analytics headers for this operation.
        Map<String, String> sdkHeaders = SdkCommon.getSdkHeaders("natural_language_classifier", "v1", "createClassifier");
        for (Entry<String, String> header : sdkHeaders.entrySet()) {
            builder.header(header.getKey(), header.getValue());
        }
        builder.header("Accept", "application/json");
        MultipartBody.Builder multipartBuilder = new MultipartBody.Builder();
        multipartBuilder.setType(MultipartBody.FORM);
        RequestBody trainingMetadataBody = RequestUtils.inputStreamBody(createClassifierOptions.metadata(), "application/json");
        multipartBuilder.addFormDataPart("training_metadata", "filename", trainingMetadataBody);
        RequestBody trainingDataBody = RequestUtils.inputStreamBody(createClassifierOptions.trainingData(), "text/csv");
        multipartBuilder.addFormDataPart("training_data", "filename", trainingDataBody);
        builder.body(multipartBuilder.build());
        return createServiceCall(builder.build(), ResponseConverterUtils.getObject(Classifier.class));
    }
}
public class CRCTables { /** * Method for CRC calculation for the just one value ( type long ) Algorithm : Load the register with zero bits . * Reverse message Augment the message by appending W zero bits to the end of it . While ( more message bits ) Begin * Shift the register left by one bit , reading the next bit of the augmented message into register bit position 0. * If ( a 1 bit popped out of the register during step 3 ) Register = Register XOR Poly . End Reverse register The * register now contains the CRC . Notes : W = 32 , that ' s why we have offsets 32 , 40 , 48 and 56 instead of 0,8,16,24 * Size of register = W ; * @ param value A < code > long < / code > value , for which the CRC should be calculated . * @ return The remainder of the polynomial division - - > CRC . */ protected final int calculateCRC32 ( final long value ) { } }
long l = value ; int register = 0 ; // We are going to throw the first bit away . Then making int from long . final int gxInt = ( int ) ( generatorPolynom & BIN_MASK_33BIT ) ; // the first bit before bit shifting in the input polynom int bit = 0 ; for ( int i = 0 ; i < Long . SIZE ; i ++ ) { // is the highest bit set ? if ( ( l & BIN_MASK_1FIRST_ONLY ) == 0 ) { bit = 0 ; } else { bit = 1 ; } // is the highest bit set ? if ( ( register & BIN_MASK_1FIRST_ONLY_INT ) == 0 ) { register = register << 1 ; l = l << 1 ; register += bit ; } else { register = register << 1 ; l = l << 1 ; register += bit ; register = register ^ gxInt ; } } return Integer . reverse ( register ) ;
public class CommonOps_DDRM {
    /**
     * Finds the element with the maximum value along each column in the input matrix and returns the results in a vector:<br>
     * <br>
     * b<sub>j</sub> = max(i=1:m ; a<sub>ij</sub>)
     *
     * @param input Input matrix
     * @param output Optional storage for output. Reshaped into a row vector. Modified.
     * @return Vector containing the maximum of each column
     */
    public static DMatrixRMaj maxCols(DMatrixRMaj input, DMatrixRMaj output) {
        if (output == null) {
            output = new DMatrixRMaj(1, input.numCols);
        } else {
            output.reshape(1, input.numCols);
        }
        // Row-major storage: elements of column 'cols' are spaced numCols apart.
        for (int cols = 0; cols < input.numCols; cols++) {
            double maximum = -Double.MAX_VALUE;

            int index = cols;
            int end = index + input.numCols * input.numRows;
            for (; index < end; index += input.numCols) {
                double v = input.data[index];
                if (v > maximum)
                    maximum = v;
            }

            output.set(cols, maximum);
        }
        return output;
    }
}
public class HerokuAPI {
    /**
     * Rollback an app to a specific release.
     *
     * @param appName App name. See {@link #listApps} for a list of apps that can be used.
     * @param releaseUuid Release UUID. See {@link #listReleases} for a list of the app's releases.
     * @return the release object
     */
    public Release rollback(String appName, String releaseUuid) {
        // Delegates to the Rollback request command, authenticated with this API's key.
        return connection.execute(new Rollback(appName, releaseUuid), apiKey);
    }
}
public class SREsPreferencePage {
    /**
     * Removes the given SREs from the table.
     * If the current default SRE is among the removed ones, a new default is selected
     * from the remaining entries (clamping the old default's index into the table
     * bounds) and default-change listeners are notified.
     *
     * @param sres the SREs to remove.
     */
    @SuppressWarnings("checkstyle:npathcomplexity")
    public void removeSREs(ISREInstall... sres) {
        final ISREInstall defaultSRE = getDefaultSRE();
        final String defaultId = defaultSRE == null ? null : defaultSRE.getId();
        // Locate the table index of the current default SRE, if any.
        int defaultIndex = -1;
        if (defaultId != null) {
            for (int i = 0; defaultIndex == -1 && i < this.sreTable.getItemCount(); ++i) {
                if (defaultId.equals(((ISREInstall) this.sreTable.getItem(i).getData()).getId())) {
                    defaultIndex = i;
                }
            }
        }
        final String normedDefaultId = Strings.nullToEmpty(defaultId);
        boolean defaultIsRemoved = false;
        for (final ISREInstall sre : sres) {
            // Only flag the default as removed if it was actually in the backing array.
            if (this.sreArray.remove(sre) && sre.getId().equals(normedDefaultId)) {
                defaultIsRemoved = true;
            }
        }
        refreshSREListUI();
        // Update the default SRE
        if (defaultIsRemoved) {
            if (this.sreTable.getItemCount() == 0) {
                setSelection(null);
            } else {
                // Clamp the previous default index into the new table bounds.
                if (defaultIndex < 0) {
                    defaultIndex = 0;
                } else if (defaultIndex >= this.sreTable.getItemCount()) {
                    defaultIndex = this.sreTable.getItemCount() - 1;
                }
                setSelection(new StructuredSelection(this.sreTable.getItem(defaultIndex).getData()));
            }
        }
        this.sresList.refresh(true);
        if (defaultIsRemoved) {
            fireDefaultSREChanged();
        }
        updateUI();
    }
}
public class Settings {
    /**
     * Returns the temporary directory, creating it lazily on first access.
     * The directory is created beneath the configured {@code TEMP_DIRECTORY}
     * setting, falling back to the JVM's {@code java.io.tmpdir}. Synchronized
     * so concurrent callers share a single directory.
     *
     * @return the temporary directory
     * @throws java.io.IOException if the directory could not be created
     */
    public synchronized File getTempDirectory() throws IOException {
        if (tempDirectory == null) {
            final File baseTemp = new File(getString(Settings.KEYS.TEMP_DIRECTORY, System.getProperty("java.io.tmpdir")));
            tempDirectory = FileUtils.createTempDirectory(baseTemp);
        }
        return tempDirectory;
    }
}
public class VerticalViewPager { /** * This method will be invoked when the current page is scrolled , either as part * of a programmatically initiated smooth scroll or a user initiated touch scroll . * If you override this method you must call through to the superclass implementation * ( e . g . super . onPageScrolled ( position , offset , offsetPixels ) ) before onPageScrolled * returns . * @ param position Position index of the first page currently being displayed . * Page position + 1 will be visible if positionOffset is nonzero . * @ param offset Value from [ 0 , 1 ) indicating the offset from the page at position . * @ param offsetPixels Value in pixels indicating the offset from position . */ protected void onPageScrolled ( int position , float offset , int offsetPixels ) { } }
// Offset any decor views if needed - keep them on - screen at all times . if ( mDecorChildCount > 0 ) { final int scrollY = getScrollY ( ) ; int paddingTop = getPaddingTop ( ) ; int paddingBottom = getPaddingBottom ( ) ; final int height = getHeight ( ) ; final int childCount = getChildCount ( ) ; for ( int i = 0 ; i < childCount ; i ++ ) { final View child = getChildAt ( i ) ; final LayoutParams lp = ( LayoutParams ) child . getLayoutParams ( ) ; if ( ! lp . isDecor ) continue ; final int vgrav = lp . gravity & Gravity . VERTICAL_GRAVITY_MASK ; int childTop = 0 ; switch ( vgrav ) { default : childTop = paddingTop ; break ; case Gravity . TOP : childTop = paddingTop ; paddingTop += child . getHeight ( ) ; break ; case Gravity . CENTER_VERTICAL : childTop = Math . max ( ( height - child . getMeasuredHeight ( ) ) / 2 , paddingTop ) ; break ; case Gravity . BOTTOM : childTop = height - paddingBottom - child . getMeasuredHeight ( ) ; paddingBottom += child . getMeasuredHeight ( ) ; break ; } childTop += scrollY ; final int childOffset = childTop - child . getTop ( ) ; if ( childOffset != 0 ) { child . offsetTopAndBottom ( childOffset ) ; } } } if ( mOnPageChangeListener != null ) { mOnPageChangeListener . onPageScrolled ( position , offset , offsetPixels ) ; } if ( mInternalPageChangeListener != null ) { mInternalPageChangeListener . onPageScrolled ( position , offset , offsetPixels ) ; } if ( mPageTransformer != null ) { final int scrollY = getScrollY ( ) ; final int childCount = getChildCount ( ) ; for ( int i = 0 ; i < childCount ; i ++ ) { final View child = getChildAt ( i ) ; final LayoutParams lp = ( LayoutParams ) child . getLayoutParams ( ) ; if ( lp . isDecor ) continue ; final float transformPos = ( float ) ( child . getTop ( ) - scrollY ) / getClientHeight ( ) ; mPageTransformer . transformPage ( child , transformPos ) ; } } mCalledSuper = true ;
public class IdentityPatchContext { /** * Record a content loader for a given patch id . * @ param patchID the patch id * @ param contentLoader the content loader */ protected void recordContentLoader ( final String patchID , final PatchContentLoader contentLoader ) { } }
if ( contentLoaders . containsKey ( patchID ) ) { throw new IllegalStateException ( "Content loader already registered for patch " + patchID ) ; // internal wrong usage , no i18n } contentLoaders . put ( patchID , contentLoader ) ;
public class LoggingHandler { /** * Formats an event and returns the formatted message . This method is currently only used for formatting * { @ link ChannelOutboundHandler # connect ( ChannelHandlerContext , SocketAddress , SocketAddress , ChannelPromise ) } . * @ param eventName the name of the event * @ param firstArg the first argument of the event * @ param secondArg the second argument of the event */ protected String format ( ChannelHandlerContext ctx , String eventName , Object firstArg , Object secondArg ) { } }
if ( secondArg == null ) { return formatSimple ( ctx , eventName , firstArg ) ; } String chStr = ctx . channel ( ) . toString ( ) ; String arg1Str = String . valueOf ( firstArg ) ; String arg2Str = secondArg . toString ( ) ; StringBuilder buf = new StringBuilder ( chStr . length ( ) + 1 + eventName . length ( ) + 2 + arg1Str . length ( ) + 2 + arg2Str . length ( ) ) ; buf . append ( chStr ) . append ( ' ' ) . append ( eventName ) . append ( ": " ) . append ( arg1Str ) . append ( ", " ) . append ( arg2Str ) ; return buf . toString ( ) ;
public class API_Api {
    /**
     * Normal Init level APIs.
     * Registers the documentation routes: GET /api (overall API document) and
     * GET /api/example/* (per-API examples, with the target name base64-encoded
     * in the path segment after "/api/example/").
     *
     * @param gwAPI  the gateway API to register routes on
     * @param facade the facade passed to each route handler
     * @throws Exception if route registration fails
     */
    public static void init(final GwAPI gwAPI, GwFacade facade) throws Exception {
        // Overall APIs
        gwAPI.route(HttpMethods.GET, "/api", API.VOID, new GwCode(facade, "Document API", true) {
            @Override
            public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {
                Result<Void> r = context.getAPI(trans, resp, gwAPI);
                switch (r.status) {
                    case OK:
                        resp.setStatus(HttpStatus.OK_200);
                        break;
                    default:
                        context.error(trans, resp, r);
                }
            }
        });

        // Overall Examples
        gwAPI.route(HttpMethods.GET, "/api/example/*", API.VOID, new GwCode(facade, "Document API", true) {
            @Override
            public void handle(AuthzTrans trans, HttpServletRequest req, HttpServletResponse resp) throws Exception {
                String pathInfo = req.getPathInfo();
                int question = pathInfo.lastIndexOf('?');
                // IMPORTANT, this is size of "/api/example/"
                pathInfo = pathInfo.substring(13, question < 0 ? pathInfo.length() : question);
                // The example name/content-type is base64-encoded in the path segment.
                String nameOrContextType = Symm.base64noSplit.decode(pathInfo);
                // String param = req.getParameter("optional");
                Result<Void> r = context.getAPIExample(trans, resp, nameOrContextType,
                        question >= 0 && "optional=true".equalsIgnoreCase(req.getPathInfo().substring(question + 1)));
                switch (r.status) {
                    case OK:
                        resp.setStatus(HttpStatus.OK_200);
                        break;
                    default:
                        context.error(trans, resp, r);
                }
            }
        });
    }
}
public class Converter { /** * Convert . * @ param _ value the value * @ return the string * @ throws EFapsException */ public static String convert ( final Object _value ) throws EFapsException { } }
String ret = null ; if ( _value instanceof String ) { ret = ( String ) _value ; } else if ( _value instanceof Instance ) { ret = ( ( Instance ) _value ) . getOid ( ) ; } else if ( _value instanceof Number ) { ret = ( ( Number ) _value ) . toString ( ) ; } else if ( _value instanceof LocalDate ) { ret = ( ( LocalDate ) _value ) . toString ( ) ; } else { LOG . warn ( "No specific converter defined for: {}" , _value ) ; ret = String . valueOf ( _value ) ; } return ret ;
public class JDALogger {
    /**
     * Will get the {@link org.slf4j.Logger} for the given Class
     * or create and cache a fallback logger if there is no SLF4J implementation present.
     * The fallback logger will be an instance of a slightly modified version of SLF4Js SimpleLogger.
     *
     * @param clazz The class used for the Logger name
     * @return Logger for given Class
     */
    public static Logger getLog(Class<?> clazz) {
        // LOGS doubles as the lock guarding fallback-logger creation.
        synchronized (LOGS) {
            if (SLF4J_ENABLED)
                return LoggerFactory.getLogger(clazz);
            // Fallback: cache one SimpleLogger per fully-qualified class name.
            return LOGS.computeIfAbsent(clazz.getName(), (n) -> new SimpleLogger(clazz.getSimpleName()));
        }
    }
}
public class JSON { /** * Mutant factory for constructing an instance with specified { @ link PrettyPrinter } , * and returning new instance ( or , if there would be no change , this instance ) . */ public JSON with ( PrettyPrinter pp ) { } }
if ( _prettyPrinter == pp ) { return this ; } return _with ( _features , _streamFactory , _treeCodec , _reader , _writer , pp ) ;
public class JBBPDslBuilder { /** * Add anonymous fixed length bit array . * @ param bits length of the field , must not be null * @ param size number of elements in array , if negative then till the end of stream * @ return the builder instance , must not be null */ public JBBPDslBuilder BitArray ( final JBBPBitNumber bits , final int size ) { } }
return this . BitArray ( null , bits , arraySizeToString ( size ) ) ;
public class AppenderatorImpl {
    /**
     * Unannounce the segments and wait for outstanding persists to finish.
     * Do not unlock base persist dir as we are not waiting for push executor to shut down
     * relying on current JVM to shutdown to not cause any locking problem if the task is restored.
     * In case when task is restored and current task is still active because of push executor (which it shouldn't be
     * since push executor starts daemon threads) then the locking should fail and new task should fail to start.
     * This also means that this method should only be called when task is shutting down.
     */
    @Override
    public void closeNow()
    {
        // CAS guard: only the first caller proceeds; subsequent calls are no-ops.
        if (!closed.compareAndSet(false, true)) {
            log.info("Appenderator already closed");
            return;
        }
        log.info("Shutting down immediately...");
        // Unannounce every sink's segment; failures are alerted per-segment but do
        // not abort the loop, so remaining segments are still unannounced.
        for (Map.Entry<SegmentIdWithShardSpec, Sink> entry : sinks.entrySet()) {
            try {
                segmentAnnouncer.unannounceSegment(entry.getValue().getSegment());
            }
            catch (Exception e) {
                log.makeAlert(e, "Failed to unannounce segment[%s]", schema.getDataSource())
                   .addData("identifier", entry.getKey().toString())
                   .emit();
            }
        }
        try {
            shutdownExecutors();
            // We don't wait for pushExecutor to be terminated. See Javadoc for more details.
            // 365 days is effectively "wait forever" for outstanding persists.
            Preconditions.checkState(
                persistExecutor == null || persistExecutor.awaitTermination(365, TimeUnit.DAYS),
                "persistExecutor not terminated"
            );
            Preconditions.checkState(
                intermediateTempExecutor == null || intermediateTempExecutor.awaitTermination(365, TimeUnit.DAYS),
                "intermediateTempExecutor not terminated"
            );
            persistExecutor = null;
            intermediateTempExecutor = null;
        }
        catch (InterruptedException e) {
            // Restore the interrupt flag before surfacing the failure.
            Thread.currentThread().interrupt();
            throw new ISE("Failed to shutdown executors during close()");
        }
    }
}
public class ProcessStore { /** * set an already defined variable , first from the highest block hierarchy * down to the global variables . * @ param key * name of the variable * @ param value * value of the variable * @ return true if successfully assignd to an existing variable else false */ public boolean setVariable ( Object key , Object value ) { } }
boolean success = false ; Object object = null ; for ( int i = working . size ( ) - 1 ; i >= 0 ; -- i ) { Map < Object , Object > map = working . get ( i ) ; object = map . get ( key ) ; if ( object != null ) { map . put ( key , value ) ; success = true ; break ; } } if ( ! success ) { object = global . get ( key ) ; if ( object != null ) { global . put ( key , value ) ; success = true ; } } return success ;
public class CmsJlanThreadManager { /** * Tries to stop the JLAN server and return after it is stopped , but will also return if the thread hasn ' t stopped after MAX _ SHUTDOWN _ WAIT _ MILLIS . */ public synchronized void stop ( ) { } }
if ( m_thread != null ) { long timeBeforeShutdownWasCalled = System . currentTimeMillis ( ) ; JLANServer . shutdownServer ( new String [ ] { } ) ; while ( m_thread . isAlive ( ) && ( ( System . currentTimeMillis ( ) - timeBeforeShutdownWasCalled ) < MAX_SHUTDOWN_WAIT_MILLIS ) ) { try { Thread . sleep ( 500 ) ; } catch ( InterruptedException e ) { // ignore } } }
public class XMLHolidayHelper { /** * Gets the type . * @ param eType * the type of holiday in the config * @ return the type of holiday */ @ Nonnull public static IHolidayType getType ( @ Nonnull final HolidayType eType ) { } }
switch ( eType ) { case OFFICIAL_HOLIDAY : return EHolidayType . OFFICIAL_HOLIDAY ; case UNOFFICIAL_HOLIDAY : return EHolidayType . UNOFFICIAL_HOLIDAY ; default : throw new IllegalArgumentException ( "Unknown type " + eType ) ; }
public class MetadataContext { /** * Invokes { @ link DatabaseMetaData # getColumnPrivileges ( java . lang . String , java . lang . String , java . lang . String , * java . lang . String ) } with given arguments and returns bound information . * @ param catalog the value for { @ code catalog } parameter * @ param schema the value for { @ code schema } parameter * @ param table the value for { @ code table } parameter * @ param columnNamePattern the value for { @ code columnNamePattern } parameter * @ return a list of column privileges * @ throws SQLException if a database error occurs . * @ see DatabaseMetaData # getColumnPrivileges ( String , String , String , String ) */ public List < ColumnPrivilege > getColumnPrivileges ( final String catalog , final String schema , final String table , final String columnNamePattern ) throws SQLException { } }
final List < ColumnPrivilege > list = new ArrayList < > ( ) ; try ( ResultSet results = databaseMetadata . getColumnPrivileges ( catalog , schema , table , columnNamePattern ) ) { if ( results != null ) { bind ( results , ColumnPrivilege . class , list ) ; } } return list ;
public class AbstractDecorator { /** * Checks if a error belongs to this widget . * @ param perror editor error to check * @ return true if the error belongs to this widget */ protected boolean editorErrorMatches ( final EditorError perror ) { } }
return perror != null && perror . getEditor ( ) != null && ( equals ( perror . getEditor ( ) ) || perror . getEditor ( ) . equals ( editor ) ) ;
public class JobExecutionsInner { /** * Starts an elastic job execution . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param jobAgentName The name of the job agent . * @ param jobName The name of the job to get . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the JobExecutionInner object if successful . */ public JobExecutionInner beginCreate ( String resourceGroupName , String serverName , String jobAgentName , String jobName ) { } }
return beginCreateWithServiceResponseAsync ( resourceGroupName , serverName , jobAgentName , jobName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Scheduler { /** * Shuts the scheduler down . After shut down no more tasks can be added to the scheduler . */ public void shutdown ( ) { } }
synchronized ( globalLock ) { for ( Instance i : allInstances ) { i . removeSlotListener ( ) ; i . cancelAndReleaseAllSlots ( ) ; } allInstances . clear ( ) ; allInstancesByHost . clear ( ) ; instancesWithAvailableResources . clear ( ) ; taskQueue . clear ( ) ; }
public class TangoDeviceAppender { /** * Release any resources allocated within the appender . */ public void close ( ) { } }
if ( lc_dev_proxy != null ) { try { DeviceData dd = new DeviceData ( ) ; dd . insert ( dev_name ) ; lc_dev_proxy . command_inout_asynch ( "UnRegister" , dd , true ) ; } catch ( DevFailed dv ) { // Ignore : some old LogViewer may not support the Unregister cmd } } lc_dev_proxy = null ; dev_name = null ;
public class LocalTranCoordImpl {
    /**
     * Complete the deferred LTC start by retrieving the remaining LTC config from component metadata
     */
    protected void getComponentMetadataForLTC()
    {
        // NOTE(review): as visible in this chunk the method only clears the deferred-config
        // flag and emits entry/exit trace; no metadata retrieval appears here. Presumably the
        // retrieval was removed or happens elsewhere -- confirm against the full class.
        if (_deferredConfig)
        {
            if (tc.isEntryEnabled())
                Tr.entry(tc, "getComponentMetadataForLTC");
            // Mark deferred configuration as completed so this path runs at most once.
            _deferredConfig = false;
            if (tc.isEntryEnabled())
                Tr.exit(tc, "getComponentMetadataForLTC");
        }
    }
}
public class SarlLinkFactory { /** * Create the label for a procedure lambda . * @ param linkInfo the type . * @ return the label . */ protected Content createProcedureLambdaLabel ( LinkInfoImpl linkInfo ) { } }
final ParameterizedType type = linkInfo . type . asParameterizedType ( ) ; if ( type != null ) { final Type [ ] arguments = type . typeArguments ( ) ; if ( arguments != null && arguments . length > 0 ) { return createLambdaLabel ( linkInfo , arguments , arguments . length ) ; } } return linkInfo . label ;
public class DBPIDGenerator { /** * Read the highest value from the old pidGen directory if it exists , and * ensure it is never used . */ private void upgradeIfNeeded ( File oldPidGenDir ) throws IOException { } }
if ( oldPidGenDir != null && oldPidGenDir . isDirectory ( ) ) { String [ ] names = oldPidGenDir . list ( ) ; Arrays . sort ( names ) ; if ( names . length > 0 ) { BufferedReader in = new BufferedReader ( new InputStreamReader ( new FileInputStream ( new File ( oldPidGenDir , names [ names . length - 1 ] ) ) ) ) ; String lastLine = null ; String line ; while ( ( line = in . readLine ( ) ) != null ) { lastLine = line ; } in . close ( ) ; if ( lastLine != null ) { String [ ] parts = lastLine . split ( "|" ) ; if ( parts . length == 2 ) { neverGeneratePID ( parts [ 0 ] ) ; } } } }
public class ApplicationGatewaysInner { /** * Starts the specified application gateway . * @ param resourceGroupName The name of the resource group . * @ param applicationGatewayName The name of the application gateway . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void start ( String resourceGroupName , String applicationGatewayName ) { } }
startWithServiceResponseAsync ( resourceGroupName , applicationGatewayName ) . toBlocking ( ) . last ( ) . body ( ) ;
public class MeshGenerator {
    /**
     * Converts a standard triangle model to a wireframe model, with the option to perform
     * de-triangulation (convert faces of triangles back to a polygon). This assumes that the
     * positions have 3 components, in the x, y, z order. No excess data is kept. The final
     * model will have no duplicate vertices or edges.
     *
     * @param positions The position list (mutated in place: duplicates removed)
     * @param indices The indices (mutated in place: replaced by unique edge index pairs)
     * @param deTriangulation Merge triangles into faces when possible
     */
    public static void toWireframe(TFloatList positions, TIntList indices, boolean deTriangulation) {
        int indicesSize = indices.size();
        int positionsSize = positions.size();
        // Remove duplicate vertices. NOTE: exact float equality is used on purpose --
        // only bitwise-identical positions are merged. This pass is O(n^2) over vertices.
        for (int i = 0; i < positionsSize; i += 3) {
            final float x = positions.get(i);
            final float y = positions.get(i + 1);
            final float z = positions.get(i + 2);
            // Search for a duplicate
            for (int ii = i + 3; ii < positionsSize; ii += 3) {
                final float ox = positions.get(ii);
                final float oy = positions.get(ii + 1);
                final float oz = positions.get(ii + 2);
                if (x == ox && y == oy && z == oz) {
                    // If one is removed, we need to fix the indices
                    for (int iii = 0; iii < indicesSize; iii++) {
                        final int index = indices.get(iii);
                        if (index == ii / 3) {
                            // Any index referring to it is replaced by the original
                            indices.replace(iii, i / 3);
                        } else if (index > ii / 3) {
                            // Any index above is decremented
                            indices.replace(iii, index - 1);
                        }
                    }
                    // Then we can remove it properly (3 components per vertex)
                    positions.remove(ii, 3);
                    positionsSize -= 3;
                    ii -= 3;
                }
            }
        }
        // Next we remove duplicate edges using a hash set
        final Set<Vector2i> edges = new HashSet<>();
        final Set<Vector2i> cancelled = new HashSet<>();
        for (int i = 0; i < indicesSize; i += 3) {
            final int i0 = indices.get(i);
            final int i1 = indices.get(i + 1);
            final int i2 = indices.get(i + 2);
            // If we need to remove unnecessary edges
            if (deTriangulation) {
                // Get the points of the triangle
                final Vector3f p00 = new Vector3f(positions.get(i0 * 3), positions.get(i0 * 3 + 1), positions.get(i0 * 3 + 2));
                final Vector3f p01 = new Vector3f(positions.get(i1 * 3), positions.get(i1 * 3 + 1), positions.get(i1 * 3 + 2));
                final Vector3f p02 = new Vector3f(positions.get(i2 * 3), positions.get(i2 * 3 + 1), positions.get(i2 * 3 + 2));
                // Test with all the other triangles
                for (int ii = i + 3; ii < indicesSize; ii += 3) {
                    // Get the indices of the other triangle
                    final int ii0 = indices.get(ii);
                    final int ii1 = indices.get(ii + 1);
                    final int ii2 = indices.get(ii + 2);
                    // Get the vertices of the other triangle
                    final Vector3f p10 = new Vector3f(positions.get(ii0 * 3), positions.get(ii0 * 3 + 1), positions.get(ii0 * 3 + 2));
                    final Vector3f p11 = new Vector3f(positions.get(ii1 * 3), positions.get(ii1 * 3 + 1), positions.get(ii1 * 3 + 2));
                    final Vector3f p12 = new Vector3f(positions.get(ii2 * 3), positions.get(ii2 * 3 + 1), positions.get(ii2 * 3 + 2));
                    // Test for a common edge
                    final Vector2i edge = getCommonEdge(p00, p01, p02, p10, p11, p12);
                    if (edge != null) {
                        // If we have one, add it to the cancelled list to be removed later since we can't do that now
                        final int c00 = indices.get(i + edge.getX());
                        final int c01 = indices.get(i + (edge.getX() + 1) % 3);
                        final int c10 = indices.get(ii + edge.getY());
                        final int c11 = indices.get(ii + (edge.getY() + 1) % 3);
                        cancelled.add(new Vector2i(Math.min(c00, c01), Math.max(c00, c01)));
                        cancelled.add(new Vector2i(Math.min(c10, c11), Math.max(c10, c11)));
                    }
                }
            }
            // Sorting the indices for the edges ensure the equalities will work correctly
            edges.add(new Vector2i(Math.min(i0, i1), Math.max(i0, i1)));
            edges.add(new Vector2i(Math.min(i1, i2), Math.max(i1, i2)));
            edges.add(new Vector2i(Math.min(i2, i0), Math.max(i2, i0)));
        }
        // Removed any edge that was flagged as unnecessary by the de-triangulation
        edges.removeAll(cancelled);
        // Finally, clear the indices and re-add them from the now unique edges
        indices.clear();
        for (Vector2i edge : edges) {
            indices.add(edge.getX());
            indices.add(edge.getY());
        }
    }
}
public class XmlHelper { /** * 获取某个节点 * @ param expression * 路径 * @ return { Node } */ public Node getNode ( String expression ) { } }
return ( Node ) evalXPath ( expression , null , XPathConstants . NODE ) ;
public class SansOrm { /** * Use this one if you have custom / provided { @ link TransactionManager } , e . g . to run within web app container . * @ param dataSource the { @ link DataSource } to use by the default * @ param txManager the { @ link TransactionManager } to use for tx management * @ param userTx the { @ link UserTransaction } to use for tx management together with txManager * @ return dataSource that will be used for queries */ public static DataSource initializeTxCustom ( DataSource dataSource , TransactionManager txManager , UserTransaction userTx ) { } }
TransactionElf . setTransactionManager ( txManager ) ; TransactionElf . setUserTransaction ( userTx ) ; return initializeTxNone ( dataSource ) ;
public class NotificationViewCallback { /** * Called only once after this callback is set . * @ param view */ public void onViewSetup ( NotificationView view ) { } }
if ( DBG ) Log . v ( TAG , "onViewSetup" ) ; view . setCornerRadius ( 8.0f ) ; view . setContentMargin ( 50 , 50 , 50 , 50 ) ; view . setShadowEnabled ( true ) ;
public class IntuitResponseDeserializer { /** * Method to add custom deserializer for CustomFieldDefinition * @ param objectMapper the Jackson object mapper */ private void registerModulesForCustomFieldDef ( ObjectMapper objectMapper ) { } }
SimpleModule simpleModule = new SimpleModule ( "CustomFieldDefinition" , new Version ( 1 , 0 , 0 , null ) ) ; simpleModule . addDeserializer ( CustomFieldDefinition . class , new CustomFieldDefinitionDeserializer ( ) ) ; objectMapper . registerModule ( simpleModule ) ; objectMapper . configure ( DeserializationFeature . FAIL_ON_UNKNOWN_PROPERTIES , false ) ;
public class StatsDReporter { private void reportCounter ( final String name , final Counter counter ) { } }
send ( name , String . valueOf ( counter . getCount ( ) ) ) ;
public class SEQUENCER2 {
    /**
     * Handles a view change: installs the new view if it is newer than the current one
     * and updates the coordinator / is_coord state accordingly.
     *
     * ----- Private Methods -----
     */
    protected void handleViewChange(View v) {
        List<Address> mbrs = v.getMembers();
        // Ignore empty views: no coordinator can be derived from them.
        if (mbrs.isEmpty())
            return;
        // Only install strictly newer views; stale or duplicate views are dropped.
        if (view == null || view.compareTo(v) < 0)
            view = v;
        else
            return;
        // Convention here: the first member of the view acts as coordinator.
        Address existing_coord = coord, new_coord = mbrs.get(0);
        boolean coord_changed = !Objects.equals(existing_coord, new_coord);
        if (coord_changed && new_coord != null) {
            coord = new_coord;
            // todo: if I'm the new coord, get the highest seqno from all members. If not, re-send my pending seqno reqs
        }
        // Update whether this node is now the coordinator.
        if (new_coord != null)
            is_coord = new_coord.equals(local_addr);
    }
}