signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ServersResource { /** * Evacuates the server to a new host , supplying a new administration password ( optional ) and allowing specification * of the use of shared storage . * @ param serverId * The id ( UUID ) of the server to be evacuated . * @ param host * The host name or ID of the new physical host for the server . This is the host that the server is to be * moved to . * @ param adminPassword * An optional administration password . If null , the admin password is not set . * @ param sharedStorage * True if the server currently resides on shared storage . If null , the default ( false ) is used . * @ return The action to be performed . */ public EvacuateAction evacuate ( String serverId , String host , String adminPassword , Boolean sharedStorage ) { } }
Evacuate entity = new Evacuate ( ) ; entity . setHost ( host ) ; if ( adminPassword != null && adminPassword . length ( ) > 0 ) { entity . setAdminPassword ( adminPassword ) ; } if ( sharedStorage != null ) { entity . setSharedStorage ( sharedStorage . booleanValue ( ) ) ; } else { entity . setSharedStorage ( false ) ; } return new EvacuateAction ( serverId , entity ) ;
public class Gauge { /** * Defines if the markers should be drawn * @ param VISIBLE */ public void setMarkersVisible ( final boolean VISIBLE ) { } }
if ( null == markersVisible ) { _markersVisible = VISIBLE ; fireUpdateEvent ( VISIBILITY_EVENT ) ; } else { markersVisible . set ( VISIBLE ) ; }
public class Transformers { /** * Groups the items emitted by an { @ code Observable } according to a * specified criterion , and emits these grouped items as * { @ link GroupedObservable } s . The emitted { @ code GroupedObservable } allows * only a single { @ link Subscriber } during its lifetime and if this * { @ code Subscriber } unsubscribes before the source terminates , the next * emission by the source having the same key will trigger a new * { @ code GroupedObservable } emission . * < img width = " 640 " height = " 360 " src = * " https : / / raw . github . com / wiki / ReactiveX / RxJava / images / rx - operators / groupBy . png " * alt = " " > * < em > Note : < / em > A { @ link GroupedObservable } will cache the items it is to * emit until such time as it is subscribed to . For this reason , in order to * avoid memory leaks , you should not simply ignore those * { @ code GroupedObservable } s that do not concern you . Instead , you can * signal to them that they may discard their buffers by applying an * operator like { @ code . ignoreElements ( ) } to them . * < dl > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code groupBy } does not operate by default on a particular * { @ link Scheduler } . < / dd > * < / dl > * @ param keySelector * a function that extracts the key for each item * @ param elementSelector * a function that extracts the return element for each item * @ param evictingMapFactory * a function that given an eviction action returns a { @ link Map } * instance that will be used to assign items to the appropriate * { @ code GroupedObservable } s . The { @ code Map } instance must be * thread - safe and any eviction must trigger a call to the * supplied action ( synchronously or asynchronously ) . This can be * used to limit the size of the map by evicting keys by maximum * size or access time for instance . 
If * { @ code evictingMapFactory } is null then no eviction strategy * will be applied ( and a suitable default thread - safe * implementation of { @ code Map } will be supplied ) . Here ' s an * example using Guava ' s { @ code CacheBuilder } from v19.0: * < pre > * { @ code * Func1 < Action1 < K > , Map < K , Object > > mapFactory * = action - > CacheBuilder . newBuilder ( ) * . maximumSize ( 1000) * . expireAfterAccess ( 12 , TimeUnit . HOUR ) * . removalListener ( key - > action . call ( key ) ) * . < K , Object > build ( ) . asMap ( ) ; * < / pre > * @ param < T > * the type of the input observable * @ param < K > * the key type * @ param < R > * the element type * @ return an { @ code Observable } that emits { @ link GroupedObservable } s , each * of which corresponds to a unique key value and each of which * emits those items from the source Observable that share that key * value * @ see < a href = " http : / / reactivex . io / documentation / operators / groupby . html " > * ReactiveX operators documentation : GroupBy < / a > */ public static < T , K , R > Transformer < T , GroupedObservable < K , R > > groupByEvicting ( final Func1 < ? super T , ? extends K > keySelector , final Func1 < ? super T , ? extends R > elementSelector , final Func1 < Action1 < K > , Map < K , Object > > evictingMapFactory ) { } }
return new Transformer < T , GroupedObservable < K , R > > ( ) { @ Override public Observable < GroupedObservable < K , R > > call ( Observable < T > o ) { return o . groupBy ( keySelector , elementSelector , evictingMapFactory ) ; } } ;
public class JodaBeanSimpleJsonWriter { /** * write simple type */ private void writeSimple ( Class < ? > declaredType , Object value ) throws IOException { } }
Class < ? > realType = value . getClass ( ) ; if ( realType == Integer . class ) { output . writeInt ( ( ( Integer ) value ) . intValue ( ) ) ; } else if ( realType == Long . class ) { output . writeLong ( ( ( Long ) value ) . longValue ( ) ) ; } else if ( realType == Short . class ) { output . writeInt ( ( ( Short ) value ) . shortValue ( ) ) ; } else if ( realType == Byte . class ) { output . writeInt ( ( ( Byte ) value ) . byteValue ( ) ) ; } else if ( realType == Float . class ) { float flt = ( ( Float ) value ) . floatValue ( ) ; if ( Float . isNaN ( flt ) ) { // write as string output . writeNull ( ) ; } else if ( Float . isInfinite ( flt ) ) { // write as string output . writeString ( Float . toString ( flt ) ) ; } else { output . writeFloat ( flt ) ; } } else if ( realType == Double . class ) { double dbl = ( ( Double ) value ) . doubleValue ( ) ; if ( Double . isNaN ( dbl ) ) { // write as string output . writeNull ( ) ; } else if ( Double . isInfinite ( dbl ) ) { // write as string output . writeString ( Double . toString ( dbl ) ) ; } else { output . writeDouble ( dbl ) ; } } else if ( realType == Boolean . class ) { output . writeBoolean ( ( ( Boolean ) value ) . booleanValue ( ) ) ; } else { // write as a string try { String converted = settings . getConverter ( ) . convertToString ( realType , value ) ; if ( converted == null ) { throw new IllegalArgumentException ( "Unable to write because converter returned a null string: " + value ) ; } output . writeString ( converted ) ; } catch ( RuntimeException ex ) { throw new IllegalArgumentException ( "Unable to convert type " + declaredType . getName ( ) + " for real type: " + realType . getName ( ) , ex ) ; } }
public class FieldErrorAttributeProcessor { /** * / * ( 非 Javadoc ) * @ see org . thymeleaf . processor . element . AbstractAttributeTagProcessor # doProcess ( org . thymeleaf . context . ITemplateContext , org . thymeleaf . model . IProcessableElementTag , org . thymeleaf . engine . AttributeName , java . lang . String , org . thymeleaf . processor . element . IElementTagStructureHandler ) */ @ Override protected void doProcess ( ITemplateContext context , IProcessableElementTag tag , AttributeName attributeName , String attributeValue , IElementTagStructureHandler structureHandler ) { } }
final IEngineConfiguration configuration = context . getConfiguration ( ) ; /* * Obtain the Thymeleaf Standard Expression parser */ final IStandardExpressionParser parser = StandardExpressions . getExpressionParser ( configuration ) ; /* * Parse the attribute value as a Thymeleaf Standard Expression */ // final IStandardExpression expression = parser . parseExpression ( context , attributeValue ) ; // get field name . String fieldname = tag . getAttributeValue ( null , "name" ) ; // get field value from struts2 ognl Object parameterValue = getFieldValue ( fieldname ) ; if ( parameterValue != null ) { structureHandler . setAttribute ( "value" , HtmlEscape . escapeHtml5 ( parameterValue . toString ( ) ) ) ; } if ( ! hasFieldError ( fieldname ) ) { return ; } // add field - error css class . IAttribute cssClass = tag . getAttribute ( "class" ) ; String css = cssClass . getValue ( ) ; if ( StringUtils . isBlank ( css ) ) { structureHandler . setAttribute ( "class" , fieldErrorClass ( tag ) ) ; } else { structureHandler . setAttribute ( "class" , fieldErrorClass ( tag ) + " " + css ) ; }
public class MiscUtils { /** * I heart commutativity * @ param buffer ByteBuffer assumed position is at end of data * @ return the cheesy checksum of this VoltTable */ public static final long cheesyBufferCheckSum ( ByteBuffer buffer ) { } }
final int mypos = buffer . position ( ) ; buffer . position ( 0 ) ; long checksum = 0 ; if ( buffer . hasArray ( ) ) { final byte bytes [ ] = buffer . array ( ) ; final int end = buffer . arrayOffset ( ) + mypos ; for ( int ii = buffer . arrayOffset ( ) ; ii < end ; ii ++ ) { checksum += bytes [ ii ] ; } } else { for ( int ii = 0 ; ii < mypos ; ii ++ ) { checksum += buffer . get ( ) ; } } buffer . position ( mypos ) ; return checksum ;
public class CalendarPeriod { /** * / * [ deutsch ] * < p > Formatiert dieses Intervall in einem benutzerdefinierten Format . < / p > * @ param printer format object for printing start and end components * @ param intervalPattern interval pattern containing placeholders { 0 } and { 1 } ( for start and end ) * @ return formatted string in given pattern format */ public String print ( ChronoPrinter < T > printer , String intervalPattern ) { } }
AttributeQuery attrs = printer . getAttributes ( ) ; StringBuilder sb = new StringBuilder ( 32 ) ; int i = 0 ; int n = intervalPattern . length ( ) ; while ( i < n ) { char c = intervalPattern . charAt ( i ) ; if ( ( c == '{' ) && ( i + 2 < n ) && ( intervalPattern . charAt ( i + 2 ) == '}' ) ) { char next = intervalPattern . charAt ( i + 1 ) ; if ( next == '0' ) { printer . print ( this . t1 , sb , attrs ) ; i += 3 ; continue ; } else if ( next == '1' ) { printer . print ( this . t2 , sb , attrs ) ; i += 3 ; continue ; } } sb . append ( c ) ; i ++ ; } return sb . toString ( ) ;
public class MapReduceServletImpl { /** * Handle GET http requests . */ public static void doGet ( HttpServletRequest request , HttpServletResponse response ) throws IOException { } }
String handler = getHandler ( request ) ; if ( handler . startsWith ( COMMAND_PATH ) ) { if ( ! checkForAjax ( request , response ) ) { return ; } StatusHandler . handleCommand ( handler . substring ( COMMAND_PATH . length ( ) + 1 ) , request , response ) ; } else { handleStaticResources ( handler , response ) ; }
public class BlockInlineChecksumReader { /** * Return the generation stamp from the name of the block file . */ public static GenStampAndChecksum getGenStampAndChecksumFromInlineChecksumFile ( String fileName ) throws IOException { } }
String [ ] vals = StringUtils . split ( fileName , '_' ) ; if ( vals . length != 6 ) { // blk , blkid , genstamp , version , checksumtype , byte per checksum throw new IOException ( "unidentified block name format: " + fileName ) ; } if ( Integer . parseInt ( vals [ 3 ] ) != FSDataset . FORMAT_VERSION_INLINECHECKSUM ) { // We only support one version of meta version now . throw new IOException ( "Unsupported format version for file " + fileName ) ; } return new GenStampAndChecksum ( Long . parseLong ( vals [ 2 ] ) , Integer . parseInt ( vals [ 4 ] ) , Integer . parseInt ( vals [ 5 ] ) ) ;
public class FunctionExtensions { /** * Returns a composed function that first applies the { @ code before } * function to its input , and then applies the { @ code after } function to the result . * If evaluation of either function throws an exception , it is relayed to * the caller of the composed function . * @ param < V1 > the type of the first parameter to the { @ code before } function , and to the composed function * @ param < V2 > the type of the second parameter to the { @ code before } function , and to the composed function * @ param < T > the type of output of the { @ code before } function , and input to the { @ code after } function * @ param < R > the type of output to the { @ code after } function , and to the composed function * @ param before the function to apply before the { @ code after } function is applied * @ param after the function to apply after the { @ code before } function is applied * @ return a composed function that first applies the { @ code before } * function and then applies the { @ code after } function * @ throws NullPointerException if { @ code before } or { @ code after } is null * @ see # compose ( Functions . Function1 , Functions . Function1) * @ since 2.9 */ public static < V1 , V2 , T , R > Function2 < V1 , V2 , R > andThen ( final Function2 < ? super V1 , ? super V2 , ? extends T > before , final Function1 < ? super T , ? extends R > after ) { } }
if ( after == null ) throw new NullPointerException ( "after" ) ; if ( before == null ) throw new NullPointerException ( "before" ) ; return new Function2 < V1 , V2 , R > ( ) { @ Override public R apply ( V1 v1 , V2 v2 ) { return after . apply ( before . apply ( v1 , v2 ) ) ; } } ;
public class GroupApi { /** * Deletes an LDAP group link for a specific LDAP provider . * < pre > < code > GitLab Endpoint : DELETE / groups / : id / ldap _ group _ links / : provider / : cn < / code > < / pre > * @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path * @ param cn the CN of the LDAP group link to delete * @ param provider the name of the LDAP provider * @ throws GitLabApiException if any exception occurs */ public void deleteLdapGroupLink ( Object groupIdOrPath , String cn , String provider ) throws GitLabApiException { } }
if ( cn == null || cn . trim ( ) . isEmpty ( ) ) { throw new RuntimeException ( "cn cannot be null or empty" ) ; } if ( provider == null || provider . trim ( ) . isEmpty ( ) ) { throw new RuntimeException ( "LDAP provider cannot be null or empty" ) ; } delete ( Response . Status . OK , null , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "ldap_group_links" , provider , cn ) ;
public class XMLDocument { /** * / * - - - - - [ Text ] - - - - - */ public XMLDocument addText ( String text ) throws SAXException { } }
if ( ! StringUtil . isEmpty ( text ) ) { finishStartElement ( ) ; xml . characters ( text . toCharArray ( ) , 0 , text . length ( ) ) ; } return this ;
public class Cycle { /** * Cycle is usually used within a loop to alternate * between values , like colors or DOM classes . */ @ Override public Object render ( Map < String , Object > context , LNode ... nodes ) { } }
// The group - name is either the first token - expression , or if that is // null ( indicating there is no name ) , give it the name PREPEND followed // by the number of expressions in the cycle - group . String groupName = nodes [ 0 ] == null ? PREPEND + ( nodes . length - 1 ) : super . asString ( nodes [ 0 ] . render ( context ) ) ; // Prepend a groupName with a single - and double quote as to not // let the groupName conflict with other variable assignments groupName = PREPEND + groupName ; Object obj = context . remove ( groupName ) ; List < Object > elements = new ArrayList < Object > ( ) ; for ( int i = 1 ; i < nodes . length ; i ++ ) { elements . add ( nodes [ i ] . render ( context ) ) ; } CycleGroup group ; if ( obj == null ) { group = new CycleGroup ( elements . size ( ) ) ; } else { group = ( CycleGroup ) obj ; } context . put ( groupName , group ) ; return group . next ( elements ) ;
public class HylaFaxClientSpi { /** * This function will resume an existing fax job . * @ param client * The client instance * @ param faxJob * The fax job object containing the needed information * @ throws Exception * Any exception */ protected void resumeFaxJob ( HylaFaxJob faxJob , HylaFAXClient client ) throws Exception { } }
// get job Job job = faxJob . getHylaFaxJob ( ) ; // get job ID long faxJobID = job . getId ( ) ; // resume job client . retry ( faxJobID ) ;
public class DeviceManagerClient { /** * Lists the last few versions of the device configuration in descending order ( i . e . : newest * first ) . * < p > Sample code : * < pre > < code > * try ( DeviceManagerClient deviceManagerClient = DeviceManagerClient . create ( ) ) { * DeviceName name = DeviceName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ REGISTRY ] " , " [ DEVICE ] " ) ; * ListDeviceConfigVersionsResponse response = deviceManagerClient . listDeviceConfigVersions ( name . toString ( ) ) ; * < / code > < / pre > * @ param name The name of the device . For example , * ` projects / p0 / locations / us - central1 / registries / registry0 / devices / device0 ` or * ` projects / p0 / locations / us - central1 / registries / registry0 / devices / { num _ id } ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListDeviceConfigVersionsResponse listDeviceConfigVersions ( String name ) { } }
ListDeviceConfigVersionsRequest request = ListDeviceConfigVersionsRequest . newBuilder ( ) . setName ( name ) . build ( ) ; return listDeviceConfigVersions ( request ) ;
public class WebHandler { /** * Populates the given { @ link UriComponentsBuilder } with request parameters found in the given * { @ link AnnotatedParametersParameterAccessor . BoundMethodParameter } . * @ param builder must not be { @ literal null } . * @ param parameter must not be { @ literal null } . */ @ SuppressWarnings ( "unchecked" ) private static void bindRequestParameters ( UriComponentsBuilder builder , AnnotatedParametersParameterAccessor . BoundMethodParameter parameter ) { } }
Object value = parameter . getValue ( ) ; String key = parameter . getVariableName ( ) ; if ( value instanceof MultiValueMap ) { MultiValueMap < String , String > requestParams = ( MultiValueMap < String , String > ) value ; for ( Map . Entry < String , List < String > > multiValueEntry : requestParams . entrySet ( ) ) { for ( String singleEntryValue : multiValueEntry . getValue ( ) ) { builder . queryParam ( multiValueEntry . getKey ( ) , encodeParameter ( singleEntryValue ) ) ; } } } else if ( value instanceof Map ) { Map < String , String > requestParams = ( Map < String , String > ) value ; for ( Map . Entry < String , String > requestParamEntry : requestParams . entrySet ( ) ) { builder . queryParam ( requestParamEntry . getKey ( ) , encodeParameter ( requestParamEntry . getValue ( ) ) ) ; } } else if ( value instanceof Collection ) { for ( Object element : ( Collection < ? > ) value ) { if ( key != null ) { builder . queryParam ( key , encodeParameter ( element ) ) ; } } } else if ( SKIP_VALUE . equals ( value ) ) { if ( parameter . isRequired ( ) ) { if ( key != null ) { builder . queryParam ( key , String . format ( "{%s}" , parameter . getVariableName ( ) ) ) ; } } } else { if ( key != null ) { builder . queryParam ( key , encodeParameter ( parameter . asString ( ) ) ) ; } }
public class MDateDocument { /** * Retourne la valeur de la propriété alternateDateFormat . * @ return SimpleDateFormat */ public static SimpleDateFormat getAlternateDateFormat ( ) { } }
if ( alternateDateFormat == null ) { // NOPMD // pour construire ce dateFormat on enlève les ' / ' et ' . ' du pattern de dateFormat // ie on peut saisir en France 251202 au lieu de 25/12/02 ou 25/12/2002 final StringBuilder patternSb = new StringBuilder ( getDateFormat ( ) . toPattern ( ) ) ; // note : il faut réévaluer pattern . length ( ) chaque fois dans la condition // puisque la longueur diminue au fur et à mesure for ( int i = 0 ; i < patternSb . length ( ) ; i ++ ) { if ( ! Character . isLetter ( patternSb . charAt ( i ) ) ) { patternSb . deleteCharAt ( i ) ; } } final Locale locale = Locale . getDefault ( ) ; final String pattern = patternSb . toString ( ) ; final String pattern2 = pattern . replaceAll ( "y" , "" ) ; final String pattern3 = pattern2 . replaceAll ( "M" , "" ) ; final SimpleDateFormat myAlternateDateFormat2 = new SimpleDateFormat ( pattern2 , locale ) ; final SimpleDateFormat myAlternateDateFormat3 = new SimpleDateFormat ( pattern3 , locale ) ; // CHECKSTYLE : OFF final SimpleDateFormat myAlternateDateFormat = new SimpleDateFormat ( pattern , locale ) { // CHECKSTYLE : ON private static final long serialVersionUID = 1L ; @ Override public Date parse ( final String text ) throws ParseException { try { return super . parse ( text ) ; } catch ( final ParseException ex ) { final Calendar myCalendar = Calendar . getInstance ( ) ; final int year = myCalendar . get ( Calendar . YEAR ) ; final int month = myCalendar . get ( Calendar . MONTH ) ; try { myCalendar . setTime ( myAlternateDateFormat2 . parse ( text ) ) ; myCalendar . set ( Calendar . YEAR , year ) ; return myCalendar . getTime ( ) ; } catch ( final ParseException ex1 ) { try { myCalendar . setTime ( myAlternateDateFormat3 . parse ( text ) ) ; myCalendar . set ( Calendar . YEAR , year ) ; myCalendar . set ( Calendar . MONTH , month ) ; return myCalendar . 
getTime ( ) ; } catch ( final ParseException ex2 ) { throw ex ; } } } } } ; // on n ' accepte pas le 30/02 ( qui serait alors le 02/03) myAlternateDateFormat . setLenient ( false ) ; myAlternateDateFormat2 . setLenient ( false ) ; myAlternateDateFormat3 . setLenient ( false ) ; alternateDateFormat = myAlternateDateFormat ; // NOPMD } return alternateDateFormat ; // NOPMD
public class DownloadImpl { /** * Only for internal use . * For parallel downloads , Updates the persistableTransfer each time a * part is successfully merged into download file . * Then notify the listeners that new persistableTransfer is available . */ @ SdkInternalApi void updatePersistableTransfer ( Integer lastFullyDownloadedPartNumber ) { } }
synchronized ( this ) { this . lastFullyDownloadedPartNumber = lastFullyDownloadedPartNumber ; } persistableDownload = captureDownloadState ( getObjectRequest , file ) ; S3ProgressPublisher . publishTransferPersistable ( progressListenerChain , persistableDownload ) ;
public class DataSynchronizer { /** * Returns the local collection representing the given namespace . * @ param namespace the namespace referring to the local collection . * @ param resultClass the { @ link Class } that represents documents in the collection . * @ param < T > the type documents in the collection . * @ return the local collection representing the given namespace . */ private < T > MongoCollection < T > getLocalCollection ( final MongoNamespace namespace , final Class < T > resultClass , final CodecRegistry codecRegistry ) { } }
return localClient . getDatabase ( String . format ( "sync_user_%s" , namespace . getDatabaseName ( ) ) ) . getCollection ( namespace . getCollectionName ( ) , resultClass ) . withCodecRegistry ( codecRegistry ) ;
public class DependencyInfoFactory { /** * check if lines with ( c ) are actual copyright references of simple code lines */ private boolean isMathExpression ( String line ) { } }
String cleanLine = cleanLine ( line ) . trim ( ) ; boolean mathExpression = false ; if ( cleanLine . startsWith ( DEFINE ) ) { return true ; } else if ( cleanLine . matches ( CODE_LINE_SUFFIX ) ) { return true ; } // go forward int index = cleanLine . indexOf ( COPYRIGHT_SYMBOL ) ; for ( int i = index + 1 ; i < cleanLine . length ( ) ; i ++ ) { char c = cleanLine . charAt ( i ) ; if ( c == Constants . OPEN_BRACKET || c == Constants . CLOSE_BRACKET || c == Constants . WHITESPACE_CHAR ) { continue ; } else if ( MATH_SYMBOLS . contains ( c ) ) { mathExpression = true ; break ; } else { break ; } } // go backwards if ( mathExpression ) { for ( int i = index - 1 ; i >= 0 ; i -- ) { char c = cleanLine . charAt ( i ) ; if ( c == Constants . OPEN_BRACKET || c == Constants . CLOSE_BRACKET || c == Constants . WHITESPACE_CHAR ) { continue ; } else if ( MATH_SYMBOLS . contains ( c ) ) { mathExpression = true ; break ; } else { break ; } } } return mathExpression ;
public class WriteQueue { /** * Enqueue the runnable . It is not safe for another thread to queue an Runnable directly to the * event loop , because it will be out - of - order with writes . This method allows the Runnable to be * processed in - order with writes . */ void enqueue ( Runnable runnable , boolean flush ) { } }
queue . add ( new RunnableCommand ( runnable ) ) ; if ( flush ) { scheduleFlush ( ) ; }
public class ProductPartitionTreeImpl { /** * Adds to the operations list all operations required to mutate the children of * { @ code originalParentNode } to { @ code newParentNode } . * @ param originalParentNode required - must not be null * @ param newParentNode required - must not be null * @ param childDimensionsToProcess the child dimensions to process * @ param ops the operations list to add to */ private void addMutateOperationsByParent ( ProductPartitionNode originalParentNode , ProductPartitionNode newParentNode , Set < ProductDimension > childDimensionsToProcess , List < OperationPair > ops ) { } }
for ( ProductDimension dimensionToProcess : childDimensionsToProcess ) { ProductPartitionNode originalChild = originalParentNode . hasChild ( dimensionToProcess ) ? originalParentNode . getChild ( dimensionToProcess ) : null ; ProductPartitionNode newChild = newParentNode . hasChild ( dimensionToProcess ) ? newParentNode . getChild ( dimensionToProcess ) : null ; Set < ProductDimension > grandchildDimensionsToProcess = addMutateOperations ( originalChild , newChild , ops ) ; if ( ! grandchildDimensionsToProcess . isEmpty ( ) ) { // Logic check - the only condition where further processing of children is required // is when the parent exists in both trees . If the parent is null in one tree but // not the other , then the node for dimensionToProcess was either : // 1 ) removed from the original OR // 2 ) added to the new tree // In both cases , the call to addMutateOperations above will have already added all of the // necessary operations to handle the node and all of its children . Preconditions . checkState ( originalChild != null , "Original child should not be null if there are children to process" ) ; Preconditions . checkState ( newChild != null , "New child should not be null if there are children to process" ) ; addMutateOperationsByParent ( originalChild , newChild , grandchildDimensionsToProcess , ops ) ; } }
public class Base64 { /** * Prints command line usage . * @ param msg A message to include with usage info . */ private final static void usage ( final String msg ) { } }
System . err . println ( msg ) ; System . err . println ( "Usage: java Base64 -e|-d inputfile outputfile" ) ;
public class CmsWidgetDialog { /** * Creates the complete widget dialog end block HTML that finishes a widget block . < p > * @ return the complete widget dialog end block HTML that finishes a widget block */ protected String createWidgetBlockEnd ( ) { } }
StringBuffer result = new StringBuffer ( 8 ) ; result . append ( createWidgetTableEnd ( ) ) ; result . append ( dialogBlockEnd ( ) ) ; return result . toString ( ) ;
public class SoftTFIDF { /** * Explain how the distance was computed . In the output , the tokens in S and T are listed , and the common tokens are * marked with an asterisk . */ public String explainScore ( StringWrapper s , StringWrapper t ) { } }
BagOfTokens sBag = ( BagOfTokens ) s ; BagOfTokens tBag = ( BagOfTokens ) t ; StringBuilder buf = new StringBuilder ( "" ) ; PrintfFormat fmt = new PrintfFormat ( "%.3f" ) ; buf . append ( "Common tokens: " ) ; for ( Iterator < Token > i = sBag . tokenIterator ( ) ; i . hasNext ( ) ; ) { Token tok = i . next ( ) ; if ( tBag . contains ( tok ) ) { buf . append ( " " + tok . getValue ( ) + ": " ) ; buf . append ( fmt . sprintf ( sBag . getWeight ( tok ) ) ) ; buf . append ( "*" ) ; buf . append ( fmt . sprintf ( tBag . getWeight ( tok ) ) ) ; } else { // find best matching token double matchScore = tokenMatchThreshold ; Token matchTok = null ; for ( Iterator < Token > j = tBag . tokenIterator ( ) ; j . hasNext ( ) ; ) { Token tokJ = j . next ( ) ; double distItoJ = tokenDistance . score ( tok . getValue ( ) , tokJ . getValue ( ) ) ; if ( distItoJ >= matchScore ) { matchTok = tokJ ; matchScore = distItoJ ; } } if ( matchTok != null ) { buf . append ( " '" + tok . getValue ( ) + "'~='" + matchTok . getValue ( ) + "': " ) ; buf . append ( fmt . sprintf ( sBag . getWeight ( tok ) ) ) ; buf . append ( "*" ) ; buf . append ( fmt . sprintf ( tBag . getWeight ( matchTok ) ) ) ; buf . append ( "*" ) ; buf . append ( fmt . sprintf ( matchScore ) ) ; } } } buf . append ( "\nscore = " + score ( s , t ) ) ; return buf . toString ( ) ;
public class DefaultOrg { /** * Default Policy is to set to 6 Months for Notification Types . * add others / change as required */ @ Override public Date whenToValidate ( Notify type , Date lastValidated ) { } }
switch ( type ) { case Approval : case PasswordExpiration : return null ; default : GregorianCalendar gc = new GregorianCalendar ( ) ; gc . setTime ( lastValidated ) ; gc . add ( GregorianCalendar . MONTH , 6 ) ; // 6 month policy return gc . getTime ( ) ; }
public class DMatrix { /** * Transpose the Frame as if it was a matrix ( rows < - > columns ) . * Must be all numeric , will fail if there are too many rows ( > = ~ . 5M ) . * Result is made to be compatible ( i . e . the same vector group and chunking ) with the target frame . * @ param src * @ return */ public static Frame transpose ( Frame src , Frame tgt ) { } }
if ( src . numRows ( ) != tgt . numCols ( ) || src . numCols ( ) != tgt . numRows ( ) ) throw new IllegalArgumentException ( "dimension do not match!" ) ; for ( Vec v : src . vecs ( ) ) { if ( v . isCategorical ( ) ) throw new IllegalArgumentException ( "transpose can only be applied to all-numeric frames (representing a matrix)" ) ; if ( v . length ( ) > 1000000 ) throw new IllegalArgumentException ( "too many rows, transpose only works for frames with < 1M rows." ) ; } new TransposeTsk ( tgt ) . doAll ( src ) ; return tgt ;
public class AbstractGoogleAsyncWriteChannel { /** * Initialize this channel object for writing . * @ throws IOException */ public void initialize ( ) throws IOException { } }
// Create a pipe such that its one end is connected to the input stream used by // the uploader and the other end is the write channel used by the caller . PipedInputStream pipeSource = new PipedInputStream ( pipeBufferSize ) ; OutputStream pipeSink = new PipedOutputStream ( pipeSource ) ; pipeSinkChannel = Channels . newChannel ( pipeSink ) ; // Connect pipe - source to the stream used by uploader . InputStreamContent objectContentStream = new InputStreamContent ( contentType , pipeSource ) ; // Indicate that we do not know length of file in advance . objectContentStream . setLength ( - 1 ) ; objectContentStream . setCloseInputStream ( false ) ; T request = createRequest ( objectContentStream ) ; request . setDisableGZipContent ( true ) ; // Change chunk size from default value ( 10MB ) to one that yields higher performance . clientRequestHelper . setChunkSize ( request , uploadChunkSize ) ; // Given that the two ends of the pipe must operate asynchronous relative // to each other , we need to start the upload operation on a separate thread . uploadOperation = threadPool . submit ( new UploadOperation ( request , pipeSource ) ) ; isInitialized = true ;
public class AbstractContext { /** * / * ( non - Javadoc ) * @ see com . abubusoft . kripton . BinderContext # parseList ( java . io . Reader , java . lang . Class ) */ @ Override public < E > List < E > parseList ( Reader source , Class < E > objectClazz ) { } }
return parseCollection ( source , new ArrayList < E > ( ) , objectClazz ) ;
public class SolrWrapperQueueConsumer { /** * Method to fire on timeout ( ) events to ensure buffers don ' t go stale after * the last item in a harvest passes through . */ private void checkTimeout ( ) { } }
if ( timerMDC != null ) { MDC . put ( "name" , timerMDC ) ; } if ( docBuffer . isEmpty ( ) ) { return ; } // How long has the NEWest item been waiting ? long wait = ( ( new Date ( ) . getTime ( ) ) - bufferYoungest ) / 1000 ; // If the buffer has been updated in the last 20s ignore it if ( wait < 20 ) { return ; } // Else , time to flush the buffer log . debug ( "=== Flushing old buffer: {}s" , wait ) ; submitBuffer ( true ) ;
public class backup_policy { /** * Use this API to fetch filtered set of backup _ policy resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static backup_policy [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
backup_policy obj = new backup_policy ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; backup_policy [ ] response = ( backup_policy [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class PtoPLocalMsgsItemStream { /** * Sets the current alteration time fro this localization . * @ param newAlterationTime */ public void setAlterationTime ( long newAlterationTime ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "setAlterationTime" , new Long ( newAlterationTime ) ) ; this . _alterationTime = newAlterationTime ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "setAlterationTime" ) ;
public class FYShuffle { /** * Randomly shuffle a double array * @ param doubleArray array of double to shuffle . */ public static void shuffle ( double [ ] doubleArray ) { } }
int swapPlace = - 1 ; for ( int i = 0 ; i < doubleArray . length ; i ++ ) { swapPlace = ( int ) ( Math . random ( ) * ( doubleArray . length - 1 ) ) ; TrivialSwap . swap ( doubleArray , i , swapPlace ) ; }
public class ValidDBInstanceModificationsMessage { /** * Valid storage options for your DB instance . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setStorage ( java . util . Collection ) } or { @ link # withStorage ( java . util . Collection ) } if you want to override * the existing values . * @ param storage * Valid storage options for your DB instance . * @ return Returns a reference to this object so that method calls can be chained together . */ public ValidDBInstanceModificationsMessage withStorage ( ValidStorageOptions ... storage ) { } }
if ( this . storage == null ) { setStorage ( new com . amazonaws . internal . SdkInternalList < ValidStorageOptions > ( storage . length ) ) ; } for ( ValidStorageOptions ele : storage ) { this . storage . add ( ele ) ; } return this ;
public class AccountHeaderBuilder { /** * You can pass a custom layout for the drawer lib . see the drawer . xml in layouts of this lib on GitHub * @ param resLayout * @ return */ public AccountHeaderBuilder withAccountHeader ( @ LayoutRes int resLayout ) { } }
if ( mActivity == null ) { throw new RuntimeException ( "please pass an activity first to use this call" ) ; } if ( resLayout != - 1 ) { this . mAccountHeaderContainer = mActivity . getLayoutInflater ( ) . inflate ( resLayout , null , false ) ; } else { if ( mCompactStyle ) { this . mAccountHeaderContainer = mActivity . getLayoutInflater ( ) . inflate ( R . layout . material_drawer_compact_header , null , false ) ; } else { this . mAccountHeaderContainer = mActivity . getLayoutInflater ( ) . inflate ( R . layout . material_drawer_header , null , false ) ; } } return this ;
public class BootModule { /** * Extract the current value of the PUB _ URI from the system ' s property or form the platform default value . * @ return the current PUB _ URI */ private static String getPUBURIAsString ( ) { } }
String pubUri = JanusConfig . getSystemProperty ( JanusConfig . PUB_URI ) ; if ( pubUri == null || pubUri . isEmpty ( ) ) { InetAddress a = NetworkUtil . getPrimaryAddress ( ) ; if ( a == null ) { a = NetworkUtil . getLoopbackAddress ( ) ; } if ( a != null ) { pubUri = NetworkUtil . toURI ( a , - 1 ) . toString ( ) ; System . setProperty ( JanusConfig . PUB_URI , pubUri ) ; } } return pubUri ;
public class ConfigurationService { /** * Judge whether schema has data source configuration . * @ param shardingSchemaName shading schema name * @ return has data source configuration or not */ public boolean hasDataSourceConfiguration ( final String shardingSchemaName ) { } }
return ! Strings . isNullOrEmpty ( regCenter . get ( configNode . getDataSourcePath ( shardingSchemaName ) ) ) ;
public class PhpHttpdSshDriver { /** * TODO refactos using the strategy pattern */ private String installPhp ( ) { } }
log . debug ( "Installing PHP v" , new Object [ ] { getEntity ( ) . getPhpVersion ( ) } ) ; if ( getEntity ( ) . getPhpVersion ( ) . equals ( "5.4" ) ) { return instalPhp54v ( ) ; } else { return installPhpSuggestedVersionByDefault ( ) ; }
public class TypicalFaicliApiFailureHook { protected FaicliFailureErrorPart createSimpleError ( String field , String code ) { } }
return newFailureErrorPart ( field , code , Collections . emptyMap ( ) ) ;
public class SeleniumSpec { /** * Check if the length of the cookie set match with the number of cookies thas must be saved * @ param numberOfCookies number of cookies that must be saved */ @ Then ( "^I have '(.+?)' selenium cookies saved$" ) public void getSeleniumCookiesSize ( int numberOfCookies ) throws Exception { } }
Assertions . assertThat ( commonspec . getSeleniumCookies ( ) . size ( ) ) . isEqualTo ( numberOfCookies ) ;
public class BufferedFileLineReader { /** * If the sought position is within the buffer - simply sets the current * buffer position so the next read will be from the buffer . Otherwise seeks * in the RAF and reset the buffer end and current positions . * @ param pos * @ throws IOException */ public void seek ( long pos ) throws IOException { } }
int n = ( int ) ( real_pos - pos ) ; if ( n >= 0 && n <= buf_end ) { buf_pos = buf_end - n ; } else { raf . seek ( pos ) ; buf_end = 0 ; buf_pos = 0 ; real_pos = raf . getFilePointer ( ) ; }
public class ArtifactResolver { /** * Creates a new RepositorySystemSession . * @ param system A RepositorySystem to get a LocalRepositoryManager from . * @ return The new instance of a RespositorySystemSession . */ protected RepositorySystemSession newSession ( RepositorySystem system ) { } }
MavenRepositorySystemSession session = new MavenRepositorySystemSession ( ) ; LocalRepository localRepo = new LocalRepository ( localRepository ) ; session . setLocalRepositoryManager ( system . newLocalRepositoryManager ( localRepo ) ) ; return session ;
public class Component { /** * Convenience method for retrieving a named property . * @ param name name of the property to retrieve * @ return the first matching property in the property list with the specified name */ public final < T extends Property > T getProperty ( final String name ) { } }
return ( T ) getProperties ( ) . getProperty ( name ) ;
public class GroupApi { /** * Get a list of visible direct subgroups in this group . * < pre > < code > GitLab Endpoint : GET / groups / : id / subgroups < / code > < / pre > * @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path , required * @ return a List & lt ; Group & gt ; containing the group ' s sub - groups * @ throws GitLabApiException if any exception occurs * @ since GitLab 10.3.0 */ public List < Group > getSubGroups ( Object groupIdOrPath ) throws GitLabApiException { } }
return ( getSubGroups ( groupIdOrPath , getDefaultPerPage ( ) ) . all ( ) ) ;
public class Files { /** * Copy source file to target . Copy destination should be a file and this method throws access denied if attempt to * write to a directory . Source file should exist but target is created by this method , but if not already exist . * @ param source file to read from , should exist , * @ param target file to write to . * @ return the number of bytes transfered . * @ throws FileNotFoundException if source file does not exist or target file does not exist and cannot be created . * @ throws IOException if copy operation fails , including if < code > target < / code > is a directory . */ public static long copy ( File source , File target ) throws FileNotFoundException , IOException { } }
return copy ( new FileInputStream ( source ) , new FileOutputStream ( target ) ) ;
public class JMPredicate {

    /**
     * Builds a predicate that is true for numbers strictly less than the
     * target. Comparison is done on double values; the target is re-read on
     * every evaluation, so mutable Number implementations are observed live.
     *
     * @param target the value to compare against
     * @return predicate testing {@code candidate < target}
     */
    public static Predicate<Number> getLess(Number target) {
        return candidate -> candidate.doubleValue() < target.doubleValue();
    }
}
public class WebAppConfiguration { /** * { @ inheritDoc } */ @ Override public String getMimeType ( String extension ) { } }
// look for app - configured mime type String mimeType = super . getMimeType ( extension ) ; if ( mimeType == null ) { mimeType = dVirtualHost . getMimeType ( extension ) ; } return mimeType ;
public class ExceptionUtil {

    /**
     * Rewrites the stack trace of an asynchronously produced exception so it
     * contains the remote (async) frames followed by a separator frame and
     * then the local-side frames.
     *
     * If {@code asyncCause} is an {@link java.util.concurrent.ExecutionException}
     * with a non-null cause, the inner cause is unwrapped and receives the
     * combined trace instead of the wrapper itself.
     *
     * @param asyncCause the async exception
     * @param localSideStackTrace the local stack trace to append; frame 0 is
     *        skipped (presumably the capture site itself — TODO confirm with
     *        callers). NOTE(review): an empty array would make the final
     *        arraycopy length negative and throw — assumes callers always
     *        pass at least one frame.
     */
    public static void fixAsyncStackTrace(Throwable asyncCause, StackTraceElement[] localSideStackTrace) {
        Throwable throwable = asyncCause;
        // Unwrap ExecutionException so the combined trace lands on the real cause.
        if (asyncCause instanceof ExecutionException && throwable.getCause() != null) {
            throwable = throwable.getCause();
        }
        StackTraceElement[] remoteStackTrace = throwable.getStackTrace();
        // Layout: [remote frames][separator][local frames 1..n-1].
        StackTraceElement[] newStackTrace = new StackTraceElement[localSideStackTrace.length + remoteStackTrace.length];
        System.arraycopy(remoteStackTrace, 0, newStackTrace, 0, remoteStackTrace.length);
        // Synthetic marker frame separating the remote and local portions.
        newStackTrace[remoteStackTrace.length] = new StackTraceElement(EXCEPTION_SEPARATOR, "", null, -1);
        // Copy local frames starting at index 1 (dropping the capture frame).
        System.arraycopy(localSideStackTrace, 1, newStackTrace, remoteStackTrace.length + 1, localSideStackTrace.length - 1);
        throwable.setStackTrace(newStackTrace);
    }
}
public class PersistentEntityStoreImpl {

    /**
     * Deletes all outgoing links of the specified entity.
     *
     * Scans the links table from the first key of the entity's local id,
     * deletes each (key, value) pair, records which link ids were removed,
     * and finally drops the per-link index entries for those link ids.
     *
     * @param txn the store transaction
     * @param entity the entity whose outgoing links are removed
     */
    private void deleteLinks(@NotNull final PersistentStoreTransaction txn, @NotNull final PersistentEntity entity) {
        final PersistentEntityId id = entity.getId();
        final int entityTypeId = id.getTypeId();
        final long entityLocalId = id.getLocalId();
        final Transaction envTxn = txn.getEnvironmentTransaction();
        final LinksTable links = getLinksTable(txn, entityTypeId);
        // Link ids actually removed; used for index cleanup afterwards.
        final IntHashSet deletedLinks = new IntHashSet();
        try (Cursor cursor = links.getFirstIndexCursor(envTxn)) {
            // Position at the first possible key for this entity (propertyId 0)
            // and walk forward while keys still belong to it.
            for (boolean success = cursor.getSearchKeyRange(PropertyKey.propertyKeyToEntry(new PropertyKey(entityLocalId, 0))) != null;
                 success; success = cursor.getNext()) {
                final ByteIterable keyEntry = cursor.getKey();
                final PropertyKey key = PropertyKey.entryToPropertyKey(keyEntry);
                // Keys are ordered by local id; a different id means we are done.
                if (key.getEntityLocalId() != entityLocalId) {
                    break;
                }
                final ByteIterable valueEntry = cursor.getValue();
                if (links.delete(envTxn, keyEntry, valueEntry)) {
                    int linkId = key.getPropertyId();
                    // Only links with a registered name are tracked/notified.
                    if (getLinkName(txn, linkId) != null) {
                        deletedLinks.add(linkId);
                        final LinkValue linkValue = LinkValue.entryToLinkValue(valueEntry);
                        // Notify the transaction so caches/handlers see the deletion.
                        txn.linkDeleted(entity.getId(), (PersistentEntityId) linkValue.getEntityId(), linkValue.getLinkId());
                    }
                }
            }
        }
        // Second pass: remove the index entries for every deleted link id.
        for (Integer linkId : deletedLinks) {
            links.deleteAllIndex(envTxn, linkId, entityLocalId);
        }
    }
}
public class JKTableModel { public Vector < Vector > getDeletedRecordsAsDataVector ( ) { } }
final Vector < Vector > data = new Vector < Vector > ( ) ; for ( final JKTableRecord rec : this . deletedRecords ) { data . add ( rec . toValuesVector ( ) ) ; } return data ;
public class FileChannelLogWriter {

    /**
     * Writes a record as a stream of chunks such that no chunk is split
     * across a block boundary. A record that fits in the current block is
     * written as a single FULL chunk; otherwise it is fragmented into
     * FIRST / MIDDLE* / LAST chunks, padding the tail of a block with zeros
     * whenever fewer than HEADER_SIZE bytes remain in it.
     *
     * @param record the record payload to append
     * @param force whether to force the channel (fsync metadata excluded) after writing
     * @throws IOException if a channel write fails
     */
    @Override
    public synchronized void addRecord(Slice record, boolean force) throws IOException {
        checkState(!closed.get(), "Log has been closed");
        SliceInput sliceInput = record.input();
        // used to track first, middle and last blocks
        boolean begin = true;
        // Fragment the record into chunks as necessary and write it. Note that if record
        // is empty, we still want to iterate once to write a single zero-length chunk.
        do {
            int bytesRemainingInBlock = BLOCK_SIZE - blockOffset;
            checkState(bytesRemainingInBlock >= 0);
            // Switch to a new block if necessary: a chunk header must never straddle blocks.
            if (bytesRemainingInBlock < HEADER_SIZE) {
                if (bytesRemainingInBlock > 0) {
                    // Fill the rest of the block with zeros
                    // todo lame... need a better way to write zeros
                    fileChannel.write(ByteBuffer.allocate(bytesRemainingInBlock));
                }
                blockOffset = 0;
                bytesRemainingInBlock = BLOCK_SIZE - blockOffset;
            }
            // Invariant: we never leave less than HEADER_SIZE bytes available in a block.
            int bytesAvailableInBlock = bytesRemainingInBlock - HEADER_SIZE;
            checkState(bytesAvailableInBlock >= 0);
            // If there are more bytes in the record than available in the block,
            // fragment the record; otherwise write to the end of the record.
            boolean end;
            int fragmentLength;
            if (sliceInput.available() > bytesAvailableInBlock) {
                end = false;
                fragmentLength = bytesAvailableInBlock;
            } else {
                end = true;
                fragmentLength = sliceInput.available();
            }
            // Determine chunk type from whether this fragment starts/ends the record.
            LogChunkType type;
            if (begin && end) {
                type = LogChunkType.FULL;
            } else if (begin) {
                type = LogChunkType.FIRST;
            } else if (end) {
                type = LogChunkType.LAST;
            } else {
                type = LogChunkType.MIDDLE;
            }
            // write the chunk
            writeChunk(type, sliceInput.readSlice(fragmentLength));
            // we are no longer on the first chunk
            begin = false;
        } while (sliceInput.isReadable());
        if (force) {
            // force(false): flush data but not file metadata.
            fileChannel.force(false);
        }
    }
}
public class UnindexedMatcher {

    /**
     * Less-than comparison used by unindexed matching.
     *
     * Ordering rules implemented here:
     * - null on either side fails every lt/gt/lte/gte test (returns false);
     * - a left value that is neither String nor Number is logged and fails;
     * - Numbers sort before Strings (NUMBER &lt; STRING is true, the reverse false);
     * - String vs String compares lexicographically;
     * - Number vs Number compares on doubleValue().
     *
     * NOTE(review): when l is a Number and r is neither String nor Number,
     * the final branch casts r to Number and would throw ClassCastException —
     * confirm callers pre-filter r's type.
     *
     * @param l left operand
     * @param r right operand
     * @return true iff l &lt; r under the rules above
     */
    protected static boolean compareLT(Object l, Object r) {
        if (l == null || r == null) {
            return false; // null fails all lt/gt/lte/gte tests
        } else if (!(l instanceof String || l instanceof Number)) {
            String msg = String.format("Value in document not a Number or String: %s", l);
            logger.log(Level.WARNING, msg);
            return false; // Not sure how to compare values that are not numbers or strings
        } else if (l instanceof String) {
            if (r instanceof Number) {
                return false; // INT < STRING
            }
            String lStr = (String) l;
            String rStr = (String) r;
            return lStr.compareTo(rStr) < 0;
        } else if (r instanceof String) {
            // At this point in the logic l can only be a number
            return true; // INT < STRING
        } else {
            // At this point in the logic both l and r can only be numbers
            Number lNum = (Number) l;
            Number rNum = (Number) r;
            return lNum.doubleValue() < rNum.doubleValue();
        }
    }
}
public class NetworkServiceDescriptorAgent { /** * Returns a List of all Security objects that are contained in a specific * NetworkServiceDescriptor . * @ param idNsd the ID of the NetworkServiceDescriptor * @ return the List of Security objects * @ throws SDKException if the request fails */ @ Help ( help = "Get all the Security of a NetworkServiceDescriptor with specific id" ) public Security getSecurities ( final String idNsd ) throws SDKException { } }
String url = idNsd + "/security" ; return ( ( Security ) requestGet ( url , Security . class ) ) ;
public class CommerceWishListPersistenceImpl { /** * Returns the commerce wish lists before and after the current commerce wish list in the ordered set where uuid = & # 63 ; . * @ param commerceWishListId the primary key of the current commerce wish list * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next commerce wish list * @ throws NoSuchWishListException if a commerce wish list with the primary key could not be found */ @ Override public CommerceWishList [ ] findByUuid_PrevAndNext ( long commerceWishListId , String uuid , OrderByComparator < CommerceWishList > orderByComparator ) throws NoSuchWishListException { } }
CommerceWishList commerceWishList = findByPrimaryKey ( commerceWishListId ) ; Session session = null ; try { session = openSession ( ) ; CommerceWishList [ ] array = new CommerceWishListImpl [ 3 ] ; array [ 0 ] = getByUuid_PrevAndNext ( session , commerceWishList , uuid , orderByComparator , true ) ; array [ 1 ] = commerceWishList ; array [ 2 ] = getByUuid_PrevAndNext ( session , commerceWishList , uuid , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class JolokiaServer {

    /**
     * Initializes this JolokiaServer with an HttpServer created and owned by
     * this instance.
     *
     * @param pConfig configuration to use
     * @param pLazy whether initialization should be done lazily
     * @throws IOException if creating the HttpServer fails
     */
    protected final void init(JolokiaServerConfig pConfig, boolean pLazy) throws IOException {
        // Own the server instance ourselves, then delegate to the shared init.
        this.httpServer = createHttpServer(pConfig);
        init(this.httpServer, pConfig, pLazy);
    }
}
public class GpsTracesDao { /** * Get information about a given GPS trace or null if it does not exist . * @ throws OsmAuthorizationException if this application is not authorized to read the user ' s * traces ( Permission . READ _ GPS _ TRACES ) * OR if the trace in question is not the user ' s own trace * and at the same time it is not public * OR if not logged in at all */ public GpsTraceDetails get ( long id ) { } }
SingleElementHandler < GpsTraceDetails > handler = new SingleElementHandler < > ( ) ; try { osm . makeAuthenticatedRequest ( GPX + "/" + id , "GET" , new GpsTracesParser ( handler ) ) ; } catch ( OsmNotFoundException e ) { return null ; } return handler . get ( ) ;
public class ReflectionUtils {

    /**
     * Returns the Field with the given name, searching the whole class
     * hierarchy of the object (excluding java.lang.Object).
     *
     * Java reflection either gives all public fields up the hierarchy
     * (getField()) or all declared fields of a single class
     * (getDeclaredField()); this method uses the latter while walking up the
     * superclass chain, making the field accessible before returning it.
     *
     * @param o object whose class hierarchy is searched
     * @param field_name name of the field to find
     * @return the accessible Field, or null if no class in the hierarchy declares it
     */
    public static Field findNamedField(Object o, String field_name) {
        Class<?> clz = o.getClass();
        // BUG FIX: the original do/while only stopped at Object.class, so a
        // starting class of Object itself stepped past it to a null
        // superclass and threw NullPointerException. Checking for null as
        // well makes the walk safe for every starting class.
        while (clz != null && clz != Object.class) {
            try {
                Field f = clz.getDeclaredField(field_name);
                f.setAccessible(true);
                return f;
            } catch (NoSuchFieldException e) {
                // Not declared on this class; fall through to the superclass.
            }
            clz = clz.getSuperclass();
        }
        return null;
    }
}
public class JCasUtil2 { /** * Counts the number of features structures of the given type in the JCas * @ param type the type * @ param aJCas the JCas * @ return the number of occurrences */ public static < T extends TOP > int count ( final Class < T > type , final JCas aJCas ) { } }
return JCasUtil . select ( aJCas , type ) . size ( ) ;
public class GraphHopper {

    /**
     * Opens an existing graph folder. The folder may also be provided as a
     * compressed sibling file ("&lt;folder&gt;.ghz") which is extracted first.
     *
     * Initialization order matters here: location checks and optional
     * unzip, then location/encoding setup, then storage construction, then
     * (under an optional file lock) loading of the existing storage.
     *
     * @param graphHopperFolder folder containing graphhopper files
     * @return true if an existing graph was loaded; false if the folder does
     *         not exist or the storage has no existing data
     */
    @Override
    public boolean load(String graphHopperFolder) {
        if (isEmpty(graphHopperFolder))
            throw new IllegalStateException("GraphHopperLocation is not specified. Call setGraphHopperLocation or init before");
        if (fullyLoaded)
            throw new IllegalStateException("graph is already successfully loaded");

        File tmpFileOrFolder = new File(graphHopperFolder);
        if (!tmpFileOrFolder.isDirectory() && tmpFileOrFolder.exists()) {
            throw new IllegalArgumentException("GraphHopperLocation cannot be an existing file. Has to be either non-existing or a folder.");
        } else {
            // A compressed archive next to the folder is extracted in place.
            File compressed = new File(graphHopperFolder + ".ghz");
            if (compressed.exists() && !compressed.isDirectory()) {
                try {
                    new Unzipper().unzip(compressed.getAbsolutePath(), graphHopperFolder, removeZipped);
                } catch (IOException ex) {
                    throw new RuntimeException("Couldn't extract file " + compressed.getAbsolutePath() + " to " + graphHopperFolder, ex);
                }
            }
        }

        setGraphHopperLocation(graphHopperFolder);

        if (encodingManager == null)
            setEncodingManager(EncodingManager.create(flagEncoderFactory, ghLocation));

        // Read-only mmap when writes are disallowed.
        if (!allowWrites && dataAccessType.isMMap())
            dataAccessType = DAType.MMAP_RO;

        GHDirectory dir = new GHDirectory(ghLocation, dataAccessType);
        GraphExtension ext = encodingManager.needsTurnCostsSupport() ? new TurnCostExtension() : new GraphExtension.NoOpExtension();

        if (lmFactoryDecorator.isEnabled())
            initLMAlgoFactoryDecorator();

        if (chFactoryDecorator.isEnabled()) {
            initCHAlgoFactoryDecorator();
            ghStorage = new GraphHopperStorage(chFactoryDecorator.getNodeBasedWeightings(), chFactoryDecorator.getEdgeBasedWeightings(), dir, encodingManager, hasElevation(), ext);
        } else {
            ghStorage = new GraphHopperStorage(dir, encodingManager, hasElevation(), ext);
        }

        ghStorage.setSegmentSize(defaultSegmentSize);

        // Folder may still be missing (e.g. nothing to unzip): not an error, just not loaded.
        if (!new File(graphHopperFolder).exists())
            return false;

        GHLock lock = null;
        try {
            // create locks only if writes are allowed, if they are not allowed a lock cannot be created
            // (e.g. on a read only filesystem locks would fail)
            if (ghStorage.getDirectory().getDefaultType().isStoring() && isAllowWrites()) {
                lockFactory.setLockDir(new File(ghLocation));
                lock = lockFactory.create(fileLockName, false);
                if (!lock.tryLock())
                    throw new RuntimeException("To avoid reading partial data we need to obtain the read lock but it failed. In " + ghLocation, lock.getObtainFailedReason());
            }

            if (!ghStorage.loadExisting())
                return false;

            postProcessing();
            fullyLoaded = true;
            return true;
        } finally {
            // Always release the lock, even on failure paths.
            if (lock != null)
                lock.release();
        }
    }
}
public class AgentPremain { /** * JavaAgent premain entry point as specified in the MANIFEST . MF file . See * < a href = " http : / / java . sun . com / javase / 6 / docs / api / java / lang / instrument / package - summary . html " > http : / / java . sun . com / javase / 6 / docs / api / java / lang / instrument / package - summary . html < / a > for details . * @ param agentArgument * string provided after " = " up to first space * @ param instrumentation * instrumentation environment provided by the JVM */ public static void premain ( String agentArgument , Instrumentation instrumentation ) { } }
// We cannot do sanity checks for slf4j here as the jars loaded // by the application are not visible here . LogTransformer . Builder builder = new LogTransformer . Builder ( ) ; builder = builder . addEntryExit ( true ) ; if ( agentArgument != null ) { Properties args = parseArguments ( agentArgument , "," ) ; if ( args . containsKey ( AgentOptions . VERBOSE ) ) { builder = builder . verbose ( true ) ; } if ( args . containsKey ( AgentOptions . TIME ) ) { printStartStopTimes ( ) ; } if ( args . containsKey ( AgentOptions . IGNORE ) ) { String ignore = args . getProperty ( AgentOptions . IGNORE ) ; builder = builder . ignore ( ignore . split ( ":" ) ) ; } if ( args . containsKey ( AgentOptions . LEVEL ) ) { builder = builder . level ( args . getProperty ( AgentOptions . LEVEL ) ) ; } } instrumentation . addTransformer ( builder . build ( ) ) ;
public class WorkflowsInner { /** * Gets a list of workflows by subscription . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; WorkflowInner & gt ; object */ public Observable < Page < WorkflowInner > > listNextAsync ( final String nextPageLink ) { } }
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < WorkflowInner > > , Page < WorkflowInner > > ( ) { @ Override public Page < WorkflowInner > call ( ServiceResponse < Page < WorkflowInner > > response ) { return response . body ( ) ; } } ) ;
public class EventHandler { /** * Function to copy the event buffer , clear it , and iterate of the copy , * calling each event ' s flush ( ) method one by one . * NOTE : This function is subject to a race condition ; while the buffer copy * is being iterated over , the next round of buffer entries could be flushed * creating a flush order that is not " strictly consistent " . While this could * hypothetically also cause the system to run out of memory due to an * unbounded number of eventBuffer copies , that scenario is unlikely . */ private void flushEventBuffer ( ) { } }
ArrayList < Event > eventBufferCopy ; logger . debug ( "Flushing eventBuffer" ) ; // Copy event buffer because this may be long running synchronized ( this ) { eventBufferCopy = new ArrayList < > ( eventBuffer ) ; eventBuffer . clear ( ) ; } for ( Event event : eventBufferCopy ) { event . flush ( ) ; }
public class IfcSpatialStructureElementImpl {

    /**
     * Returns the containment relations of this spatial structure element by
     * resolving the EMF feature (second argument {@code true} requests
     * resolution of proxies).
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated — produced by the EMF code generator; do not hand-edit.
     */
    @SuppressWarnings("unchecked")
    public EList<IfcRelContainedInSpatialStructure> getContainsElements() {
        return (EList<IfcRelContainedInSpatialStructure>) eGet(Ifc2x3tc1Package.Literals.IFC_SPATIAL_STRUCTURE_ELEMENT__CONTAINS_ELEMENTS, true);
    }
}
public class ControlFactory { /** * Create a new instance of the frontend network control for the desired * version . * @ param version * the desired network control version * @ return a new network control instance */ public static Control createInstance ( ControlVersion version ) { } }
switch ( version ) { case _0_24 : return new Control0_24 ( ) ; case _0_25 : return new Control0_25 ( ) ; default : throw new IllegalArgumentException ( "Unknown control version: " + version ) ; }
public class GADImpl {

    /**
     * Reports whether the given feature currently differs from its default
     * value (the standard EMF "is set" check, null-safe against a null
     * default).
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated — produced by the EMF code generator; do not hand-edit.
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case AfplibPackage.GAD__GOC_ADAT:
                // Set iff the value differs from the (possibly null) default.
                return GOC_ADAT_EDEFAULT == null ? gocAdat != null : !GOC_ADAT_EDEFAULT.equals(gocAdat);
        }
        // Unknown features are delegated to the superclass.
        return super.eIsSet(featureID);
    }
}
public class CoreSynonymDictionary { /** * 将分词结果转换为同义词列表 * @ param sentence 句子 * @ param withUndefinedItem 是否保留词典中没有的词语 * @ return */ public static List < CommonSynonymDictionary . SynonymItem > convert ( List < Term > sentence , boolean withUndefinedItem ) { } }
List < CommonSynonymDictionary . SynonymItem > synonymItemList = new ArrayList < CommonSynonymDictionary . SynonymItem > ( sentence . size ( ) ) ; for ( Term term : sentence ) { CommonSynonymDictionary . SynonymItem item = get ( term . word ) ; if ( item == null ) { if ( withUndefinedItem ) { item = CommonSynonymDictionary . SynonymItem . createUndefined ( term . word ) ; synonymItemList . add ( item ) ; } } else { synonymItemList . add ( item ) ; } } return synonymItemList ;
public class SnowballAnalyzerBuilder {

    /**
     * Builds the snowball analyzer for the configured language, using the
     * configured stopwords when present and the language defaults otherwise.
     *
     * @return the built Analyzer
     */
    @Override
    public Analyzer analyzer() {
        final CharArraySet stopwordSet =
                (stopwords == null) ? getDefaultStopwords(language) : getStopwords(stopwords);
        return buildAnalyzer(language, stopwordSet);
    }
}
public class HighwayHash {

    /**
     * Updates the hash with 32 bytes of data given as 4 longs. This function
     * is more efficient than updatePacket when usable.
     *
     * One HighwayHash mixing round: the input lanes are injected into v1,
     * the 32x32 multiply lanes cross-feed mul0/mul1 and v0/v1, and the
     * zipper-merge steps permute bytes between lane pairs.
     *
     * @param a0 first 8 bytes as a little-endian 64-bit long
     * @param a1 next 8 bytes as a little-endian 64-bit long
     * @param a2 next 8 bytes as a little-endian 64-bit long
     * @param a3 last 8 bytes as a little-endian 64-bit long
     * @throws IllegalStateException if the hash was already finalized
     */
    public void update(long a0, long a1, long a2, long a3) {
        // Instances are single-use: no updates after finalization.
        if (done) {
            throw new IllegalStateException("Can compute a hash only once per instance");
        }
        // Inject the input lanes (pre-mixed with mul0) into v1.
        v1[0] += mul0[0] + a0;
        v1[1] += mul0[1] + a1;
        v1[2] += mul0[2] + a2;
        v1[3] += mul0[3] + a3;
        // Cross-feed: low 32 bits of one state lane times high 32 bits of the other.
        for (int i = 0; i < 4; ++i) {
            mul0[i] ^= (v1[i] & 0xffffffffL) * (v0[i] >>> 32);
            v0[i] += mul1[i];
            mul1[i] ^= (v0[i] & 0xffffffffL) * (v1[i] >>> 32);
        }
        // Byte-permuting merge of lane pairs, v1 -> v0 then v0 -> v1.
        v0[0] += zipperMerge0(v1[1], v1[0]);
        v0[1] += zipperMerge1(v1[1], v1[0]);
        v0[2] += zipperMerge0(v1[3], v1[2]);
        v0[3] += zipperMerge1(v1[3], v1[2]);
        v1[0] += zipperMerge0(v0[1], v0[0]);
        v1[1] += zipperMerge1(v0[1], v0[0]);
        v1[2] += zipperMerge0(v0[3], v0[2]);
        v1[3] += zipperMerge1(v0[3], v0[2]);
    }
}
public class SARLProjectConfigurator { /** * Replies the configuration value . * @ param < T > - the expected type . * @ param project the project . * @ param parameter the parameter name . * @ param asType the expected type . * @ param mojoExecution the mojo execution . * @ param monitor the monitor . * @ param defaultValue the default value . * @ return the value of the parameter . * @ throws CoreException if cannot read the value . */ protected < T > T getParameterValue ( MavenProject project , String parameter , Class < T > asType , MojoExecution mojoExecution , IProgressMonitor monitor , T defaultValue ) throws CoreException { } }
T value = getParameterValue ( project , parameter , asType , mojoExecution , monitor ) ; if ( value == null ) { value = defaultValue ; } return value ;
public class RootDocumentRepositoryMongoImpl {

    /**
     * Finds all root documents sharing the given document's filename and
     * re-parents each contained Root object before returning them.
     *
     * @param rootDocument document whose filename is used for the lookup
     * @return the matching documents, or null when the lookup yields null
     */
    @Override
    public final Iterable<RootDocument> findAll(final RootDocument rootDocument) {
        final Iterable<RootDocument> rootDocuments = findAll(rootDocument.getFilename());
        if (rootDocuments == null) {
            return null;
        }
        for (final RootDocument rootDocumentOut : rootDocuments) {
            final Root root = rootDocumentOut.getGedObject();
            // NOTE(review): both sides of this assignment come from
            // rootDocumentOut.getGedObject(), so the root's parent is set to
            // the root itself. Confirm whether a self-parent is the intended
            // convention for top-level Root objects or a copy-paste slip.
            root.setParent(rootDocumentOut.getGedObject());
        }
        return rootDocuments;
    }
}
public class DnsConfigChange { /** * An array that contains one < code > DnsRecord < / code > object for each Route 53 record that you want AWS Cloud Map to * create when you register an instance . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDnsRecords ( java . util . Collection ) } or { @ link # withDnsRecords ( java . util . Collection ) } if you want to * override the existing values . * @ param dnsRecords * An array that contains one < code > DnsRecord < / code > object for each Route 53 record that you want AWS Cloud * Map to create when you register an instance . * @ return Returns a reference to this object so that method calls can be chained together . */ public DnsConfigChange withDnsRecords ( DnsRecord ... dnsRecords ) { } }
if ( this . dnsRecords == null ) { setDnsRecords ( new java . util . ArrayList < DnsRecord > ( dnsRecords . length ) ) ; } for ( DnsRecord ele : dnsRecords ) { this . dnsRecords . add ( ele ) ; } return this ;
public class AppAPI { /** * Installs the app with the given id on the space . * @ param appId * The id of the app to be installed * @ param spaceId * The id of the space the app should be installed o n * @ return The id of the newly installed app */ public int install ( int appId , int spaceId ) { } }
return getResourceFactory ( ) . getApiResource ( "/app/" + appId + "/install" ) . entity ( new ApplicationInstall ( spaceId ) , MediaType . APPLICATION_JSON_TYPE ) . post ( ApplicationCreateResponse . class ) . getId ( ) ;
public class SimpleBox { /** * Encrypt the plaintext with the given key . * @ param plaintext any arbitrary bytes * @ return the ciphertext */ public byte [ ] seal ( byte [ ] plaintext ) { } }
final byte [ ] nonce = box . nonce ( plaintext ) ; final byte [ ] ciphertext = box . seal ( nonce , plaintext ) ; final byte [ ] combined = new byte [ nonce . length + ciphertext . length ] ; System . arraycopy ( nonce , 0 , combined , 0 , nonce . length ) ; System . arraycopy ( ciphertext , 0 , combined , nonce . length , ciphertext . length ) ; return combined ;
public class ObjectHasher { /** * Create a hash of a serializable object * @ param obj Serializable object * @ return Hash of the serializable object */ public static String hash ( Serializable obj ) { } }
if ( obj == null ) { return "" ; } StringBuilder hexString = new StringBuilder ( ) ; try { MessageDigest m = MessageDigest . getInstance ( "SHA1" ) ; m . update ( SerializationUtils . serialize ( obj ) ) ; byte [ ] mdbytes = m . digest ( ) ; for ( byte mdbyte : mdbytes ) { hexString . append ( Integer . toHexString ( 0xFF & mdbyte ) ) ; } } catch ( NoSuchAlgorithmException e ) { return "" ; } return hexString . toString ( ) ;
public class EncodingSchemeIDImpl {
    /**
     * Resets the given feature back to its default value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        // Generated EMF dispatch: restore the feature's EDEFAULT constant,
        // delegating unknown feature ids to the superclass.
        switch (featureID) {
            case AfplibPackage.ENCODING_SCHEME_ID__ESID_CP:
                setESidCP(ESID_CP_EDEFAULT);
                return;
            case AfplibPackage.ENCODING_SCHEME_ID__ESID_UD:
                setESidUD(ESID_UD_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }
}
public class ScalingPolicy { /** * The CloudWatch alarms related to the policy . * @ return The CloudWatch alarms related to the policy . */ public java . util . List < Alarm > getAlarms ( ) { } }
if ( alarms == null ) { alarms = new com . amazonaws . internal . SdkInternalList < Alarm > ( ) ; } return alarms ;
public class ELContext {
    /**
     * Called to indicate that an <code>ELResolver</code> has successfully
     * resolved a given (base, property) pair and to notify the
     * {@link EvaluationListener}s.
     * <p>The {@link CompositeELResolver} checks this property to determine
     * whether it should consider or skip other component resolvers.</p>
     *
     * @see CompositeELResolver
     * @param base The base object
     * @param property The property object
     * @since EL 3.0
     */
    public void setPropertyResolved(Object base, Object property) {
        // Don't write the flag field directly: go through the boolean overload,
        // because EL 2.2 users may have overridden or delegated ELContext and
        // the overload must remain the single point of truth.
        setPropertyResolved(true);
        notifyPropertyResolved(base, property);
    }
}
public class KAMUpdateDaoImpl { /** * Remaps outgoing and incoming edges to { @ code collapseTo } . * @ param collapsing { @ link KamNode } being collapsed * @ param collapseTo { @ link KamNode } collapsing to * @ param kam { @ link Kam } to retrieve adjacent edges * @ param esps { @ link PreparedStatement } for updating edge source * @ param etps { @ link PreparedStatement } for updating edge target * @ return { @ code int } update count * @ throws SQLException when a SQL error occurred with update */ private static int remapEdges ( KamNode collapsing , KamNode collapseTo , Kam kam , PreparedStatement esps , PreparedStatement etps ) throws SQLException { } }
int updates = 0 ; Set < KamEdge > outgoing = kam . getAdjacentEdges ( collapsing , FORWARD ) ; int collapseToId = collapseTo . getId ( ) ; for ( KamEdge edge : outgoing ) { esps . setInt ( 1 , collapseToId ) ; esps . setInt ( 2 , edge . getId ( ) ) ; updates += esps . executeUpdate ( ) ; } Set < KamEdge > incoming = kam . getAdjacentEdges ( collapsing , REVERSE ) ; for ( KamEdge edge : incoming ) { etps . setInt ( 1 , collapseToId ) ; etps . setInt ( 2 , edge . getId ( ) ) ; updates += etps . executeUpdate ( ) ; } return updates ;
public class EnumSetJsonDeserializer {
    /**
     * Creates a deserializer for an {@link EnumSet} of the given enum type.
     *
     * @param deserializer {@link EnumJsonDeserializer} used to deserialize the enums inside the {@link EnumSet}.
     * @return a new instance of {@link EnumSetJsonDeserializer}
     * @param <E> the enum type contained in the set
     */
    public static <E extends Enum<E>> EnumSetJsonDeserializer<E> newInstance(EnumJsonDeserializer<E> deserializer) {
        return new EnumSetJsonDeserializer<E>(deserializer);
    }
}
public class Binder {
    /**
     * Maps two objects together. All matching fields will then be two-way
     * data-bound.
     *
     * @param source the first object of the binding
     * @param destination the second object of the binding
     * @return the resulting {@link DataBinding}
     */
    public static DataBinding map(Object source, Object destination) {
        // Convenience wrapper over the fluent API: bindObject(source).mapTo(destination).
        return bindObject(source).mapTo(destination);
    }
}
public class FiguerasSSSRFinder {
    /**
     * Selects an optimum edge for elimination in structures without N2 nodes.
     * <p>This might be severely broken! Would have helped if there was an
     * explanation of how this algorithm worked.
     *
     * @param ring the ring whose bonds are candidates for elimination
     * @param molecule the molecule providing adjacency for ring perception
     * @return the bond selected for elimination
     */
    private IBond checkEdges(IRing ring, IAtomContainer molecule) {
        IRing r1, r2;
        IRingSet ringSet = ring.getBuilder().newInstance(IRingSet.class);
        IBond bond;
        int minMaxSize = Integer.MAX_VALUE;
        int minMax = 0;
        logger.debug("Molecule: " + molecule);
        Iterator<IBond> bonds = ring.bonds().iterator();
        while (bonds.hasNext()) {
            bond = (IBond) bonds.next();
            // Temporarily remove the bond, compute a ring through each of its former
            // endpoints, and collect the larger of the two.
            molecule.removeElectronContainer(bond);
            r1 = getRing(bond.getBegin(), molecule);
            r2 = getRing(bond.getEnd(), molecule);
            logger.debug("checkEdges: " + bond);
            if (r1.getAtomCount() > r2.getAtomCount()) {
                ringSet.addAtomContainer(r1);
            } else {
                ringSet.addAtomContainer(r2);
            }
            // Restore the molecule before testing the next bond.
            molecule.addBond(bond);
        }
        // Find the index of the smallest collected ring (by bond count).
        for (int i = 0; i < ringSet.getAtomContainerCount(); i++) {
            if (((IRing) ringSet.getAtomContainer(i)).getBondCount() < minMaxSize) {
                minMaxSize = ((IRing) ringSet.getAtomContainer(i)).getBondCount();
                minMax = i;
            }
        }
        // NOTE(review): minMax indexes the collected ring set above, but is used here
        // to index the original ring's electron containers — confirm this mapping is
        // intentional (see the "might be severely broken" caveat in the Javadoc).
        return (IBond) ring.getElectronContainer(minMax);
    }
}
public class JsonMapper { /** * Serialize a list of objects to an OutputStream . * @ param map The map of objects to serialize . * @ param os The OutputStream to which the list should be serialized */ public void serialize ( Map < String , T > map , OutputStream os ) throws IOException { } }
JsonGenerator jsonGenerator = LoganSquare . JSON_FACTORY . createGenerator ( os ) ; serialize ( map , jsonGenerator ) ; jsonGenerator . close ( ) ;
public class AbstractRemoteClient {
    /**
     * {@inheritDoc}
     *
     * @param scope {@inheritDoc}
     * @param participantConfig {@inheritDoc}
     * @throws org.openbase.jul.exception.InitializationException {@inheritDoc}
     * @throws java.lang.InterruptedException {@inheritDoc}
     */
    @Override
    public void init(final Scope scope, final ParticipantConfig participantConfig) throws InitializationException, InterruptedException {
        // Thin public entry point: the shared initialization logic lives in internalInit.
        internalInit(scope, participantConfig);
    }
}
public class clusterinstance { /** * Use this API to delete clusterinstance of given name . */ public static base_response delete ( nitro_service client , Long clid ) throws Exception { } }
clusterinstance deleteresource = new clusterinstance ( ) ; deleteresource . clid = clid ; return deleteresource . delete_resource ( client ) ;
public class RestClientUtil {
    /**
     * Retrieves all documents of an index in parallel.
     *
     * @param index the index to search
     * @param fetchSize number of documents returned per batch (callers default this to 5000 when unspecified)
     * @param scrollHandler callback invoked for each batch of results
     * @param type the element type results are mapped to
     * @param max number of scroll slices to run in parallel
     * @param <T> result element type
     * @return the collected search results
     * @throws ElasticSearchException on search failure
     */
    public <T> ESDatas<T> searchAllParallel(String index, final int fetchSize, ScrollHandler<T> scrollHandler, final Class<T> type, int max) throws ElasticSearchException {
        if (!this.client.isV1()) {
            // Non-1.x cluster: use sliced scroll — each slice issues its own
            // match_all query sorted by _doc, sized to fetchSize.
            SliceScroll sliceScroll = new SliceScroll() {
                @Override
                public String buildSliceDsl(int sliceId, int max) {
                    StringBuilder builder = new StringBuilder();
                    String sliceDsl = builder.append("{\"slice\": {\"id\": ").append(sliceId).append(",\"max\": ").append(max).append("},\"size\":").append(fetchSize).append(",\"query\": {\"match_all\": {}},\"sort\": [\"_doc\"]}").toString();
                    return sliceDsl;
                    // return buildSliceDsl(i, max, params, dslTemplate);
                }
            };
            return _slice(index + "/_search", scrollHandler, type, max, "1m", sliceScroll);
        } else {
            // 1.x cluster: no slice support, fall back to a single parallel scroll
            // over one match_all query.
            StringBuilder builder = new StringBuilder();
            String queryAll = builder.append("{ \"size\":").append(fetchSize).append(",\"query\": {\"match_all\": {}},\"sort\": [\"_doc\"]}").toString();
            builder.setLength(0);
            return this.scrollParallel(builder.append(index).append("/_search").toString(), queryAll, "10m", type, scrollHandler);
        }
    }
}
public class DelegatedAuthenticationSAML2ClientLogoutAction {
    /**
     * Finds the current client name from the context, using the PAC4J Profile Manager. It is assumed that the context has previously been
     * populated with the profile.
     *
     * @param webContext A web context (request + response).
     * @return The currently used client's name or {@code null} if there is no active profile.
     */
    private static String findCurrentClientName(final J2EContext webContext) {
        val pm = new ProfileManager<>(webContext, webContext.getSessionStore());
        // NOTE(review): the boolean presumably means "also read the profile from the
        // session" — confirm against the pac4j ProfileManager API.
        val profile = pm.get(true);
        return profile.map(CommonProfile::getClientName).orElse(null);
    }
}
public class ChineseEnglishWordMap {
    /**
     * The main method reads (segmented, whitespace delimited) words from a file
     * and prints them with their English translation(s).
     * The path and filename of the CEDict Lexicon can be supplied via the
     * "-dictPath" flag; otherwise the default filename "cedict_ts.u8" in the
     * current directory is checked.
     * By default, only the first translation is printed. If the "-all" flag
     * is given, all translations are printed.
     * The input and output encoding can be specified using the "-encoding" flag.
     * Otherwise UTF-8 is assumed.
     */
    public static void main(String[] args) throws IOException {
        // Parse flags: -dictPath and -encoding each take one argument.
        Map<String, Integer> flagsToNumArgs = new HashMap<String, Integer>();
        flagsToNumArgs.put("-dictPath", 1);
        flagsToNumArgs.put("-encoding", 1);
        Map<String, String[]> argMap = StringUtils.argsToMap(args, flagsToNumArgs);
        // The null key holds the positional (non-flag) arguments.
        String[] otherArgs = argMap.get(null);
        if (otherArgs.length < 1) {
            System.err.println("usage: ChineseEnglishWordMap [-all] [-dictPath path] [-encoding enc_string] inputFile");
            System.exit(1);
        }
        String filename = otherArgs[0];
        boolean allTranslations = argMap.containsKey("-all");
        String charset = defaultCharset;
        if (argMap.containsKey("-encoding")) {
            charset = argMap.get("-encoding")[0];
        }
        BufferedReader r = new BufferedReader(new InputStreamReader(new FileInputStream(filename), charset));
        TreebankLanguagePack tlp = new ChineseTreebankLanguagePack();
        String[] dpString = argMap.get("-dictPath");
        ChineseEnglishWordMap cewm = (dpString == null) ? new ChineseEnglishWordMap() : new ChineseEnglishWordMap(dpString[0]);
        int totalWords = 0, coveredWords = 0;
        // Autoflushing writer in the requested charset so output interleaves sanely.
        PrintWriter pw = new PrintWriter(new OutputStreamWriter(System.out, charset), true);
        for (String line = r.readLine(); line != null; line = r.readLine()) {
            String[] words = line.split("\\s", 1000);
            for (String word : words) {
                totalWords++;
                if (word.length() == 0) continue;
                pw.print(StringUtils.pad(word + ':', 8));
                if (tlp.isPunctuationWord(word)) {
                    // Punctuation is echoed but not counted as a word.
                    totalWords--;
                    pw.print(word);
                } else if (isDigits(word)) {
                    pw.print(word + " [NUMBER]");
                } else if (cewm.containsKey(word)) {
                    coveredWords++;
                    if (allTranslations) {
                        // Print every translation, separated by '|'.
                        List<String> trans = new ArrayList<String>(cewm.getAllTranslations(word));
                        for (String s : trans) {
                            pw.print((trans.indexOf(s) > 0 ? "|" : "") + s);
                        }
                    } else {
                        pw.print(cewm.getFirstTranslation(word));
                    }
                } else {
                    // Word is not in the dictionary.
                    pw.print("[UNK]");
                }
                pw.println();
            }
            pw.println();
        }
        r.close();
        System.err.print("Finished translating " + totalWords + " words (");
        System.err.println(coveredWords + " were in dictionary).");
    }
}
public class Reflection {
    /**
     * <p>Judge whether the given data object is an instance of the given type.</p>
     * Primitive class literals (e.g. {@code int.class}) are matched against their
     * boxed wrappers: any value reaching the Object parameter has already been
     * auto-boxed by the JVM, so an int argument is always seen here as an Integer.
     *
     * @param data the data object, possibly null
     * @return true or false as described.
     * @param type the given class to test.
     */
    public static boolean instanceOf(Object data, Class type) {
        if (type.isInstance(data)) {
            return true;
        }
        if (data == null) {
            return false;
        }
        if (!type.isPrimitive()) {
            // Non-primitive and the direct check failed above.
            return false;
        }
        // Map the primitive literal to its wrapper before re-checking.
        Class boxed = type;
        if (type == int.class) {
            boxed = Integer.class;
        } else if (type == long.class) {
            boxed = Long.class;
        } else if (type == short.class) {
            boxed = Short.class;
        } else if (type == double.class) {
            boxed = Double.class;
        } else if (type == float.class) {
            boxed = Float.class;
        } else if (type == char.class) {
            boxed = Character.class;
        } else if (type == byte.class) {
            boxed = Byte.class;
        } else if (type == boolean.class) {
            boxed = Boolean.class;
        }
        return boxed.isInstance(data);
    }
}
public class JBaseMenuScreen {
    /**
     * Do some applet-wide action (for example, submit or reset), showing a wait
     * cursor while the action runs.
     * Actions bubble: a BasePanel's doAction passes unhandled commands up to its
     * parent until the applet tries all sub-panels. Do not override this method;
     * override handleAction in JBasePanel instead.
     *
     * @param strAction The command to pass to all the sub-JBasePanels.
     * @param source The component the action originated from.
     * @param iOptions Action option flags.
     * @return true If handled.
     */
    public boolean handleAction(String strAction, Component source, int iOptions) {
        BaseApplet applet = this.getBaseApplet();
        Object oldCursor = null;
        // Remember the previous status so it can be restored afterwards.
        if (applet != null)
            oldCursor = applet.setStatus(Cursor.WAIT_CURSOR, applet, null);
        boolean bFlag = super.handleAction(strAction, source, iOptions);
        // NOTE(review): if super.handleAction throws, the wait cursor is never
        // reset — a try/finally would be safer; confirm whether handlers can throw.
        if (applet != null)
            applet.setStatus(0, applet, oldCursor);
        return bFlag;
    }
}
public class DatatypeConverter { /** * Convert the Primavera string representation of a UUID into a Java UUID instance . * @ param value Primavera UUID * @ return Java UUID instance */ public static final UUID parseUUID ( String value ) { } }
UUID result = null ; if ( value != null && ! value . isEmpty ( ) ) { if ( value . charAt ( 0 ) == '{' ) { // PMXML representation : < GUID > { 0AB9133E - A09A - 9648 - B98A - B2384894AC44 } < / GUID > result = UUID . fromString ( value . substring ( 1 , value . length ( ) - 1 ) ) ; } else { // XER representation : CrkTPqCalki5irI4SJSsRA byte [ ] data = javax . xml . bind . DatatypeConverter . parseBase64Binary ( value + "==" ) ; long msb = 0 ; long lsb = 0 ; for ( int i = 0 ; i < 8 ; i ++ ) { msb = ( msb << 8 ) | ( data [ i ] & 0xff ) ; } for ( int i = 8 ; i < 16 ; i ++ ) { lsb = ( lsb << 8 ) | ( data [ i ] & 0xff ) ; } result = new UUID ( msb , lsb ) ; } } return result ;
public class CmsObject {
    /**
     * Returns a user, if the password is correct.<p>
     * If the user/pwd pair is not valid a <code>{@link CmsException}</code> is thrown.<p>
     *
     * @param username the name of the user to be returned
     * @param password the password of the user to be returned
     * @return the validated user
     * @throws CmsException if operation was not successful
     */
    public CmsUser readUser(String username, String password) throws CmsException {
        // Delegate the credential check to the security manager, passing the
        // current request context.
        return m_securityManager.readUser(m_context, username, password);
    }
}
public class HashUtil {
    /**
     * Bernstein's hash (djb2-style, multiplier 33) over the characters of a string.
     *
     * @param key the input string
     * @return the resulting hash value
     */
    public static int bernstein(String key) {
        int hash = 0;
        for (char c : key.toCharArray()) {
            hash = hash * 33 + c;
        }
        return hash;
    }
}
public class DbPreparedStatement {
    /**
     * Sets SQL NULL for all statement parameters matched by the given name.
     *
     * @param name logical parameter name; may map to several positional indexes
     * @param type SQL type of the parameter(s)
     * @return this statement, for chaining
     * @throws SQLException if the underlying statement rejects the value
     */
    @NotNull
    public DbPreparedStatement<T> setNull(@NotNull String name, @NotNull SQLType type) throws SQLException {
        // A named parameter can occur at multiple positions in the SQL; null them all.
        for (int i : getIndexes(name)) {
            statement.setNull(i, type.getVendorTypeNumber());
        }
        return this;
    }
}
public class Scanner { /** * Returns true if the whitespace that was skipped included any * line terminators . */ private boolean skipWhitespace ( ) { } }
boolean foundLineTerminator = false ; while ( ! isAtEnd ( ) && peekWhitespace ( ) ) { if ( isLineTerminator ( nextChar ( ) ) ) { foundLineTerminator = true ; } } return foundLineTerminator ;
public class CmsSetupBean { /** * Installed all modules that have been set using { @ link # setInstallModules ( String ) } . < p > * This method is invoked as a shell command . < p > * @ throws Exception if something goes wrong */ public void importModulesFromSetupBean ( ) throws Exception { } }
// read here how the list of modules to be installed is passed from the setup bean to the // setup thread , and finally to the shell process that executes the setup script : // 1 ) the list with the package names of the modules to be installed is saved by setInstallModules // 2 ) the setup thread gets initialized in a JSP of the setup wizard // 3 ) the instance of the setup bean is passed to the setup thread by setAdditionalShellCommand // 4 ) the setup bean is passed to the shell by startSetup // 5 ) because the setup bean implements I _ CmsShellCommands , the shell constructor can pass the shell ' s CmsObject back to the setup bean // 6 ) thus , the setup bean can do things with the Cms if ( ( m_cms != null ) && ( m_installModules != null ) ) { for ( int i = 0 ; i < m_installModules . size ( ) ; i ++ ) { String filename = m_moduleFilenames . get ( m_installModules . get ( i ) ) ; try { importModuleFromDefault ( filename ) ; } catch ( Exception e ) { // log a exception during module import , but make sure the next module is still imported e . printStackTrace ( System . err ) ; } } m_hasIndexHtml = false ; try { m_cms . readResource ( "/index.html" ) ; m_hasIndexHtml = true ; } catch ( Exception e ) { } }
public class EntityCustomSerializationFactory {
    /**
     * Removes ignored (filtered) fields from the serializer being built,
     * after applying the default view processing.
     */
    @Override
    protected void processViews(SerializationConfig config, BeanSerializerBuilder builder) {
        super.processViews(config, builder);
        // Determine the filter configuration for the concrete bean class.
        // Note that you can avoid or change this check.
        Class<?> beanClass = builder.getBeanDescription().getBeanClass();
        ClassUtil.ClassInfo classInfo = ClassUtil.getClassInfo(beanClass);
        // if ( builder . getBeanDescription ( ) . getBeanClass ( ) . equals ( Entity . class ) )
        // Get the original writers, then rebuild the list keeping only the
        // properties that are not filtered out.
        List<BeanPropertyWriter> originalWriters = builder.getProperties();
        List<BeanPropertyWriter> writers = new ArrayList<BeanPropertyWriter>();
        String[] fs = this._getFilterFields(classInfo);
        for (BeanPropertyWriter writer : originalWriters) {
            final String propName = writer.getName();
            // Keep the writer only when the property is not an ignored field.
            boolean find = isFilterField(classInfo, propName, fs);
            if (!find) {
                writers.add(writer);
            }
        }
        builder.setProperties(writers);
    }
}
public class NodeUtil {
    /**
     * True for aliases defined with @const, not for aliases defined with
     * @constructor/@interface.
     */
    static boolean isAliasedConstDefinition(Node lhs) {
        JSDocInfo jsdoc = getBestJSDocInfo(lhs);
        // Missing JSDoc disqualifies only non-externs code — presumably externs
        // declarations are treated as implicitly const; confirm against the
        // compiler's externs handling.
        if (jsdoc == null && !lhs.isFromExterns()) {
            return false;
        }
        // When JSDoc is present it must carry an explicit @const annotation.
        if (jsdoc != null && !jsdoc.hasConstAnnotation()) {
            return false;
        }
        // An alias must point at another qualified name, not an arbitrary expression.
        Node rhs = getRValueOfLValue(lhs);
        if (rhs == null || !rhs.isQualifiedName()) {
            return false;
        }
        // Accept either `var name = alias;` or a top-level `a.b.c = alias;` statement.
        Node parent = lhs.getParent();
        return (lhs.isName() && parent.isVar()) || (lhs.isGetProp() && lhs.isQualifiedName() && parent.isAssign() && parent.getParent().isExprResult());
    }
}
public class AbstractDisplayer { /** * Get the current filter intervals for the given data set column . * @ param columnId The column identifier . * @ return A list of intervals . */ public List < Interval > filterIntervals ( String columnId ) { } }
List < Interval > selected = columnSelectionMap . get ( columnId ) ; if ( selected == null ) { return new ArrayList < > ( ) ; } return selected ;
public class LDblToLongFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LDblToLongFunction dblToLongFunctionFrom ( Consumer < LDblToLongFunctionBuilder > buildingFunction ) { } }
LDblToLongFunctionBuilder builder = new LDblToLongFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class MeshHeading {
    /**
     * Setter for qualifierNameMajorTopic - sets see MeSH.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setQualifierNameMajorTopic(boolean v) {
        // Generated UIMA JCas accessor: verify the feature exists in the type
        // system, then write the boolean directly into the low-level CAS.
        if (MeshHeading_Type.featOkTst && ((MeshHeading_Type) jcasType).casFeat_qualifierNameMajorTopic == null)
            jcasType.jcas.throwFeatMissing("qualifierNameMajorTopic", "de.julielab.jules.types.MeshHeading");
        jcasType.ll_cas.ll_setBooleanValue(addr, ((MeshHeading_Type) jcasType).casFeatCode_qualifierNameMajorTopic, v);
    }
}