signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class UpdateDeploymentGroupRequest { /** * Information about triggers to change when the deployment group is updated . For examples , see < a
* href = " https : / / docs . aws . amazon . com / codedeploy / latest / userguide / how - to - notify - edit . html " > Modify Triggers in an AWS
* CodeDeploy Deployment Group < / a > in the AWS CodeDeploy User Guide .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setTriggerConfigurations ( java . util . Collection ) } or
* { @ link # withTriggerConfigurations ( java . util . Collection ) } if you want to override the existing values .
* @ param triggerConfigurations
* Information about triggers to change when the deployment group is updated . For examples , see < a
* href = " https : / / docs . aws . amazon . com / codedeploy / latest / userguide / how - to - notify - edit . html " > Modify Triggers in
* an AWS CodeDeploy Deployment Group < / a > in the AWS CodeDeploy User Guide .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UpdateDeploymentGroupRequest withTriggerConfigurations ( TriggerConfig ... triggerConfigurations ) { } }
|
if ( this . triggerConfigurations == null ) { setTriggerConfigurations ( new com . amazonaws . internal . SdkInternalList < TriggerConfig > ( triggerConfigurations . length ) ) ; } for ( TriggerConfig ele : triggerConfigurations ) { this . triggerConfigurations . add ( ele ) ; } return this ;
|
public class SpanDeriverUtil { /** * Derives fault from Span . Fault is determined by HTTP client / server error code contained in binary annotations .
* @ param span the span
* @ return fault ( ) */
public static String deriveFault ( Span span ) { } }
|
List < SpanHttpDeriverUtil . HttpCode > errorCodes = SpanHttpDeriverUtil . getClientOrServerErrors ( SpanHttpDeriverUtil . getHttpStatusCodes ( span . getBinaryAnnotations ( ) ) ) ; if ( errorCodes . size ( ) > 0 ) { return errorCodes . iterator ( ) . next ( ) . getDescription ( ) ; } return null ;
|
public class ValueDataUtil { /** * Creates value data depending on its type . It avoids storing unnecessary bytes in memory
* every time .
* @ param type
* property data type , can be either { @ link PropertyType } or { @ link ExtendedPropertyType }
* @ param orderNumber
* value data order number
* @ param data
* value data represented in array of bytes */
public static PersistedValueData createValueData ( int type , int orderNumber , byte [ ] data ) throws IOException { } }
|
switch ( type ) { case PropertyType . BINARY : case PropertyType . UNDEFINED : return new ByteArrayPersistedValueData ( orderNumber , data ) ; case PropertyType . BOOLEAN : return new BooleanPersistedValueData ( orderNumber , Boolean . valueOf ( getString ( data ) ) ) ; case PropertyType . DATE : try { return new CalendarPersistedValueData ( orderNumber , JCRDateFormat . parse ( getString ( data ) ) ) ; } catch ( ValueFormatException e ) { throw new IOException ( "Can't create Calendar value" , e ) ; } case PropertyType . DOUBLE : return new DoublePersistedValueData ( orderNumber , Double . valueOf ( getString ( data ) ) ) ; case PropertyType . LONG : return new LongPersistedValueData ( orderNumber , Long . valueOf ( getString ( data ) ) ) ; case PropertyType . NAME : try { return new NamePersistedValueData ( orderNumber , InternalQName . parse ( getString ( data ) ) ) ; } catch ( IllegalNameException e ) { throw new IOException ( e . getMessage ( ) , e ) ; } case PropertyType . PATH : try { return new PathPersistedValueData ( orderNumber , QPath . parse ( getString ( data ) ) ) ; } catch ( IllegalPathException e ) { throw new IOException ( e . getMessage ( ) , e ) ; } case PropertyType . REFERENCE : return new ReferencePersistedValueData ( orderNumber , new Identifier ( data ) ) ; case PropertyType . STRING : return new StringPersistedValueData ( orderNumber , getString ( data ) ) ; case ExtendedPropertyType . PERMISSION : return new PermissionPersistedValueData ( orderNumber , AccessControlEntry . parse ( getString ( data ) ) ) ; default : throw new IllegalStateException ( "Unknown property type " + type ) ; }
|
public class OtpOutputStream { /** * Write an array of bytes to the stream as an Erlang bitstr .
* @ param bin
* the array of bytes to write .
* @ param pad _ bits
* the number of zero pad bits at the low end of the last byte */
public void write_bitstr ( final byte [ ] bin , final int pad_bits ) { } }
|
if ( pad_bits == 0 ) { write_binary ( bin ) ; return ; } write1 ( OtpExternal . bitBinTag ) ; write4BE ( bin . length ) ; write1 ( 8 - pad_bits ) ; writeN ( bin ) ;
|
public class PreferenceActivity { /** * Removes a specific listener , which should not be notified , when a navigation preference has
* been added or removed to / from the activity , anymore .
* @ param listener
* The listener , which should be removed , as an instance of the type { @ link
* NavigationListener } . The listener may not be null */
public final void removeNavigationListener ( @ NonNull final NavigationListener listener ) { } }
|
Condition . INSTANCE . ensureNotNull ( listener , "The listener may not be null" ) ; navigationListeners . remove ( listener ) ;
|
public class CmsLoginHelper { /** * Returns the cookie with the given name , if not cookie is found a new one is created . < p >
* @ param request the current request
* @ param name the name of the cookie
* @ return the cookie */
protected static Cookie getCookie ( HttpServletRequest request , String name ) { } }
|
Cookie [ ] cookies = request . getCookies ( ) ; for ( int i = 0 ; ( cookies != null ) && ( i < cookies . length ) ; i ++ ) { if ( name . equalsIgnoreCase ( cookies [ i ] . getName ( ) ) ) { return cookies [ i ] ; } } return new Cookie ( name , "" ) ;
|
public class MtasExtendedSpanTermQuery { /** * ( non - Javadoc )
* @ see
* org . apache . lucene . search . spans . SpanTermQuery # createWeight ( org . apache . lucene
* . search . IndexSearcher , boolean ) */
@ Override public SpanWeight createWeight ( IndexSearcher searcher , boolean needsScores , float boost ) throws IOException { } }
|
final TermContext context ; final IndexReaderContext topContext = searcher . getTopReaderContext ( ) ; if ( termContext == null ) { context = TermContext . build ( topContext , localTerm ) ; } else { context = termContext ; } return new SpanTermWeight ( context , searcher , needsScores ? Collections . singletonMap ( localTerm , context ) : null , boost ) ;
|
public class ColorLab { /** * Conversion from normalized RGB into LAB . Normalized RGB values have a range of 0:1 */
public static void srgbToLab ( float r , float g , float b , float lab [ ] ) { } }
|
ColorXyz . srgbToXyz ( r , g , b , lab ) ; float X = lab [ 0 ] ; float Y = lab [ 1 ] ; float Z = lab [ 2 ] ; float xr = X / Xr_f ; float yr = Y / Yr_f ; float zr = Z / Zr_f ; float fx , fy , fz ; if ( xr > epsilon_f ) fx = ( float ) Math . pow ( xr , 1.0f / 3.0f ) ; else fx = ( kappa_f * xr + 16.0f ) / 116.0f ; if ( yr > epsilon_f ) fy = ( float ) Math . pow ( yr , 1.0 / 3.0f ) ; else fy = ( kappa_f * yr + 16.0f ) / 116.0f ; if ( zr > epsilon_f ) fz = ( float ) Math . pow ( zr , 1.0 / 3.0f ) ; else fz = ( kappa_f * zr + 16.0f ) / 116.0f ; lab [ 0 ] = 116.0f * fy - 16.0f ; lab [ 1 ] = 500.0f * ( fx - fy ) ; lab [ 2 ] = 200.0f * ( fy - fz ) ;
|
public class VirtualMachinesInner { /** * Run command on the VM .
* @ param resourceGroupName The name of the resource group .
* @ param vmName The name of the virtual machine .
* @ param parameters Parameters supplied to the Run command operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < RunCommandResultInner > runCommandAsync ( String resourceGroupName , String vmName , RunCommandInput parameters ) { } }
|
return runCommandWithServiceResponseAsync ( resourceGroupName , vmName , parameters ) . map ( new Func1 < ServiceResponse < RunCommandResultInner > , RunCommandResultInner > ( ) { @ Override public RunCommandResultInner call ( ServiceResponse < RunCommandResultInner > response ) { return response . body ( ) ; } } ) ;
|
public class CronUtil { /** * 重新启动定时任务 < br >
* 此方法会清除动态加载的任务 , 重新启动后 , 守护线程与否与之前保持一致 */
synchronized public static void restart ( ) { } }
|
if ( null != crontabSetting ) { // 重新读取配置文件
crontabSetting . load ( ) ; } if ( scheduler . isStarted ( ) ) { // 关闭并清除已有任务
scheduler . stop ( true ) ; } // 重新加载任务
schedule ( crontabSetting ) ; // 重新启动
scheduler . start ( ) ;
|
public class ORecordSerializerCSVAbstract { /** * Serialize the link .
* @ param buffer
* @ param iParentRecord
* @ param iFieldName
* TODO
* @ param iLinked
* Can be an instance of ORID or a Record < ? >
* @ return */
private static OIdentifiable linkToStream ( final StringBuilder buffer , final ORecordSchemaAware < ? > iParentRecord , Object iLinked ) { } }
|
if ( iLinked == null ) // NULL REFERENCE
return null ; OIdentifiable resultRid = null ; ORID rid ; final ODatabaseRecord database = ODatabaseRecordThreadLocal . INSTANCE . get ( ) ; if ( iLinked instanceof ORID ) { // JUST THE REFERENCE
rid = ( ORID ) iLinked ; if ( rid . isValid ( ) && rid . isNew ( ) ) { // SAVE AT THE FLY AND STORE THE NEW RID
final ORecord < ? > record = rid . getRecord ( ) ; if ( database . getTransaction ( ) . isActive ( ) ) { // USE THE DEFAULT CLUSTER
database . save ( ( ORecordInternal < ? > ) record ) ; } else database . save ( ( ORecordInternal < ? > ) record ) ; if ( record != null ) rid = record . getIdentity ( ) ; resultRid = rid ; } } else { if ( iLinked instanceof String ) iLinked = new ORecordId ( ( String ) iLinked ) ; else if ( ! ( iLinked instanceof ORecordInternal < ? > ) ) { // NOT RECORD : TRY TO EXTRACT THE DOCUMENT IF ANY
final String boundDocumentField = OObjectSerializerHelperManager . getInstance ( ) . getDocumentBoundField ( iLinked . getClass ( ) ) ; if ( boundDocumentField != null ) iLinked = OObjectSerializerHelperManager . getInstance ( ) . getFieldValue ( iLinked , boundDocumentField ) ; } if ( ! ( iLinked instanceof OIdentifiable ) ) throw new IllegalArgumentException ( "Invalid object received. Expected a OIdentifiable but received type=" + iLinked . getClass ( ) . getName ( ) + " and value=" + iLinked ) ; // RECORD
ORecordInternal < ? > iLinkedRecord = ( ( OIdentifiable ) iLinked ) . getRecord ( ) ; rid = iLinkedRecord . getIdentity ( ) ; if ( rid . isNew ( ) || iLinkedRecord . isDirty ( ) ) { if ( iLinkedRecord instanceof ODocument ) { final OClass schemaClass = ( ( ODocument ) iLinkedRecord ) . getSchemaClass ( ) ; database . save ( iLinkedRecord , schemaClass != null ? database . getClusterNameById ( schemaClass . getDefaultClusterId ( ) ) : null ) ; } else // STORE THE TRAVERSED OBJECT TO KNOW THE RECORD ID . CALL THIS VERSION TO AVOID CLEAR OF STACK IN THREAD - LOCAL
database . save ( iLinkedRecord ) ; final ODatabaseComplex < ? > dbOwner = database . getDatabaseOwner ( ) ; dbOwner . registerUserObjectAfterLinkSave ( iLinkedRecord ) ; resultRid = iLinkedRecord ; } if ( iParentRecord != null && database instanceof ODatabaseRecord ) { final ODatabaseRecord db = database ; if ( ! db . isRetainRecords ( ) ) // REPLACE CURRENT RECORD WITH ITS ID : THIS SAVES A LOT OF MEMORY
resultRid = iLinkedRecord . getIdentity ( ) ; } } if ( rid . isValid ( ) ) rid . toString ( buffer ) ; return resultRid ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcStructuralAnalysisModel ( ) { } }
|
if ( ifcStructuralAnalysisModelEClass == null ) { ifcStructuralAnalysisModelEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 630 ) ; } return ifcStructuralAnalysisModelEClass ;
|
public class FrameworkManager { /** * Register the PauseableComponentController class as a service in the OSGi registry
* @ param systemBundleCtx
* The framework system bundle context */
protected void registerPauseableComponentController ( BundleContext systemContext ) { } }
|
PauseableComponentControllerImpl pauseableComponentController = new PauseableComponentControllerImpl ( systemContext ) ; if ( pauseableComponentController != null ) { Hashtable < String , String > svcProps = new Hashtable < String , String > ( ) ; systemContext . registerService ( PauseableComponentController . class . getName ( ) , pauseableComponentController , svcProps ) ; }
|
public class FileAuthenticationProvider { /** * Returns a UserMapping containing all authorization data given within
* GUACAMOLE _ HOME / user - mapping . xml . If the XML file has been modified or has
* not yet been read , this function may reread the file .
* @ return
* A UserMapping containing all authorization data within the user
* mapping XML file , or null if the file cannot be found / parsed . */
private UserMapping getUserMapping ( ) { } }
|
// Read user mapping from GUACAMOLE _ HOME / user - mapping . xml
File userMappingFile = new File ( environment . getGuacamoleHome ( ) , USER_MAPPING_FILENAME ) ; // Abort if user mapping does not exist
if ( ! userMappingFile . exists ( ) ) { logger . debug ( "User mapping file \"{}\" does not exist and will not be read." , userMappingFile ) ; return null ; } // Refresh user mapping if file has changed
if ( lastModified < userMappingFile . lastModified ( ) ) { logger . debug ( "Reading user mapping file: \"{}\"" , userMappingFile ) ; // Set up XML parser
SAXParser parser ; try { parser = SAXParserFactory . newInstance ( ) . newSAXParser ( ) ; } catch ( ParserConfigurationException e ) { logger . error ( "Unable to create XML parser for reading \"{}\": {}" , USER_MAPPING_FILENAME , e . getMessage ( ) ) ; logger . debug ( "An instance of SAXParser could not be created." , e ) ; return null ; } catch ( SAXException e ) { logger . error ( "Unable to create XML parser for reading \"{}\": {}" , USER_MAPPING_FILENAME , e . getMessage ( ) ) ; logger . debug ( "An instance of SAXParser could not be created." , e ) ; return null ; } // Parse document
try { // Get handler for root element
UserMappingTagHandler userMappingHandler = new UserMappingTagHandler ( ) ; // Set up document handler
DocumentHandler contentHandler = new DocumentHandler ( "user-mapping" , userMappingHandler ) ; // Read and parse file
parser . parse ( userMappingFile , contentHandler ) ; // Store mod time and user mapping
lastModified = userMappingFile . lastModified ( ) ; cachedUserMapping = userMappingHandler . asUserMapping ( ) ; } // If the file is unreadable , return no mapping
catch ( IOException e ) { logger . warn ( "Unable to read user mapping file \"{}\": {}" , userMappingFile , e . getMessage ( ) ) ; logger . debug ( "Error reading user mapping file." , e ) ; return null ; } // If the file cannot be parsed , return no mapping
catch ( SAXException e ) { logger . warn ( "User mapping file \"{}\" is not valid: {}" , userMappingFile , e . getMessage ( ) ) ; logger . debug ( "Error parsing user mapping file." , e ) ; return null ; } } // Return ( possibly cached ) user mapping
return cachedUserMapping ;
|
public class Controller { /** * A version of { @ link # handleAction ( ActionEvent ) } with the parameters
* broken out so that it can be used by non - Swing interface toolkits . */
public boolean handleAction ( Object source , String action , Object arg ) { } }
|
Method method = null ; Object [ ] args = null ; try { // look for the appropriate method
Method [ ] methods = getClass ( ) . getMethods ( ) ; int mcount = methods . length ; for ( int i = 0 ; i < mcount ; i ++ ) { if ( methods [ i ] . getName ( ) . equals ( action ) || // handle our old style of prepending " handle "
methods [ i ] . getName ( ) . equals ( "handle" + action ) ) { // see if we can generate the appropriate arguments
args = generateArguments ( methods [ i ] , source , arg ) ; // if we were able to , go ahead and use this method
if ( args != null ) { method = methods [ i ] ; break ; } } } } catch ( Exception e ) { log . warning ( "Error searching for action handler method" , "controller" , this , "action" , action ) ; return false ; } try { if ( method != null ) { method . invoke ( this , args ) ; return true ; } else { return false ; } } catch ( Exception e ) { log . warning ( "Error invoking action handler" , "controller" , this , "action" , action , e ) ; // even though we choked , we still " handled " the action
return true ; }
|
public class VirtualMachinesInner { /** * Retrieves information about the model view or the instance view of a virtual machine .
* @ param resourceGroupName The name of the resource group .
* @ param vmName The name of the virtual machine .
* @ param serviceCallback the async ServiceCallback to handle successful and failed responses .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < VirtualMachineInner > getByResourceGroupAsync ( String resourceGroupName , String vmName , final ServiceCallback < VirtualMachineInner > serviceCallback ) { } }
|
return ServiceFuture . fromResponse ( getByResourceGroupWithServiceResponseAsync ( resourceGroupName , vmName ) , serviceCallback ) ;
|
public class SecurityUtils { /** * Converts a string into 128 - bit AES key . */
public static SecretKey toAes128Key ( String s ) { } }
|
try { // turn secretKey into 256 bit hash
MessageDigest digest = MessageDigest . getInstance ( "SHA-256" ) ; digest . reset ( ) ; digest . update ( s . getBytes ( "UTF-8" ) ) ; // Due to the stupid US export restriction JDK only ships 128bit version .
return new SecretKeySpec ( digest . digest ( ) , 0 , 128 / 8 , "AES" ) ; } catch ( NoSuchAlgorithmException | UnsupportedEncodingException e ) { throw new Error ( e ) ; }
|
public class GroupDeviceElement { /** * read _ attribute _ asynch _ i - access limited to package Group */
@ Override int read_attribute_asynch_i ( final String [ ] a , final boolean fwd , final int rid ) throws DevFailed { } }
|
try { final int actual_rid = proxy . read_attribute_asynch ( a ) ; arp . put ( new Integer ( rid ) , new AsynchRequest ( actual_rid , a ) ) ; } catch ( final DevFailed df ) { arp . put ( new Integer ( rid ) , new AsynchRequest ( - 1 , a , df ) ) ; } catch ( final Exception e ) { final DevError [ ] errors = new DevError [ 1 ] ; errors [ 0 ] = new DevError ( ) ; errors [ 0 ] . severity = ErrSeverity . ERR ; errors [ 0 ] . reason = "unknown exception caught" ; errors [ 0 ] . desc = "unknown error" ; errors [ 0 ] . origin = "GroupDeviceElemnt.read_attribute" ; final DevFailed ex = new DevFailed ( errors ) ; arp . put ( new Integer ( rid ) , new AsynchRequest ( - 1 , a , ex ) ) ; } return rid ;
|
public class JndiConfigurationSource { /** * Creates an instance of { @ link JndiConfigurationSource } .
* @ param context Context required to build the { @ link JNDIConfiguration }
* @ throws NullPointerException when context is null */
public static JndiConfigurationSource create ( Context context ) { } }
|
if ( context == null ) { throw new NullPointerException ( "context: null" ) ; } return new JndiConfigurationSource ( createJndiConfiguration ( context , null ) , DEFAULT_PRIORITY ) ;
|
public class IfcPropertyTableValueImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) @ Override public EList < IfcValue > getDefinedValues ( ) { } }
|
return ( EList < IfcValue > ) eGet ( Ifc4Package . Literals . IFC_PROPERTY_TABLE_VALUE__DEFINED_VALUES , true ) ;
|
public class TileConfig { /** * Export the tile as a node .
* @ param tileRef The tile to export ( must not be < code > null < / code > ) .
* @ return The exported node .
* @ throws LionEngineException If < code > null < / code > argument or error on writing . */
public static Xml exports ( TileRef tileRef ) { } }
|
Check . notNull ( tileRef ) ; final Xml node = new Xml ( NODE_TILE ) ; node . writeInteger ( ATT_TILE_SHEET , tileRef . getSheet ( ) . intValue ( ) ) ; node . writeInteger ( ATT_TILE_NUMBER , tileRef . getNumber ( ) ) ; return node ;
|
public class Reporter { /** * Captures the entire page screen shot , and created an HTML file friendly
* link to place in the output file
* @ return String : the image link string */
public String captureEntirePageScreenshot ( ) { } }
|
String imageName = generateImageName ( ) ; String imageLink = generateImageLink ( imageName ) ; try { app . takeScreenshot ( imageName ) ; screenshots . add ( imageName ) ; } catch ( Exception e ) { log . error ( e ) ; imageLink = "<br/><b><font class='fail'>No Screenshot Available</font></b>" ; } return imageLink ;
|
public class DrilldownProcessor { /** * Adds a { @ link DrilldownFunction } to the { @ link PlotOptions } of the given
* { @ link Options } .
* @ param options
* the { @ link Options } to add a { @ link DrilldownFunction } to */
private void addDrilldownFunction ( Options options , OptionsProcessorContext context ) { } }
|
SeriesType chartType = options . getChartOptions ( ) . getType ( ) ; if ( options . getPlotOptions ( ) == null ) { options . setPlotOptions ( new PlotOptionsChoice ( ) ) ; } if ( options . getPlotOptions ( ) . getPlotOptions ( chartType ) == null ) { options . getPlotOptions ( ) . setPlotOptions ( new PlotOptions ( ) , chartType ) ; } if ( options . getPlotOptions ( ) . getPlotOptions ( chartType ) . getPoint ( ) == null ) { options . getPlotOptions ( ) . getPlotOptions ( chartType ) . setPoint ( new PointOptions ( ) ) ; } if ( options . getPlotOptions ( ) . getPlotOptions ( chartType ) . getPoint ( ) . getEvents ( ) == null ) { options . getPlotOptions ( ) . getPlotOptions ( chartType ) . getPoint ( ) . setEvents ( new Events ( ) ) ; } options . getPlotOptions ( ) . getPlotOptions ( chartType ) . getPoint ( ) . getEvents ( ) . setClick ( new DrilldownFunction ( getDrilldownArrayName ( component ) ) ) ;
|
public class ConnectionDAODefaultImpl { public void init ( final Connection connection , final String devname ) throws DevFailed { } }
|
connection . url = new TangoUrl ( devname ) ; connection . setDevice_is_dbase ( false ) ; connection . devname = connection . url . devname ; // Check if connection is possible
if ( connection . url . protocol == TANGO && connection . url . use_db ) { connection . ior = get_exported_ior ( connection ) ; }
|
public class WhileContextDef { /** * < pre >
* Name of the context .
* < / pre >
* < code > optional string context _ name = 1 ; < / code > */
public com . google . protobuf . ByteString getContextNameBytes ( ) { } }
|
java . lang . Object ref = contextName_ ; if ( ref instanceof java . lang . String ) { com . google . protobuf . ByteString b = com . google . protobuf . ByteString . copyFromUtf8 ( ( java . lang . String ) ref ) ; contextName_ = b ; return b ; } else { return ( com . google . protobuf . ByteString ) ref ; }
|
public class SynchronizingListModel { /** * Synchronizes the list , adding and removing only a minimum number of elements .
* Comparisons are performed using . equals . This must be called from the
* Swing event dispatch thread . */
public void synchronize ( List < ? extends E > list ) { } }
|
assert SwingUtilities . isEventDispatchThread ( ) : ApplicationResources . accessor . getMessage ( "assert.notRunningInSwingEventThread" ) ; // Make sure the first element exists and matches
int modelOffset ; if ( constantFirstRow != null ) { modelOffset = 1 ; if ( isEmpty ( ) ) addElement ( constantFirstRow ) ; else if ( ! getElementAt ( 0 ) . equals ( constantFirstRow ) ) { insertElementAt ( constantFirstRow , 0 ) ; } } else modelOffset = 0 ; // Synchronize the dynamic part of the list
int size = list . size ( ) ; for ( int index = 0 ; index < size ; index ++ ) { E obj = list . get ( index ) ; if ( index >= ( size ( ) - modelOffset ) ) addElement ( obj ) ; else if ( ! obj . equals ( getElementAt ( index + modelOffset ) ) ) { // Objects don ' t match
// If this object is found further down the list , then delete up to that object
int foundIndex = - 1 ; for ( int searchIndex = index + 1 ; searchIndex < ( size ( ) - modelOffset ) ; searchIndex ++ ) { if ( obj . equals ( getElementAt ( searchIndex + modelOffset ) ) ) { foundIndex = searchIndex ; break ; } } if ( foundIndex != - 1 ) removeRange ( index + modelOffset , foundIndex - 1 + modelOffset ) ; // Otherwise , insert in the current index
else insertElementAt ( obj , index + modelOffset ) ; } } // Remove any extra
if ( ( size ( ) - modelOffset ) > size ) removeRange ( size + modelOffset , size ( ) - 1 ) ;
|
public class GetGroupsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetGroupsRequest getGroupsRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( getGroupsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getGroupsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class PushNotificationsManager { /** * Enable push notifications .
* @ param pushJid
* @ param node
* @ param publishOptions
* @ return true if it was successfully enabled , false if not
* @ throws NoResponseException
* @ throws XMPPErrorException
* @ throws NotConnectedException
* @ throws InterruptedException */
public boolean enable ( Jid pushJid , String node , HashMap < String , String > publishOptions ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
|
EnablePushNotificationsIQ enablePushNotificationsIQ = new EnablePushNotificationsIQ ( pushJid , node , publishOptions ) ; return changePushNotificationsStatus ( enablePushNotificationsIQ ) ;
|
public class ObservableAdapterBuilder { /** * Each emission of this observable prepends to the elements of the adapter . */
@ NonNull public ObservableAdapterBuilder < T > prepends ( @ Nullable Observable < ? extends Collection < ? extends T > > prepends ) { } }
|
mPrepends = prepends ; return this ;
|
public class SldUtilities { /** * REmoves the alpha channel from a color .
* @ param color the color .
* @ return the color without alpha . */
public static Color colorWithoutAlpha ( Color color ) { } }
|
return new Color ( color . getRed ( ) , color . getGreen ( ) , color . getBlue ( ) ) ;
|
public class ChunkedInputStream { /** * Read some bytes from the stream .
* @ param b The byte array that will hold the contents from the stream .
* @ param off The offset into the byte array at which bytes will start to be
* placed .
* @ param len the maximum number of bytes that can be returned .
* @ return The number of bytes returned or - 1 if the end of stream has been
* reached .
* @ see java . io . InputStream # read ( byte [ ] , int , int )
* @ throws IOException if an IO problem occurs . */
@ Override public int read ( byte [ ] b , int off , int len ) throws IOException { } }
|
if ( closed ) { throw new IOException ( "Attempted read from closed stream." ) ; } if ( eof ) { return - 1 ; } if ( pos >= chunkSize ) { nextChunk ( ) ; if ( eof ) { return - 1 ; } } len = Math . min ( len , chunkSize - pos ) ; int count = in . read ( b , off , len ) ; pos += count ; return count ;
|
public class ObjectResult { /** * Create an ObjectResult with a status of ERROR and the given error message and
* optional object ID .
* @ param errMsg Error message .
* @ param objID Optional object ID .
* @ return { @ link ObjectResult } with an error status and message . */
public static ObjectResult newErrorResult ( String errMsg , String objID ) { } }
|
ObjectResult result = new ObjectResult ( ) ; result . setStatus ( Status . ERROR ) ; result . setErrorMessage ( errMsg ) ; if ( ! Utils . isEmpty ( objID ) ) { result . setObjectID ( objID ) ; } return result ;
|
public class ClientTypeSignature { /** * This field is deprecated and clients should switch to { @ link # getArguments ( ) } */
@ Deprecated @ JsonProperty public List < ClientTypeSignature > getTypeArguments ( ) { } }
|
List < ClientTypeSignature > result = new ArrayList < > ( ) ; for ( ClientTypeSignatureParameter argument : arguments ) { switch ( argument . getKind ( ) ) { case TYPE : result . add ( argument . getTypeSignature ( ) ) ; break ; case NAMED_TYPE : result . add ( new ClientTypeSignature ( argument . getNamedTypeSignature ( ) . getTypeSignature ( ) ) ) ; break ; default : return new ArrayList < > ( ) ; } } return result ;
|
public class DescribeBatchPredictionsRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DescribeBatchPredictionsRequest describeBatchPredictionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( describeBatchPredictionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeBatchPredictionsRequest . getFilterVariable ( ) , FILTERVARIABLE_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getEQ ( ) , EQ_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getGT ( ) , GT_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getLT ( ) , LT_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getGE ( ) , GE_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getLE ( ) , LE_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getNE ( ) , NE_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getPrefix ( ) , PREFIX_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getSortOrder ( ) , SORTORDER_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( describeBatchPredictionsRequest . getLimit ( ) , LIMIT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class ScenarioImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setVersion ( String newVersion ) { } }
|
String oldVersion = version ; version = newVersion ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , BpsimPackage . SCENARIO__VERSION , oldVersion , version ) ) ;
|
public class CudaDirectProvider { /** * This method checks specified device for specified amount of memory
* @ param deviceId
* @ param requiredMemory
* @ return */
public boolean pingDeviceForFreeMemory ( Integer deviceId , long requiredMemory ) { } }
|
/* long [ ] totalMem = new long [ 1 ] ;
long [ ] freeMem = new long [ 1 ] ;
JCuda . cudaMemGetInfo ( freeMem , totalMem ) ;
long free = freeMem [ 0 ] ;
long total = totalMem [ 0 ] ;
long used = total - free ;
We don ' t want to allocate memory if it ' s too close to the end of available ram . */
// if ( configuration ! = null & & used > total * configuration . getMaxDeviceMemoryUsed ( ) ) return false ;
/* if ( free + requiredMemory < total * 0.85)
return true ;
else return false ; */
long freeMem = nativeOps . getDeviceFreeMemory ( - 1 ) ; if ( freeMem - requiredMemory < DEVICE_RESERVED_SPACE ) return false ; else return true ;
|
public class Schema {
    /**
     * Validates the given value and throws a ValidationException if errors were
     * found. Delegates to the three-argument overload, passing {@code false} as
     * the final flag.
     * @param correlationId (optional) transaction id to trace execution through
     * call chain.
     * @param value a value to be validated.
     * @throws ValidationException when errors occurred in validation
     * @see ValidationException#throwExceptionIfNeeded(String, List, boolean)
     */
    public void validateAndThrowException(String correlationId, Object value) throws ValidationException {
        validateAndThrowException(correlationId, value, false);
    }
}
|
public class MetricLongBean { /** * Adds a single data point to the metric .
* @ param timestamp
* @ param value
* @ param tags */
public DataPointLongBean addDataPoint ( Date timestamp , long value , Map < String , String > tags ) { } }
|
DataPointLongBean point = new DataPointLongBean ( timestamp , value ) ; for ( Entry < String , String > entry : tags . entrySet ( ) ) { point . addTag ( entry . getKey ( ) , entry . getValue ( ) ) ; } dataPoints . add ( point ) ; return point ;
|
public class DefaultFactory { /** * This method was designed to be called just one time at application startup .
* This could be done in a single static block of a single class , that perhaps
* grabbed the name of the Factory impl from a - D parameter or similar .
* If this method detects that its already been called , it throws a runtime exception .
* I did that so the developer will know whether the " worst case " has happened ( ie , when multiple code locations are trying to set the factory ) .
* @ param myFactory */
public static void setFactory ( IFactory myFactory ) { } }
|
if ( INSTANCE == null ) { INSTANCE = myFactory ; } else { throw new RuntimeException ( "Factory has already been set to value [" + INSTANCE . getClass ( ) . getName ( ) + "]" ) ; }
|
public class OAuth1aToken { /** * Returns additional ( or , user - defined ) parameters . */
public Map < String , String > additionals ( ) { } }
|
final ImmutableMap . Builder < String , String > builder = ImmutableMap . builder ( ) ; for ( Entry < String , String > e : params . entrySet ( ) ) { if ( ! DEFINED_PARAM_KEYS . contains ( e . getKey ( ) ) ) { builder . put ( e . getKey ( ) , e . getValue ( ) ) ; } } return builder . build ( ) ;
|
public class Messenger {
    /**
     * Notifies the engine that a user's profile screen was opened by posting a
     * PeerInfoOpened event for the user peer on the events bus.
     * MUST be called on profile open.
     * @param uid user's Id
     */
    @ObjectiveCName("onProfileOpenWithUid:")
    public void onProfileOpen(int uid) {
        modules.getEvents().post(new PeerInfoOpened(Peer.user(uid)));
    }
}
|
public class JsAPIs { /** * 创建微信卡券JSAPI签名
* @ param wxCardAPISignature
* @ return */
public WxCardAPISignature createWxCardJsAPISignature ( WxCardAPISignature wxCardAPISignature ) { } }
|
if ( wxCardAPITicket == null || wxCardAPITicket . expired ( ) ) { getWxCardAPITicket ( ) ; } long timestamp = System . currentTimeMillis ( ) / 1000 ; String nonce = RandomStringGenerator . getRandomStringByLength ( 16 ) ; String ticket = wxCardAPITicket . getTicket ( ) ; List < String > parameters = new ArrayList < > ( ) ; if ( wxCardAPISignature . isChooseCard ( ) ) { parameters . add ( wxClient . getClientId ( ) ) ; } parameters . add ( ticket ) ; parameters . add ( wxCardAPISignature . getCardId ( ) ) ; parameters . add ( nonce ) ; parameters . add ( String . valueOf ( timestamp ) ) ; if ( ! ( wxCardAPISignature . getCardType ( ) == null || "" . equals ( wxCardAPISignature . getCardType ( ) ) ) ) { parameters . add ( wxCardAPISignature . getCardType ( ) ) ; } if ( ! ( wxCardAPISignature . getCode ( ) == null || "" . equals ( wxCardAPISignature . getCode ( ) ) ) ) { parameters . add ( wxCardAPISignature . getCode ( ) ) ; } if ( ! ( wxCardAPISignature . getBalance ( ) == null || "" . equals ( wxCardAPISignature . getBalance ( ) ) ) ) { parameters . add ( wxCardAPISignature . getBalance ( ) ) ; } if ( ! ( wxCardAPISignature . getOpenId ( ) == null || "" . equals ( wxCardAPISignature . getOpenId ( ) ) ) ) { parameters . add ( wxCardAPISignature . getOpenId ( ) ) ; } if ( ! ( wxCardAPISignature . getLocationId ( ) == null || "" . equals ( wxCardAPISignature . getLocationId ( ) ) ) ) { parameters . add ( wxCardAPISignature . getLocationId ( ) ) ; } try { String signature = SHA1 . getSHA1 ( ( String [ ] ) parameters . toArray ( ) ) ; wxCardAPISignature . setNonce ( nonce ) ; wxCardAPISignature . setTimestamp ( timestamp ) ; wxCardAPISignature . setSignature ( signature ) ; return wxCardAPISignature ; } catch ( AesException e ) { logger . error ( "createWxCardJsAPISignature failed" , e ) ; throw new WxRuntimeException ( 999 , e . getMessage ( ) ) ; }
|
public class SepaUtil { /** * Formatiert den XML - Kalender im angegebenen Format .
* @ param cal der Kalender .
* @ param format das zu verwendende Format . Fuer Beispiele siehe
* { @ link SepaUtil # DATE _ FORMAT }
* { @ link SepaUtil # DATETIME _ FORMAT }
* Wenn keines angegeben ist , wird per Default { @ link SepaUtil # DATE _ FORMAT } verwendet .
* @ return die String das formatierte Datum . */
public static String format ( XMLGregorianCalendar cal , String format ) { } }
|
if ( cal == null ) return null ; if ( format == null ) format = DATE_FORMAT ; SimpleDateFormat df = new SimpleDateFormat ( format ) ; return df . format ( cal . toGregorianCalendar ( ) . getTime ( ) ) ;
|
public class ExtraDataBeaconTracker {
    /**
     * The following code is for dealing with merging data fields in beacons.
     * <p>
     * If the given beacon is itself an extra-data frame, it is merged into the
     * already-tracked beacons and not returned. Otherwise the beacon is
     * registered under its key; when a beacon was already tracked for that key,
     * the previously seen extra data fields are copied onto the new sighting so
     * the merged view survives across scan cycles.
     *
     * @param beacon the beacon just detected
     * @return the (possibly enriched) beacon, or {@code null} if it was an
     *         extra-data frame that was merged into existing entries
     */
    @Nullable
    private Beacon trackGattBeacon(@NonNull Beacon beacon) {
        if (beacon.isExtraBeaconData()) {
            // Pure extra-data frame: merge into tracked beacons, don't track it itself.
            updateTrackedBeacons(beacon);
            return null;
        }
        String key = getBeaconKey(beacon);
        HashMap<Integer, Beacon> matchingTrackedBeacons = mBeaconsByKey.get(key);
        if (null == matchingTrackedBeacons) {
            matchingTrackedBeacons = new HashMap<>();
        } else {
            // Carry previously merged extra data fields over to the fresh sighting.
            Beacon trackedBeacon = matchingTrackedBeacons.values().iterator().next();
            beacon.setExtraDataFields(trackedBeacon.getExtraDataFields());
        }
        matchingTrackedBeacons.put(beacon.hashCode(), beacon);
        mBeaconsByKey.put(key, matchingTrackedBeacons);
        return beacon;
    }
}
|
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcDistributionCircuit from the
     * registered Ifc4 package (classifier index 180).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcDistributionCircuit() {
        if (ifcDistributionCircuitEClass == null) {
            // Look the classifier up once from the global EMF package registry.
            ifcDistributionCircuitEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(180);
        }
        return ifcDistributionCircuitEClass;
    }
}
|
import java.lang.Math;

class OddRootElements {
    /**
     * Calculate the count of numbers with an odd number of factors in the
     * inclusive range [start, end].
     * <p>
     * A positive integer has an odd number of divisors exactly when it is a
     * perfect square, so the answer is the number of perfect squares in the
     * range: floorSqrt(end) - floorSqrt(start - 1).
     * <p>
     * Generalized to tolerate non-positive bounds: values below 1 cannot be
     * perfect squares and are ignored (the original relied on (int) of a NaN
     * square root for this), and an empty range yields 0.
     *
     * Args:
     *   start: Minimum range value
     *   end: Maximum range value
     * Returns:
     *   int: The total count of numbers with odd factors
     * Examples:
     *   countOddRootElements(5, 100) == 8
     *   countOddRootElements(8, 65) == 6
     *   countOddRootElements(2, 5) == 1
     */
    public static int countOddRootElements(int start, int end) {
        // Only positive integers can be perfect squares; clamp the lower bound.
        int lo = Math.max(start, 1);
        if (end < lo) {
            return 0; // empty range
        }
        return floorSqrt(end) - floorSqrt(lo - 1);
    }

    /** Integer floor of the square root of a non-negative int. */
    private static int floorSqrt(int n) {
        int r = (int) Math.sqrt((double) n);
        // Guard against floating-point rounding at perfect-square boundaries;
        // use long arithmetic so (r + 1)^2 cannot overflow.
        while ((long) r * r > n) {
            r--;
        }
        while ((long) (r + 1) * (r + 1) <= n) {
            r++;
        }
        return r;
    }
}
|
public class View {
    /**
     * An API REST method to get the allowed {@link TopLevelItem}s and its categories.
     * <p>
     * Builds one metadata map per creatable item type visible to the current
     * user, grouped into {@link Category} buckets; when {@code iconStyle} is
     * non-blank, qualified icon URLs are resolved through a Jelly context.
     * @return A {@link Categories} entity that is shown as JSON file.
     */
    @Restricted(DoNotUse.class)
    public Categories doItemCategories(StaplerRequest req, StaplerResponse rsp, @QueryParameter String iconStyle) throws IOException, ServletException {
        getOwner().checkPermission(Item.CREATE);
        // The category list depends on the current user's permissions, so forbid caching.
        rsp.addHeader("Cache-Control", "no-cache, no-store, must-revalidate");
        rsp.addHeader("Pragma", "no-cache");
        rsp.addHeader("Expires", "0");
        Categories categories = new Categories();
        int order = 0;
        JellyContext ctx;
        if (StringUtils.isNotBlank(iconStyle)) {
            // The Jelly context is only needed to resolve qualified icon URLs.
            ctx = new JellyContext();
            ctx.setVariable("resURL", req.getContextPath() + Jenkins.RESOURCE_PATH);
        } else {
            ctx = null;
        }
        // Walk every item type the current user may create in this item group.
        for (TopLevelItemDescriptor descriptor : DescriptorVisibilityFilter.apply(getOwner().getItemGroup(), Items.all(Jenkins.getAuthentication(), getOwner().getItemGroup()))) {
            ItemCategory ic = ItemCategory.getCategory(descriptor);
            Map<String, Serializable> metadata = new HashMap<>();
            // Information about Item.
            metadata.put("class", descriptor.getId());
            metadata.put("order", ++order);
            metadata.put("displayName", descriptor.getDisplayName());
            metadata.put("description", descriptor.getDescription());
            metadata.put("iconFilePathPattern", descriptor.getIconFilePathPattern());
            String iconClassName = descriptor.getIconClassName();
            if (StringUtils.isNotBlank(iconClassName)) {
                metadata.put("iconClassName", iconClassName);
                if (ctx != null) {
                    Icon icon = IconSet.icons.getIconByClassSpec(StringUtils.join(new String[]{iconClassName, iconStyle}, " "));
                    if (icon != null) {
                        metadata.put("iconQualifiedUrl", icon.getQualifiedUrl(ctx));
                    }
                }
            }
            // Group the item metadata under its category, creating the category on first use.
            Category category = categories.getItem(ic.getId());
            if (category != null) {
                category.getItems().add(metadata);
            } else {
                List<Map<String, Serializable>> temp = new ArrayList<>();
                temp.add(metadata);
                category = new Category(ic.getId(), ic.getDisplayName(), ic.getDescription(), ic.getOrder(), ic.getMinToShow(), temp);
                categories.getItems().add(category);
            }
        }
        return categories;
    }
}
|
public class DateTieredCompactionStrategy {
    /**
     * Removes all sstables with max timestamp older than maxSSTableAge.
     * @param sstables all sstables to consider
     * @param maxSSTableAge the age in milliseconds when an SSTable stops participating in compactions;
     * a value of 0 disables the age limit entirely
     * @param now current time. SSTables with max timestamp less than (now - maxSSTableAge) are filtered.
     * @return a list of sstables with the oldest sstables excluded
     */
    @VisibleForTesting
    static Iterable<SSTableReader> filterOldSSTables(List<SSTableReader> sstables, long maxSSTableAge, long now) {
        // An age of 0 means "no cutoff": return everything unfiltered.
        if (maxSSTableAge == 0)
            return sstables;
        final long cutoff = now - maxSSTableAge;
        // Keep only sstables whose newest data is at or after the cutoff (lazy view).
        return Iterables.filter(sstables, new Predicate<SSTableReader>() {
            @Override
            public boolean apply(SSTableReader sstable) {
                return sstable.getMaxTimestamp() >= cutoff;
            }
        });
    }
}
|
public class CommandContext { /** * Parses and verifies the command line options .
* @ throws ParseException */
@ SuppressWarnings ( "unchecked" ) // marshall from apache commons cli
private void parse ( ) throws ParseException { } }
|
_argValues = new HashMap < Argument , String > ( ) ; _varargValues = new ArrayList < String > ( ) ; List < String > argList = _commandLine . getArgList ( ) ; int required = 0 ; boolean hasOptional = false ; boolean hasVarargs = false ; for ( Argument argument : _arguments . getArguments ( ) ) { if ( argument . isRequired ( ) ) { required ++ ; } else { hasOptional = true ; } if ( argument . isVararg ( ) ) { hasVarargs = true ; } } int allowed = hasOptional ? required + 1 : required ; if ( argList . size ( ) < required ) { throw new ParseException ( "Not enough arguments provided. " + required + " required, but only " + argList . size ( ) + " provided." ) ; } if ( ! hasVarargs ) { if ( argList . size ( ) > allowed ) { throw new ParseException ( "Too many arguments provided. Only " + allowed + " allowed, but " + argList . size ( ) + " provided." ) ; } } int index = 0 ; boolean finalArgEncountered = false ; for ( Argument argument : _arguments . getArguments ( ) ) { if ( finalArgEncountered ) throw new IllegalStateException ( "Illegal arguments defined. No additional arguments may be defined after first optional or vararg argument." ) ; if ( argument . isRequired ( ) && ! argument . isVararg ( ) ) { // the normal case
if ( index <= argList . size ( ) ) { _argValues . put ( argument , argList . get ( index ) ) ; } else { throw new IllegalStateException ( "not enough arguments" ) ; // should not happen given above size check
} } else { // it ' s the last argument , either it ' s optional or a vararg
finalArgEncountered = true ; if ( argument . isVararg ( ) ) { _varargValues = argList . subList ( Math . min ( index , argList . size ( ) ) , argList . size ( ) ) ; if ( argument . isRequired ( ) && _varargValues . size ( ) < 1 ) { throw new IllegalStateException ( "not enough arguments" ) ; // should not happen given above size check
} } else { // if it ' s a optional
if ( index < argList . size ( ) ) { _argValues . put ( argument , argList . get ( index ) ) ; } } } index ++ ; }
|
public class Util {
    /**
     * Writes the entire remaining contents of the buffer to the channel. A
     * single write call may consume only part of the buffer (the documentation
     * is vague), so this loops until no bytes remain.
     * @param buffer the data to be written
     * @param channel the channel to which we want to write data
     * @throws IOException if there is a problem writing to the channel
     */
    public static void writeFully(ByteBuffer buffer, WritableByteChannel channel) throws IOException {
        // Each write() advances the buffer's position; keep going until the
        // position reaches the limit.
        while (buffer.remaining() > 0) {
            channel.write(buffer);
        }
    }
}
|
public class OpProfiler {
    /**
     * This method tracks op calls: it bumps the global, per-op-name and
     * per-op-class invocation counters, records op-chaining patterns by
     * comparing this op's input buffer addresses with the previous op's output
     * address, optionally aggregates "not optimal arguments" penalties, and
     * finally notifies all registered listeners.
     * @param op the operation that was just executed
     */
    public void processOpCall(Op op) {
        // total number of invocations
        invocationsCount.incrementAndGet();
        // number of invocations for this specific op
        opCounter.incrementCount(op.opName());
        // number of invocations for specific class
        String opClass = getOpClass(op);
        classCounter.incrementCount(opClass);
        // Chain detection: the op either has no x, or reads in place from the
        // previous op's output (x's address equals lastZ, z == x, no y).
        if (op.x() == null || (op.x() != null && op.x().data().address() == lastZ && op.z() == op.x() && op.y() == null)) {
            // we have possible shift here
            matchingCounter.incrementCount(prevOpMatching + " -> " + opClass);
            matchingCounterDetailed.incrementCount(prevOpMatchingDetailed + " -> " + opClass + " " + op.opName());
        } else {
            matchingCounter.totalsIncrement();
            matchingCounterDetailed.totalsIncrement();
            // Inverted chain: the previous output feeds this op's y operand.
            if (op.y() != null && op.y().data().address() == lastZ) {
                matchingCounterInverted.incrementCount(prevOpMatchingInverted + " -> " + opClass + " " + op.opName());
            } else {
                matchingCounterInverted.totalsIncrement();
            }
        }
        // Remember this op's output address and identity for the next call's chain detection.
        lastZ = op.z() != null ? op.z().data().address() : 0L;
        prevOpMatching = opClass;
        prevOpMatchingDetailed = opClass + " " + op.opName();
        prevOpMatchingInverted = opClass + " " + op.opName();
        updatePairs(op.opName(), opClass);
        // Optionally classify why the operand layout is suboptimal.
        if (config.isNotOptimalArguments()) {
            PenaltyCause[] causes = processOperands(op.x(), op.y(), op.z());
            for (PenaltyCause cause : causes) {
                switch (cause) {
                    case NON_EWS_ACCESS:
                        nonEwsAggregator.incrementCount();
                        break;
                    case STRIDED_ACCESS:
                        stridedAggregator.incrementCount();
                        break;
                    case MIXED_ORDER:
                        mixedOrderAggregator.incrementCount();
                        break;
                    case NONE:
                    default:
                        break;
                }
            }
        }
        // Notify external listeners last, after all counters are up to date.
        for (OpProfilerListener listener : listeners) {
            listener.invoke(op);
        }
    }
}
|
public class RebalanceUtils { /** * Print log to the following logger ( Info level )
* @ param batchId
* @ param taskId
* @ param logger
* @ param message */
public static void printBatchTaskLog ( int batchId , int taskId , Logger logger , String message ) { } }
|
logger . info ( "[Rebalance batch/task id " + batchId + "/" + taskId + "] " + message ) ;
|
public class ScoreFunctions {
    /**
     * Illumina positional score function: a piecewise model of the score as a
     * function of relative position within a read (0.0 = start, 1.0 = end) —
     * a quadratic ramp at the start, a flat plateau of 36 in the middle, and a
     * quartic rise toward the end. The pieces meet at (approximately) 36 at the
     * 0.05 and 0.8 boundaries.
     * @return the Illumina positional score function
     */
    public static ScoreFunction illumina() {
        return new ScoreFunction() {
            @Override
            public double evaluate(final double relativePosition) {
                // TODO: this could use improvement; perhaps re-use quality profiles from ART
                if (relativePosition < 0.05d) {
                    // Quadratic ramp over the first 5% of the read.
                    return 14400.0d * (relativePosition * relativePosition);
                } else if (relativePosition < 0.8d) {
                    // Flat plateau through the middle of the read.
                    return 36.0d;
                } else {
                    // Quartic rise toward the read end (relative to position 1.0).
                    return 22600.0d * Math.pow(relativePosition - 1.0d, 4.0d);
                }
            }
        };
    }
}
|
public class CircularLossyQueue {
    /**
     * Returns an array of the current elements in the queue. The order of elements
     * is in reverse order of the order items were added.
     * @param type
     * destination array; its length determines how many elements are
     * copied and must not exceed the queue capacity
     * @return An array containing the current elements in the queue. The first
     * element of the array is the tail of the queue and the last element is
     * the head of the queue
     * @throws IllegalArgumentException if the array is larger than the queue capacity
     */
    public T[] toArray(@Nonnull final T[] type) {
        if (type.length > m_nMaxSize)
            throw new IllegalArgumentException("Size of array passed in cannot be greater than " + m_nMaxSize);
        // Walk backwards from the most recently written slot so that index 0 of
        // the result is the newest element.
        final int curIndex = _getCurrentIndex();
        for (int k = 0; k < type.length; k++) {
            final int index = _getIndex(curIndex - k);
            type[k] = m_aCircularArray[index].get();
        }
        return type;
    }
}
|
public class SubscriptionManager { /** * Unbinds a listener to a publisher
* @ param source the event publisher
* @ param listener the event receiver */
public < T extends EventListener > void unsubscribe ( EventPublisher source , T listener ) { } }
|
log . debug ( "[unsubscribe] Removing {} --> {}" , source . getClass ( ) . getName ( ) , listener . getClass ( ) . getName ( ) ) ; GenericEventDispatcher < T > dispatcher = ( GenericEventDispatcher < T > ) dispatchers . get ( source ) ; dispatcher . removeListener ( listener ) ;
|
public class Op { /** * Creates a map containing one entry with the specified key and value , and an < i > operation
* expression < / i > on it . Also enables the addition of new entries to the map by means of the
* < tt > and ( . . . ) < / tt > action .
* @ param key the key for the map ' s first entry
* @ param value the value for the map ' s first entry
* @ return an operator , ready for chaining */
public static < K , V > Level0BuildingMapOperator < Map < K , V > , K , V > onMapFor ( final K key , final V value ) { } }
|
final Map < K , V > newTarget = new LinkedHashMap < K , V > ( ) ; newTarget . put ( key , value ) ; return new Level0BuildingMapOperator < Map < K , V > , K , V > ( ExecutionTarget . forOp ( newTarget , Normalisation . MAP ) ) ;
|
public class CmsVfsSitemapService {
    /**
     * Returns the modified list from the current user.<p>
     * The list is stored in the user's additional-info under
     * ADDINFO_ADE_MODIFIED_LIST as a JSON array of structure ids; ids that can
     * no longer be read, or whose resource is not part of the navigation, are
     * silently skipped.
     * @return the modified list, keyed by structure id
     */
    private LinkedHashMap<CmsUUID, CmsClientSitemapEntry> getModifiedList() {
        CmsObject cms = getCmsObject();
        CmsUser user = cms.getRequestContext().getCurrentUser();
        Object obj = user.getAdditionalInfo(ADDINFO_ADE_MODIFIED_LIST);
        LinkedHashMap<CmsUUID, CmsClientSitemapEntry> result = new LinkedHashMap<CmsUUID, CmsClientSitemapEntry>();
        if (obj instanceof String) {
            try {
                JSONArray array = new JSONArray((String) obj);
                for (int i = 0; i < array.length(); i++) {
                    try {
                        CmsUUID modId = new CmsUUID(array.getString(i));
                        CmsResource res = cms.readResource(modId, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED);
                        String sitePath = cms.getSitePath(res);
                        CmsJspNavElement navEntry = getNavBuilder().getNavigationForResource(sitePath, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED);
                        // Only resources still present in the navigation are reported.
                        if (navEntry.isInNavigation()) {
                            CmsClientSitemapEntry modEntry = toClientEntry(navEntry, false);
                            result.put(modId, modEntry);
                        }
                    } catch (Throwable e) {
                        // should never happen, catches wrong or no longer existing values
                        LOG.warn(e.getLocalizedMessage());
                    }
                }
            } catch (Throwable e) {
                // should never happen, catches json parsing
                LOG.warn(e.getLocalizedMessage());
            }
        }
        return result;
    }
}
|
public class MimeType {
    /**
     * Add a parameter.
     * @param sAttribute
     * Parameter name. Must neither be <code>null</code> nor empty and must
     * match {@link MimeTypeParser#isToken(String)}.
     * @param sValue
     * The value to use. May neither be <code>null</code> nor empty. Must
     * not be a valid MIME token.
     * @return this
     */
    @Nonnull
    public MimeType addParameter(@Nonnull @Nonempty final String sAttribute, @Nonnull @Nonempty final String sValue) {
        // Validation of attribute and value is delegated to the MimeTypeParameter
        // constructor and the parameter-object overload.
        return addParameter(new MimeTypeParameter(sAttribute, sValue));
    }
}
|
public class Parser {
    /**
     * 12.12 Labelled Statement
     * <p>
     * Parses "Identifier : Statement" and wraps the parsed statement in a
     * LabelledStatementTree carrying the label name.
     */
    private ParseTree parseLabelledStatement() {
        SourcePosition start = getTreeStartLocation();
        IdentifierToken name = eatId(); // the label identifier
        eat(TokenType.COLON);
        // The labelled tree spans from the label through the statement it labels.
        return new LabelledStatementTree(getTreeLocation(start), name, parseStatement());
    }
}
|
public class FacebookBatcher {
    /**
     * Get a user access token from Facebook. Normally you obtain this from the
     * client-side SDK (javascript, iphone, etc) but if you are driving the OAuth
     * flow manually, this method is the last step.
     * <p>
     * When both {@code code} and {@code redirectUri} are null, the request falls
     * back to the client-credentials grant.
     * see https://developers.facebook.com/docs/authentication/
     */
    public static String getAccessToken(String clientId, String clientSecret, String code, String redirectUri) {
        RequestBuilder call = new RequestBuilder(GRAPH_ENDPOINT + "oauth/access_token", HttpMethod.GET);
        call.setTimeout(10 * 1000); // this is a somewhat crude hack but seems reasonable right now
        call.addParam("client_id", clientId);
        call.addParam("client_secret", clientSecret);
        // NOTE(review): '||' means that if only one of code/redirectUri is
        // non-null, the other is still sent (as null) — confirm whether '&&'
        // was intended here.
        if (code != null || redirectUri != null) {
            call.addParam("code", code);
            call.addParam("redirect_uri", redirectUri);
        } else
            call.addParam("grant_type", "client_credentials");
        try {
            HttpResponse response = call.execute();
            // Yet more Facebook API stupidity; if the response is OK then we parse as urlencoded params,
            // otherwise we must parse as JSON and run through the error detector.
            if (response.getResponseCode() == 200) {
                return URLParser.parseQuery(StringUtils.read(response.getContentStream())).get("access_token");
            } else {
                Later<JsonNode> json = new Now<JsonNode>(new ObjectMapper().readTree(response.getContentStream()));
                new ErrorDetectingWrapper(json).get(); // This should throw an exception
                throw new IllegalStateException("Impossible, this should have been detected as an error: " + json);
            }
        } catch (IOException ex) {
            throw new IOFacebookException(ex);
        }
    }
}
|
public class MediaDescriptorField {
    /**
     * Creates or updates video format using payload number and text format description.
     * The description is expected in the SDP rtpmap shape "encoding-name/clock-rate".
     * @param payload the payload number of the format.
     * @param description text description of the format
     * @return format object (NOTE: returns {@code null} when no format existed for
     * the payload — the newly created format is only added to the list)
     */
    private RTPFormat createVideoFormat(int payload, Text description) {
        Iterator<Text> it = description.split('/').iterator();
        // encoding name
        Text token = it.next();
        token.trim();
        EncodingName name = new EncodingName(token);
        // clock rate
        // TODO: convert to frame rate
        token = it.next();
        token.trim();
        int clockRate = token.toInteger();
        RTPFormat rtpFormat = getFormat(payload);
        if (rtpFormat == null) {
            // No format registered for this payload yet: create and register one.
            formats.add(new RTPFormat(payload, FormatFactory.createVideoFormat(name, clockRate)));
        } else {
            // TODO: recreate format anyway. it is illegal to use clock rate as frame rate
            ((VideoFormat) rtpFormat.getFormat()).setName(name);
            ((VideoFormat) rtpFormat.getFormat()).setFrameRate(clockRate);
        }
        return rtpFormat;
    }
}
|
public class LogRepositorySubManagerImpl {
    /**
     * Configures constraint parameters of the repository.
     * @param maxRepositorySize maximum in bytes of the total sum of repository file sizes the manager should maintain.
     * This limit is ignored if <code>maxRepositorySize</code> is less than or equal to zero.
     * NOTE(review): the code below only treats negative values as "unlimited"
     * ({@code < 0}), while this javadoc says {@code <= 0} — confirm which is intended.
     */
    public synchronized void configure(long maxRepositorySize) {
        // Derive the per-file split size from the overall repository budget.
        this.maxLogFileSize = LogRepositoryManagerImpl.calculateFileSplit(maxRepositorySize);
        // Negative budget disables the file-count limit (-1 = unlimited).
        ivMaxListSize = (maxRepositorySize < 0) ? -1 : maxRepositorySize / maxLogFileSize;
        if (debugLogger.isLoggable(Level.FINE) && LogRepositoryBaseImpl.isDebugEnabled()) {
            debugLogger.logp(Level.FINE, thisClass, "configure", "inMax: " + maxRepositorySize + " outMax: " + this.maxLogFileSize + " Tp: " + managedType + " MaxFilesInList: " + ivMaxListSize);
        }
    }
}
|
public class AVFlymePushMessageReceiver {
    /**
     * Handles the device registration event.
     * Invoked as the callback of the new-style subscription API
     * PushManager.register(context, appId, appKey); when a non-empty pushId is
     * returned it is stored on the AVInstallation.
     */
    @Override
    public void onRegisterStatus(Context context, com.meizu.cloud.pushsdk.platform.message.RegisterStatus registerStatus) {
        if (null == context || null == registerStatus) {
            return;
        }
        LOGGER.d("register successed, pushId=" + registerStatus.getPushId());
        String pushId = registerStatus.getPushId();
        // Only persist a usable (non-empty) push id.
        if (!StringUtil.isEmpty(pushId)) {
            updateAVInstallation(pushId);
        }
    }
}
|
public class PasswordHandler {
    /**
     * Replaces the content of all configured XML elements with a ---replaced--- string.
     * For every configured tag name, both open/close element pairs and
     * self-closing elements are masked in the event content via regex
     * substitution; the event is modified in place.
     * @param event the event whose content is filtered
     */
    public void handleEvent(Event event) {
        LOG.fine("PasswordHandler called");
        if (tagnames == null || tagnames.size() == 0) {
            LOG.warning("Password filter is active but there is no filter tagname configured!");
        }
        if (tagnames != null && event.getContent() != null && event.getContent().length() > 0) {
            LOG.fine("Content before: " + event.getContent());
            for (String tagname : tagnames) {
                // Mask <tagname ...>value</tagname> element pairs.
                event.setContent(event.getContent().replaceAll("<([^>]*)" + tagname + "([^>]*)>([^<]*)<([^>]*)/([^>]*)" + tagname + "([^>]*)>", REPLACE));
                // Mask self-closing <tagname .../> elements.
                event.setContent(event.getContent().replaceAll("<([^>]*)" + tagname + "([^>]*)/([^>]*)>", REPLACE));
            }
            LOG.fine("Content after: " + event.getContent());
        }
    }
}
|
public class Flowable {
    /**
     * Returns a Flowable that emits the items in a specified {@link Publisher} before it begins to emit
     * items emitted by the source Publisher.
     * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/startWith.o.png" alt="">
     * <dl>
     * <dt><b>Backpressure:</b></dt>
     * <dd>The operator honors backpressure from downstream. Both this and the {@code other} {@code Publisher}s
     * are expected to honor backpressure as well. If any of them violates this rule, it <em>may</em> throw an
     * {@code IllegalStateException} when the source {@code Publisher} completes.</dd>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code startWith} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @param other
     * a Publisher that contains the items you want the modified Publisher to emit first
     * @return a Flowable that emits the items in the specified {@link Publisher} and then emits the items
     * emitted by the source Publisher
     * @see <a href="http://reactivex.io/documentation/operators/startwith.html">ReactiveX operators documentation: StartWith</a>
     */
    @SuppressWarnings("unchecked")
    @CheckReturnValue
    @BackpressureSupport(BackpressureKind.FULL)
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Flowable<T> startWith(Publisher<? extends T> other) {
        ObjectHelper.requireNonNull(other, "other is null");
        // Prepending is simply concatenation with 'other' first.
        return concatArray(other, this);
    }
}
|
public class HdfsFileStatus { /** * Convert an HdfsFileStatus to a FileStatus
* @ param stat an HdfsFileStatus
* @ param src parent path in string representation
* @ return a FileStatus object */
public static FileStatus toFileStatus ( HdfsFileStatus stat , String src ) { } }
|
if ( stat == null ) { return null ; } return new FileStatus ( stat . getLen ( ) , stat . isDir ( ) , stat . getReplication ( ) , stat . getBlockSize ( ) , stat . getModificationTime ( ) , stat . getAccessTime ( ) , stat . getPermission ( ) , stat . getOwner ( ) , stat . getGroup ( ) , stat . getFullPath ( new Path ( src ) ) ) ; // full path
|
public class ConfigurationImpl {
    /**
     * Depending upon the command line options provided by the user,
     * configure the output generation environment.
     * @param options The array of option names and values.
     */
    @Override
    public void setSpecificDocletOptions(String[][] options) {
        // Each options[oi] is {name, value...}; match on the lower-cased name.
        for (int oi = 0; oi < options.length; ++oi) {
            String[] os = options[oi];
            String opt = StringUtils.toLowerCase(os[0]);
            if (opt.equals("-footer")) {
                footer = os[1];
            } else if (opt.equals("-header")) {
                header = os[1];
            } else if (opt.equals("-packagesheader")) {
                packagesheader = os[1];
            } else if (opt.equals("-doctitle")) {
                doctitle = os[1];
            } else if (opt.equals("-windowtitle")) {
                // Window titles must be plain text: strip all HTML tags.
                windowtitle = os[1].replaceAll("\\<.*?>", "");
            } else if (opt.equals("-top")) {
                top = os[1];
            } else if (opt.equals("-bottom")) {
                bottom = os[1];
            } else if (opt.equals("-helpfile")) {
                helpfile = os[1];
            } else if (opt.equals("-stylesheetfile")) {
                stylesheetfile = os[1];
            } else if (opt.equals("-charset")) {
                charset = os[1];
            } else if (opt.equals("-xdocrootparent")) {
                docrootparent = os[1];
            } else if (opt.equals("-nohelp")) {
                nohelp = true;
            } else if (opt.equals("-splitindex")) {
                splitindex = true;
            } else if (opt.equals("-noindex")) {
                createindex = false;
            } else if (opt.equals("-use")) {
                classuse = true;
            } else if (opt.equals("-notree")) {
                createtree = false;
            } else if (opt.equals("-nodeprecatedlist")) {
                nodeprecatedlist = true;
            } else if (opt.equals("-nonavbar")) {
                nonavbar = true;
            } else if (opt.equals("-nooverview")) {
                nooverview = true;
            } else if (opt.equals("-overview")) {
                overview = true;
            } else if (opt.equals("-xdoclint")) {
                doclintOpts.add(null);
            } else if (opt.startsWith("-xdoclint:")) {
                // Everything after the ':' is a doclint group specifier.
                doclintOpts.add(opt.substring(opt.indexOf(":") + 1));
            } else if (opt.equals("--allow-script-in-comments")) {
                allowScriptInComments = true;
            }
        }
        if (root.specifiedClasses().length > 0) {
            // Collect the distinct packages of the explicitly specified classes.
            Map<String, PackageDoc> map = new HashMap<String, PackageDoc>();
            PackageDoc pd;
            ClassDoc[] classes = root.classes();
            for (int i = 0; i < classes.length; i++) {
                pd = classes[i].containingPackage();
                if (!map.containsKey(pd.name())) {
                    map.put(pd.name(), pd);
                }
            }
        }
        setCreateOverview();
        setTopFile(root);
        if (root instanceof RootDocImpl) {
            ((RootDocImpl) root).initDocLint(doclintOpts, tagletManager.getCustomTagNames());
            JavaScriptScanner jss = ((RootDocImpl) root).initJavaScriptScanner(isAllowScriptInComments());
            if (jss != null) {
                // In a more object-oriented world, this would be done by methods on the Option objects.
                // Note that -windowtitle silently removes any and all HTML elements, and so does not need
                // to be handled here.
                checkJavaScript(jss, "-header", header);
                checkJavaScript(jss, "-footer", footer);
                checkJavaScript(jss, "-top", top);
                checkJavaScript(jss, "-bottom", bottom);
                checkJavaScript(jss, "-doctitle", doctitle);
                checkJavaScript(jss, "-packagesheader", packagesheader);
            }
        }
    }
}
|
public class Promises { /** * Returns a { @ link BiPredicate } which checks if
* { @ code Promise } wasn ' t completed exceptionally . */
@ Contract ( value = " -> new" , pure = true ) @ NotNull public static < T > BiPredicate < T , Throwable > isResult ( ) { } }
|
return ( $ , e ) -> e == null ;
|
public class Assert {
    /**
     * Asserts that an argument is valid.
     * The assertion holds if and only if {@code valid} is {@literal true}.
     * For example, application code might assert that:
     * <pre>
     * <code>
     * Assert.argument(age &gt;= 21, "Person must be 21 years of age to enter");
     * </code>
     * </pre>
     * @param valid {@link Boolean} value resulting from the evaluation of the criteria used by the application
     * to determine the validity of the argument.
     * @param message {@link String} containing the message for the {@link IllegalArgumentException} thrown
     * if the assertion fails.
     * @param arguments array of {@link Object arguments} used as placeholder values
     * when formatting the {@link String message}.
     * @throws java.lang.IllegalArgumentException if the argument is invalid.
     * @see #argument(Boolean, RuntimeException)
     */
    public static void argument(Boolean valid, String message, Object... arguments) {
        // Delegate to the RuntimeException-based overload with a formatted message.
        argument(valid, new IllegalArgumentException(format(message, arguments)));
    }
}
|
public class DerbyDdlParser { /** * { @ inheritDoc }
* @ see org . modeshape . sequencer . ddl . StandardDdlParser # parseAlterTableStatement ( org . modeshape . sequencer . ddl . DdlTokenStream ,
* org . modeshape . sequencer . ddl . node . AstNode ) */
@ Override protected AstNode parseAlterTableStatement ( DdlTokenStream tokens , AstNode parentNode ) throws ParsingException { } }
|
assert tokens != null ; assert parentNode != null ; markStartOfStatement ( tokens ) ; // ALTER TABLE table - Name
// ADD COLUMN column - definition |
// ADD CONSTRAINT clause |
// DROP [ COLUMN ] column - name [ CASCADE | RESTRICT ] |
// DROP { PRIMARY KEY | FOREIGN KEY constraint - name | UNIQUE constraint - name | CHECK constraint - name | CONSTRAINT
// constraint - name } |
// ALTER [ COLUMN ] column - alteration |
// LOCKSIZE { ROW | TABLE }
tokens . consume ( ALTER , TABLE ) ; // consumes ' ALTER TABLE '
String tableName = parseName ( tokens ) ; AstNode alterTableNode = nodeFactory ( ) . node ( tableName , parentNode , TYPE_ALTER_TABLE_STATEMENT ) ; // System . out . println ( " > > PARSIN ALTER STATEMENT > > TABLE Name = " + tableName ) ;
if ( tokens . canConsume ( "ADD" ) ) { if ( isTableConstraint ( tokens ) ) { parseTableConstraint ( tokens , alterTableNode , true ) ; } else { // This segment can also be enclosed in " ( ) " brackets to handle multiple ColumnDefinition ADDs
if ( tokens . matches ( L_PAREN ) ) { parseColumns ( tokens , alterTableNode , true ) ; } else { parseSingleTerminatedColumnDefinition ( tokens , alterTableNode , true ) ; } } } else if ( tokens . canConsume ( "DROP" ) ) { // DROP { PRIMARY KEY | FOREIGN KEY constraint - name | UNIQUE constraint - name | CHECK constraint - name | CONSTRAINT
// constraint - name }
if ( tokens . canConsume ( "PRIMARY" , "KEY" ) ) { String name = parseName ( tokens ) ; // constraint name
nodeFactory ( ) . node ( name , alterTableNode , TYPE_DROP_TABLE_CONSTRAINT_DEFINITION ) ; } else if ( tokens . canConsume ( "FOREIGN" , "KEY" ) ) { String name = parseName ( tokens ) ; // constraint name
nodeFactory ( ) . node ( name , alterTableNode , TYPE_DROP_TABLE_CONSTRAINT_DEFINITION ) ; } else if ( tokens . canConsume ( "UNIQUE" ) ) { String name = parseName ( tokens ) ; // constraint name
nodeFactory ( ) . node ( name , alterTableNode , TYPE_DROP_TABLE_CONSTRAINT_DEFINITION ) ; } else if ( tokens . canConsume ( "CHECK" ) ) { String name = parseName ( tokens ) ; // constraint name
nodeFactory ( ) . node ( name , alterTableNode , TYPE_DROP_TABLE_CONSTRAINT_DEFINITION ) ; } else if ( tokens . canConsume ( "CONSTRAINT" ) ) { String name = parseName ( tokens ) ; // constraint name
nodeFactory ( ) . node ( name , alterTableNode , TYPE_DROP_TABLE_CONSTRAINT_DEFINITION ) ; } else { // DROP [ COLUMN ] column - name [ CASCADE | RESTRICT ]
tokens . canConsume ( "COLUMN" ) ; // " COLUMN " is optional
String columnName = parseName ( tokens ) ; AstNode columnNode = nodeFactory ( ) . node ( columnName , alterTableNode , TYPE_DROP_COLUMN_DEFINITION ) ; if ( tokens . canConsume ( DropBehavior . CASCADE ) ) { columnNode . setProperty ( StandardDdlLexicon . DROP_BEHAVIOR , DropBehavior . CASCADE ) ; } else if ( tokens . canConsume ( DropBehavior . RESTRICT ) ) { columnNode . setProperty ( StandardDdlLexicon . DROP_BEHAVIOR , DropBehavior . RESTRICT ) ; } } } else if ( tokens . canConsume ( "ALTER" ) ) { // column - alteration
// ALTER [ COLUMN ] column - Name SET DATA TYPE VARCHAR ( integer ) |
// ALTER [ COLUMN ] column - Name SET DATA TYPE VARCHAR FOR BIT DATA ( integer ) |
// ALTER [ COLUMN ] column - name SET INCREMENT BY integer - constant |
// ALTER [ COLUMN ] column - name RESTART WITH integer - constant |
// ALTER [ COLUMN ] column - name [ NOT ] NULL |
// ALTER [ COLUMN ] column - name [ WITH | SET ] DEFAULT default - value |
// ALTER [ COLUMN ] column - name DROP DEFAULT
tokens . canConsume ( "COLUMN" ) ; String alterColumnName = parseName ( tokens ) ; AstNode columnNode = nodeFactory ( ) . node ( alterColumnName , alterTableNode , TYPE_ALTER_COLUMN_DEFINITION ) ; if ( tokens . matches ( "DEFAULT" ) ) { parseDefaultClause ( tokens , columnNode ) ; } else if ( tokens . canConsume ( "SET" ) ) { if ( tokens . canConsume ( "DATA" , "TYPE" ) ) { DataType datatype = getDatatypeParser ( ) . parse ( tokens ) ; columnNode . setProperty ( StandardDdlLexicon . DATATYPE_NAME , datatype . getName ( ) ) ; if ( datatype . getLength ( ) >= 0 ) { columnNode . setProperty ( StandardDdlLexicon . DATATYPE_LENGTH , datatype . getLength ( ) ) ; } if ( datatype . getPrecision ( ) >= 0 ) { columnNode . setProperty ( StandardDdlLexicon . DATATYPE_PRECISION , datatype . getPrecision ( ) ) ; } if ( datatype . getScale ( ) >= 0 ) { columnNode . setProperty ( StandardDdlLexicon . DATATYPE_SCALE , datatype . getScale ( ) ) ; } } else if ( tokens . canConsume ( "INCREMENT" ) ) { tokens . consume ( "BY" , TokenStream . ANY_VALUE ) ; } if ( tokens . matches ( "DEFAULT" ) ) { parseDefaultClause ( tokens , columnNode ) ; } } else if ( tokens . canConsume ( "WITH" ) ) { parseDefaultClause ( tokens , columnNode ) ; } else { tokens . canConsume ( "RESTART" , "WITH" , TokenStream . ANY_VALUE ) ; tokens . canConsume ( "DROP" , "DEFAULT" ) ; if ( tokens . canConsume ( "NOT" , "NULL" ) ) { columnNode . setProperty ( StandardDdlLexicon . NULLABLE , "NOT NULL" ) ; } else if ( tokens . canConsume ( "NULL" ) ) { columnNode . setProperty ( StandardDdlLexicon . NULLABLE , "NULL" ) ; } } } else if ( tokens . canConsume ( "LOCKSIZE" ) ) { tokens . canConsume ( "ROWS" ) ; tokens . canConsume ( "TABLE" ) ; } markEndOfStatement ( tokens , alterTableNode ) ; return alterTableNode ;
|
public class SchemasInner { /** * Gets an integration account schema .
* @ param resourceGroupName The resource group name .
* @ param integrationAccountName The integration account name .
* @ param schemaName The integration account schema name .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the IntegrationAccountSchemaInner object */
public Observable < IntegrationAccountSchemaInner > getAsync ( String resourceGroupName , String integrationAccountName , String schemaName ) { } }
|
return getWithServiceResponseAsync ( resourceGroupName , integrationAccountName , schemaName ) . map ( new Func1 < ServiceResponse < IntegrationAccountSchemaInner > , IntegrationAccountSchemaInner > ( ) { @ Override public IntegrationAccountSchemaInner call ( ServiceResponse < IntegrationAccountSchemaInner > response ) { return response . body ( ) ; } } ) ;
|
public class KeyVaultClientCustomImpl { /** * Gets the public part of a stored key . The get key operation is applicable to
* all key types . If the requested key is symmetric , then no key material is
* released in the response . Authorization : Requires the keys / get permission .
* @ param vaultBaseUrl
* The vault name , e . g . https : / / myvault . vault . azure . net
* @ param keyName
* The name of the key
* @ param serviceCallback
* the async ServiceCallback to handle successful and failed
* responses .
* @ return the { @ link ServiceFuture } object */
public ServiceFuture < KeyBundle > getKeyAsync ( String vaultBaseUrl , String keyName , final ServiceCallback < KeyBundle > serviceCallback ) { } }
|
return getKeyAsync ( vaultBaseUrl , keyName , "" , serviceCallback ) ;
|
public class TabbedPane { /** * Must be called when you want to update tab title . If tab is dirty an ' * ' is added before title . This is called automatically
* if using { @ link Tab # setDirty ( boolean ) }
* @ param tab that title will be updated */
public void updateTabTitle ( Tab tab ) { } }
|
TabButtonTable table = tabsButtonMap . get ( tab ) ; if ( table == null ) { throwNotBelongingTabException ( tab ) ; } table . button . setText ( getTabTitle ( tab ) ) ;
|
public class CmsSiteManagerImpl { /** * Returns the current site for the provided OpenCms user context object . < p >
* In the unlikely case that no site matches with the provided OpenCms user context ,
* the default site is returned . < p >
* @ param cms the OpenCms user context object to check for the site
* @ return the current site for the provided OpenCms user context object */
public CmsSite getCurrentSite ( CmsObject cms ) { } }
|
CmsSite site = getSiteForSiteRoot ( cms . getRequestContext ( ) . getSiteRoot ( ) ) ; return ( site == null ) ? m_defaultSite : site ;
|
public class VersionInfo { /** * Returns an instance of VersionInfo with the argument version .
* @ param version version String in the format of " major . minor . milli . micro "
* or " major . minor . milli " or " major . minor " or " major " ,
* where major , minor , milli , micro are non - negative numbers
* & lt ; = 255 . If the trailing version numbers are
* not specified they are taken as 0s . E . g . Version " 3.1 " is
* equivalent to " 3.1.0.0 " .
* @ return an instance of VersionInfo with the argument version .
* @ exception IllegalArgumentException when the argument version
* is not in the right format */
public static VersionInfo getInstance ( String version ) { } }
|
int length = version . length ( ) ; int array [ ] = { 0 , 0 , 0 , 0 } ; int count = 0 ; int index = 0 ; while ( count < 4 && index < length ) { char c = version . charAt ( index ) ; if ( c == '.' ) { count ++ ; } else { c -= '0' ; if ( c < 0 || c > 9 ) { throw new IllegalArgumentException ( INVALID_VERSION_NUMBER_ ) ; } array [ count ] *= 10 ; array [ count ] += c ; } index ++ ; } if ( index != length ) { throw new IllegalArgumentException ( "Invalid version number: String '" + version + "' exceeds version format" ) ; } for ( int i = 0 ; i < 4 ; i ++ ) { if ( array [ i ] < 0 || array [ i ] > 255 ) { throw new IllegalArgumentException ( INVALID_VERSION_NUMBER_ ) ; } } return getInstance ( array [ 0 ] , array [ 1 ] , array [ 2 ] , array [ 3 ] ) ;
|
public class HeaderRegexCondition { /** * { @ inheritDoc } */
@ Override public final boolean checkCondition ( final SipServletRequest initialRequest , final DefaultSipApplicationRouterInfo info ) { } }
|
boolean enabled = true ; for ( String hAux : info . getHeaderPatternMap ( ) . keySet ( ) ) { String headerValue = initialRequest . getHeader ( hAux ) ; // Pattern is ThreadSafe as doc by Java doc
// Matcher is not threadsafe , but a new one is created every time .
// Anyway if performance is degraded , Threadlocal / pool may be used
Pattern headerPattern = info . getHeaderPatternMap ( ) . get ( hAux ) ; Matcher matcher = headerPattern . matcher ( headerValue ) ; enabled = enabled && matcher . find ( ) ; } return enabled ;
|
public class EntityHelper { /** * 通过反射设置MappedStatement的keyProperties字段值
* @ param pkColumns 所有的主键字段
* @ param ms MappedStatement */
public static void setKeyProperties ( Set < EntityColumn > pkColumns , MappedStatement ms ) { } }
|
if ( pkColumns == null || pkColumns . isEmpty ( ) ) { return ; } List < String > keyProperties = new ArrayList < String > ( pkColumns . size ( ) ) ; for ( EntityColumn column : pkColumns ) { keyProperties . add ( column . getProperty ( ) ) ; } MetaObjectUtil . forObject ( ms ) . setValue ( "keyProperties" , keyProperties . toArray ( new String [ ] { } ) ) ;
|
public class ApplicationMonitor {
    /**
     * Adds an application's information to the update monitor, registering file-change
     * listeners for the parts of the application the handler asks to be monitored
     * (or the whole application when no monitoring information is supplied), and
     * starting them if scanning is currently enabled.
     *
     * @param installInfo the install information for the application to monitor
     */
    @FFDCIgnore(value = UnableToAdaptException.class)
    public void addApplication(ApplicationInstallInfo installInfo) {
        // ... and now create the new ... start by asking the handler what needs monitoring
        final Collection<Notification> notificationsToMonitor;
        final boolean listenForRootStructuralChanges;
        ApplicationMonitoringInformation ami = installInfo.getApplicationMonitoringInformation();
        if (ami != null) {
            notificationsToMonitor = ami.getNotificationsToMonitor();
            listenForRootStructuralChanges = ami.isListeningForRootStructuralChanges();
        } else {
            // No monitoring info from the handler: fall back to watching everything below.
            notificationsToMonitor = null;
            listenForRootStructuralChanges = true;
        }
        try {
            // Now create the listeners for these notifications
            ApplicationListeners listeners = new ApplicationListeners(installInfo.getUpdateHandler(), _executorService);
            /*
             * Go through all of the notifications to monitor and create a listener for it. Note we also always create a
             * different type of listener for root so if the app is deleted we know about it. Optionally (based on the
             * handler) this might also monitor for files being added or removed from the root to trigger an update,
             * i.e. if a WAR gets added to the root of an EAR.
             */
            if (notificationsToMonitor != null) {
                for (Notification notificationToMonitor : notificationsToMonitor) {
                    ApplicationListener listener = new ApplicationListener(notificationToMonitor, listeners, installInfo);
                    listeners.addListener(listener);
                }
                // If the handler did request monitoring then we still need a non-recursive handler to check root
                listeners.addListener(new RootApplicationListener(installInfo.getContainer(), listenForRootStructuralChanges, listeners));
            } else {
                /*
                 * If the handler didn't give us any information about what to monitor then monitor the whole
                 * application, note we use another type of listener again here that will monitor for root deletions
                 * or updates to any part of the application
                 */
                listeners.addListener(new CompleteApplicationListener(installInfo.getContainer(), listeners));
            }
            // Replace any previous registration for this pid and stop the superseded listeners.
            ApplicationListeners old = _appListeners.put(installInfo.getPid(), listeners);
            if (old != null) {
                old.stopListeners(true);
            }
            // If we're actively scanning, start the new listener
            ApplicationMonitorConfig config = _config.get();
            if (config.getUpdateTrigger() != UpdateTrigger.DISABLED) {
                listeners.startListeners(config.getPollingRate(), config.getUpdateTrigger() == UpdateTrigger.MBEAN);
            }
        } catch (UnableToAdaptException e) {
            // Ignore, we just won't monitor this application but do put out a warning message
            AppMessageHelper.get(installInfo.getHandler()).warning("APPLICATION_MONITORING_FAIL", installInfo.getName());
        }
    }
}
|
public class ViewPoolProcessor {
    /**
     * Clear all transient components not created by the facelets algorithm. In this way,
     * we ensure the component tree does not have any changes done after markInitialState.
     * A component is removed when it is transient AND lacks the facelets MARK_CREATED
     * attribute; otherwise its subtree is scanned recursively.
     *
     * @param context the current faces context
     * @param component the root of the subtree to clean
     */
    private void clearTransientAndNonFaceletComponents(final FacesContext context, final UIComponent component) {
        // Scan children
        int childCount = component.getChildCount();
        if (childCount > 0) {
            for (int i = 0; i < childCount; i++) {
                UIComponent child = component.getChildren().get(i);
                if (child != null && child.isTransient() && child.getAttributes().get(ComponentSupport.MARK_CREATED) == null) {
                    // Removing by index while iterating: step both the cursor and the
                    // cached count back so the next sibling is not skipped.
                    component.getChildren().remove(i);
                    i--;
                    childCount--;
                } else {
                    // NOTE(review): this branch dereferences 'child' without a null check;
                    // presumably JSF child lists never contain null — confirm.
                    if (child.getChildCount() > 0 || !child.getFacets().isEmpty()) {
                        clearTransientAndNonFaceletComponents(context, child);
                    }
                }
            }
        }
        // Scan facets
        if (component.getFacetCount() > 0) {
            Map<String, UIComponent> facets = component.getFacets();
            for (Iterator<UIComponent> itr = facets.values().iterator(); itr.hasNext();) {
                UIComponent fc = itr.next();
                if (fc != null && fc.isTransient() && fc.getAttributes().get(ComponentSupport.MARK_CREATED) == null) {
                    // Iterator.remove is the only safe way to drop a facet mid-iteration.
                    itr.remove();
                } else {
                    if (fc.getChildCount() > 0 || !fc.getFacets().isEmpty()) {
                        clearTransientAndNonFaceletComponents(context, fc);
                    }
                }
            }
        }
    }
}
|
public class SpringAndroidObjectPersister {
    /**
     * Reads and deserializes cached data from the given file.
     * Returns {@code null} when the file is missing (the entry is simply not cached);
     * throws {@link CacheLoadingException} when the file is empty or deserialization fails.
     */
    @Override
    protected T readCacheDataFromFile(File file) throws CacheLoadingException {
        try {
            String resultJson = null;
            // The interned absolute path acts as a per-file lock so concurrent access to
            // the same cache file does not interleave.
            synchronized (file.getAbsolutePath().intern()) {
                resultJson = FileUtils.readFileToString(file, CharEncoding.UTF_8);
            }
            if (!StringUtils.isEmpty(resultJson)) {
                T result = deserializeData(resultJson);
                return result;
            }
            throw new CacheLoadingException("Unable to restore cache content : cache file is empty");
        } catch (FileNotFoundException e) {
            // Should not occur (we test before if file exists)
            // Do not throw, file is not cached
            Ln.w("file " + file.getAbsolutePath() + " does not exists", e);
            return null;
        } catch (CacheLoadingException e) {
            // Re-throw our own exception unchanged rather than re-wrapping it below.
            throw e;
        } catch (Exception e) {
            // Wrap anything else (I/O, deserialization) in the cache-loading exception.
            throw new CacheLoadingException(e);
        }
    }
}
|
public class AdroitList { private boolean checkEquality ( E it , Object o ) { } }
|
return o . equals ( it ) || ( comparator != null && comparator . compare ( it , ( E ) o ) == 0 ) ;
|
public class InvitationMarshaller {
    /**
     * Marshalls the given {@link Invitation} into the protocol stream.
     * Field order is fixed by the bindings and must not be changed.
     *
     * @param invitation the object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving each bound field
     * @throws SdkClientException if {@code invitation} is {@code null} or marshalling fails
     */
    public void marshall(Invitation invitation, ProtocolMarshaller protocolMarshaller) {
        if (invitation == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(invitation.getAccountId(), ACCOUNTID_BINDING);
            protocolMarshaller.marshall(invitation.getInvitationId(), INVITATIONID_BINDING);
            protocolMarshaller.marshall(invitation.getInvitedAt(), INVITEDAT_BINDING);
            protocolMarshaller.marshall(invitation.getRelationshipStatus(), RELATIONSHIPSTATUS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
|
public class DependencyFinder { /** * Resolves the specified artifact ( using its GAV , classifier and packaging ) .
* @ param mojo the mojo
* @ param groupId the groupId of the artifact to resolve
* @ param artifactId the artifactId of the artifact to resolve
* @ param version the version
* @ param type the type
* @ param classifier the classifier
* @ return the artifact ' s file if it can be revolved . The file is located in the local maven repository .
* @ throws MojoExecutionException if the artifact cannot be resolved */
public static File resolve ( AbstractWisdomMojo mojo , String groupId , String artifactId , String version , String type , String classifier ) throws MojoExecutionException { } }
|
ArtifactRequest request = new ArtifactRequest ( ) ; request . setArtifact ( new DefaultArtifact ( groupId , artifactId , classifier , type , version ) ) ; request . setRepositories ( mojo . remoteRepos ) ; mojo . getLog ( ) . info ( "Resolving artifact " + artifactId + " from " + mojo . remoteRepos ) ; ArtifactResult result ; try { result = mojo . repoSystem . resolveArtifact ( mojo . repoSession , request ) ; } catch ( ArtifactResolutionException e ) { mojo . getLog ( ) . error ( "Cannot resolve " + groupId + ":" + artifactId + ":" + version + ":" + type ) ; throw new MojoExecutionException ( e . getMessage ( ) , e ) ; } mojo . getLog ( ) . info ( "Resolved artifact " + artifactId + " to " + result . getArtifact ( ) . getFile ( ) + " from " + result . getRepository ( ) ) ; return result . getArtifact ( ) . getFile ( ) ;
|
public class ProxyImpl { /** * ( non - Javadoc )
* @ see javax . servlet . sip . Proxy # cancel ( java . lang . String [ ] , int [ ] , java . lang . String [ ] ) */
public void cancel ( String [ ] protocol , int [ ] reasonCode , String [ ] reasonText ) { } }
|
if ( ackReceived ) throw new IllegalStateException ( "There has been an ACK received. Can not cancel more brnaches, the INVITE tx has finished." ) ; cancelAllExcept ( null , protocol , reasonCode , reasonText , true ) ;
|
public class JEEMetadataContextImpl {
    /**
     * Serializes this object. Uses the PutField API so the serialized form is
     * written under the stable field name rather than depending on the current
     * instance-field layout.
     *
     * @param outStream the stream to write the serialized data
     * @throws IOException if writing to the stream fails
     */
    private void writeObject(ObjectOutputStream outStream) throws IOException {
        PutField fields = outStream.putFields();
        // Only the begin-default flag is part of the serialized form.
        fields.put(BEGIN_DEFAULT, beginDefaultContext);
        outStream.writeFields();
    }
}
|
public class TileUtil {
    /**
     * Calculate the screen size of a tile. Normally the screen size is expressed in
     * pixels and should therefore be integers, but for the sake of accuracy we keep
     * double values as long as possible before rounding.
     *
     * @param worldSize the width and height of a tile in the layer's world coordinate system
     * @param scale the current client-side scale
     * @return an array where index 0 is the tile screen width and index 1 is the tile screen height
     */
    public static int[] getTileScreenSize(double[] worldSize, double scale) {
        // Scale in double precision first, round to pixels only at the end.
        final double scaledWidth = worldSize[0] * scale;
        final double scaledHeight = worldSize[1] * scale;
        return new int[] { (int) Math.round(scaledWidth), (int) Math.round(scaledHeight) };
    }
}
|
public class Sources {
    /**
     * Wraps an {@link InputStream} in a {@link BufferedReader} that decodes with the
     * given charset.
     *
     * @param is the stream to read from
     * @param charset the charset used for decoding
     * @return the reader, or {@code null} if the reader could not be created
     *         (e.g. a {@code null} charset) — preserving the original best-effort contract
     */
    public static BufferedReader asReader(InputStream is, Charset charset) {
        try {
            // FIX: pass the Charset object directly instead of round-tripping through
            // charset.toString(), which forced a needless checked-exception path for a
            // charset that is, by construction, already supported.
            return new BufferedReader(new InputStreamReader(is, charset));
        } catch (Exception e) {
            // Keep the original behavior: log and fall through to null on failure.
            e.printStackTrace();
        }
        return null;
    }
}
|
public class PreJava9DateFormatProvider { /** * Returns the same DateFormat as { @ code DateFormat . getDateTimeInstance ( dateStyle , timeStyle , Locale . US ) }
* in Java 8 or below . */
public static DateFormat getUSDateTimeFormat ( int dateStyle , int timeStyle ) { } }
|
String pattern = getDatePartOfDateTimePattern ( dateStyle ) + " " + getTimePartOfDateTimePattern ( timeStyle ) ; return new SimpleDateFormat ( pattern , Locale . US ) ;
|
public class ExpressionEvaluator {
    /**
     * Utility method for ACaseAlternative: binds the alternative's pattern against the
     * case value and, when the pattern matches, evaluates the alternative's result
     * expression in a child context holding the bound names.
     *
     * @param node the case alternative being tried
     * @param val the value the case expression is switching on
     * @param ctxt the enclosing evaluation context
     * @return the evaluated result, or {@code null} when the pattern does not match
     * @throws AnalysisException on evaluation failure other than a pattern mismatch
     */
    public Value eval(ACaseAlternative node, Value val, Context ctxt) throws AnalysisException {
        Context evalContext = new Context(ctxt.assistantFactory, node.getLocation(), "case alternative", ctxt);
        try {
            // Bind the pattern's names; throws PatternMatchException if 'val' doesn't fit.
            // NOTE: the try deliberately also covers the result evaluation below, so a
            // pattern mismatch raised anywhere inside it is swallowed the same way.
            evalContext.putList(ctxt.assistantFactory.createPPatternAssistant().getNamedValues(node.getPattern(), val, ctxt));
            return node.getResult().apply(VdmRuntime.getExpressionEvaluator(), evalContext);
        } catch (PatternMatchException e) {
            // Silently fail (CasesExpression will try the others)
        }
        return null;
    }
}
|
public class HttpChannelConfig { /** * Check the input configuration for the default flag on whether to use
* persistent connections or not . If this is false , then the other related
* configuration values will be ignored ( such as MaxKeepAliveRequests ) .
* @ param props */
private void parseKeepAliveEnabled ( Map < Object , Object > props ) { } }
|
boolean flag = this . bKeepAliveEnabled ; Object value = props . get ( HttpConfigConstants . PROPNAME_KEEPALIVE_ENABLED ) ; if ( null != value ) { flag = convertBoolean ( value ) ; } this . bKeepAliveEnabled = flag ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Config: KeepAliveEnabled is " + isKeepAliveEnabled ( ) ) ; }
|
public class LabeledLabelPanel { /** * Factory method for creating the new { @ link Label } . This method is invoked in the constructor
* from the derived classes and can be overridden so users can provide their own version of a
* new { @ link Label } .
* @ param id
* the id
* @ param model
* the model
* @ return the new { @ link Label } */
protected Label newViewableLabel ( final String id , final IModel < T > model ) { } }
|
final PropertyModel < T > viewableLabelModel = new PropertyModel < > ( model . getObject ( ) , this . getId ( ) ) ; return ComponentFactory . newLabel ( id , viewableLabelModel ) ;
|
public class DocumentElement {
    /**
     * Setter for ElementId.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setElementId(int v) {
        // Generated JCas boilerplate: verify the feature exists before writing it.
        if (DocumentElement_Type.featOkTst && ((DocumentElement_Type) jcasType).casFeat_ElementId == null)
            jcasType.jcas.throwFeatMissing("ElementId", "ch.epfl.bbp.uima.types.DocumentElement");
        // Write through the low-level CAS API using this annotation's address.
        jcasType.ll_cas.ll_setIntValue(addr, ((DocumentElement_Type) jcasType).casFeatCode_ElementId, v);
    }
}
|
public class ColumnMajorSparseMatrix { /** * Creates a random { @ link ColumnMajorSparseMatrix } of the given shape :
* { @ code rows } x { @ code columns } . */
public static ColumnMajorSparseMatrix random ( int rows , int columns , double density , Random random ) { } }
|
return CCSMatrix . random ( rows , columns , density , random ) ;
|
public class DecoratorRegistryImpl { /** * { @ inheritDoc } */
@ Override public void addDecorator ( Class < ? extends AbstractStats > clazz , IDecorator decorator ) { } }
|
addDecorator ( clazz . getName ( ) , decorator ) ;
|
public class MonomerStoreConfiguration {
    /**
     * Refreshes the configuration using the local properties file. If the file does not
     * exist yet it is first created by copying the bundled default resource; any failure
     * while copying or reading falls back to in-memory defaults.
     */
    public void refresh() {
        File configFile = new File(CONFIG_FILE_PATH);
        if (!configFile.exists()) {
            // First run: copy the bundled default properties to the local config path, line by line.
            BufferedWriter writer = null;
            BufferedReader reader = null;
            try {
                configFile.createNewFile();
                InputStream in = Chemistry.class.getResourceAsStream("/org/helm/notation2/resources/MonomerStoreConfig.properties");
                reader = new BufferedReader(new InputStreamReader(in));
                writer = new BufferedWriter(new FileWriter(configFile));
                String line;
                while ((line = reader.readLine()) != null) {
                    writer.write(line + System.getProperty("line.separator"));
                }
            } catch (Exception e) {
                // Copying the defaults failed: revert to in-memory defaults.
                resetConfigToDefault();
                e.printStackTrace();
            } finally {
                try {
                    if (writer != null) {
                        writer.close();
                    }
                    if (reader != null) {
                        reader.close();
                    }
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
        }
        try {
            // Load every known key; a single missing key aborts and falls back to defaults below.
            PropertiesConfiguration conf = new PropertiesConfiguration(CONFIG_FILE_PATH);
            isUseWebservice = conf.getBoolean(USE_WEBSERVICE);
            isUpdateAutomatic = conf.getBoolean(UPDATE_AUTOMATIC);
            webserviceMonomersURL = conf.getString(WEBSERVICE_MONOMERS_URL);
            webserviceMonomersPath = conf.getString(WEBSERVICE_MONOMERS_PATH);
            webserviceMonomersPutPath = conf.getString(WEBSERVICE_MONOMERS_PUT_PATH);
            webserviceNucleotidesURL = conf.getString(WEBSERVICE_NUCLEOTIDES_URL);
            webserviceNucleotidesPath = conf.getString(WEBSERVICE_NUCLEOTIDES_PATH);
            webserviceNucleotidesPutPath = conf.getString(WEBSERVICE_NUCLEOTIDES_PUT_PATH);
            webserviceEditorCategorizationURL = conf.getString(WEBSERVICE_EDITOR_CATEGORIZATION_URL);
            webserviceEditorCategorizationPath = conf.getString(WEBSERVICE_EDITOR_CATEGORIZATION_PATH);
            /* load from external xml file */
            isUseExternalMonomers = conf.getBoolean(USE_EXTERNAL_MONOMERS);
            externalMonomersPath = conf.getString(EXTERNAL_MONOMERS_PATH);
            isUseExternalNucleotides = conf.getBoolean(USE_EXTERNAL_NUCLEOTIDES);
            externalNucleotidesPath = conf.getString(EXTERNAL_NUCLEOTIDES_PATH);
            isUseExternalAttachments = conf.getBoolean(USE_EXTERNAL_ATTACHMENTS);
            externalAttachmentsPath = conf.getString(EXTERNAL_ATTACHMENTS_PATH);
        } catch (ConfigurationException | NoSuchElementException e) {
            // Any missing or invalid key invalidates the file: revert to defaults.
            resetConfigToDefault();
            e.printStackTrace();
        }
    }
}
|
public class LambdaSpringUtil { /** * wires spring into the passed in bean
* @ param o
* @ param myBeanName */
public static void wireInSpring ( Object o , String myBeanName ) { } }
|
// Lambda does not do this for you - though serverless does have a library to do it
if ( ctx == null ) { synchronized ( lck ) { if ( ctx == null ) { LOG . info ( "LamdaSpringUtil CTX is null - initialising spring" ) ; ctx = new ClassPathXmlApplicationContext ( globalRootContextPath ) ; } } } else { LOG . debug ( "LamdaSpringUtil CTX is not null - not initialising spring" ) ; } AutowireCapableBeanFactory factory = ctx . getAutowireCapableBeanFactory ( ) ; factory . autowireBean ( o ) ; factory . initializeBean ( 0 , myBeanName ) ;
|
public class DirectoryLookupService { /** * Query the UP ModelServiceInstance .
* @ param query
* the query criteria .
* @ return
* the ModelServiceInstance list . */
public List < ModelServiceInstance > queryUPModelInstances ( ServiceInstanceQuery query ) { } }
|
List < ModelServiceInstance > upInstances = null ; List < ModelServiceInstance > instances = queryModelInstances ( query ) ; if ( instances != null && instances . size ( ) > 0 ) { for ( ModelServiceInstance instance : instances ) { if ( OperationalStatus . UP . equals ( instance . getStatus ( ) ) ) { if ( upInstances == null ) { upInstances = new ArrayList < ModelServiceInstance > ( ) ; } upInstances . add ( instance ) ; } } } if ( upInstances == null ) { return Collections . emptyList ( ) ; } return upInstances ;
|
public class Authentication { /** * CAUSE : Prefer throwing / catching meaningful exceptions instead of Exception */
protected static boolean saveAuthentication ( URL url , String authenticationToken , boolean authenticationTokenIsPrivate , String applicationKey , int timeToLive , String privateKey , Map < String , LinkedList < ChannelPermissions > > permissions , Proxy proxy ) throws IOException { } }
|
String postBody = String . format ( "AT=%s&AK=%s&PK=%s&TTL=%s&TP=%s&PVT=%s" , authenticationToken , applicationKey , privateKey , timeToLive , permissions . size ( ) , ( authenticationTokenIsPrivate ? "1" : "0" ) ) ; // CAUSE : Inefficient use of keySet iterator instead of entrySet iterator
for ( Map . Entry < String , LinkedList < ChannelPermissions > > channelNamePerms : permissions . entrySet ( ) ) { LinkedList < ChannelPermissions > channelPermissions = channelNamePerms . getValue ( ) ; // CAUSE : Method concatenates strings using + in a loop
// TODO : specify a correct capacity
StringBuilder channelPermissionText = new StringBuilder ( 16 ) ; for ( ChannelPermissions channelPermission : channelPermissions ) { channelPermissionText . append ( channelPermission . getPermission ( ) ) ; } String channelPermission = String . format ( "&%s=%s" , channelNamePerms . getKey ( ) , channelPermissionText ) ; postBody = String . format ( "%s%s" , postBody , channelPermission ) ; } return postSaveAuthentication ( url , postBody , proxy ) ;
|
public class CPOptionValueUtil {
    /**
     * Returns the cp option value where CPOptionId = &#63; and key = &#63; or returns
     * <code>null</code> if it could not be found, optionally using the finder cache.
     *
     * @param CPOptionId the cp option ID
     * @param key the key
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the matching cp option value, or <code>null</code> if a matching cp option value could not be found
     */
    public static CPOptionValue fetchByC_K(long CPOptionId, String key, boolean retrieveFromCache) {
        // Static facade: delegate to the configured persistence implementation.
        return getPersistence().fetchByC_K(CPOptionId, key, retrieveFromCache);
    }
}
|
public class JackrabbitContentRepository { /** * Closes the admin session , and in case of local transient respository for unit test , shuts down the repository and
* cleans all temporary files . */
@ Override protected void destroyRepository ( ) { } }
|
final RepositoryImpl repository = ( RepositoryImpl ) getRepository ( ) ; repository . shutdown ( ) ; LOG . info ( "Destroyed repository at {}" , repository . getConfig ( ) . getHomeDir ( ) ) ;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.