signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class HtmlWriter { /** * Generates HTML Output for a { @ link DefinitionList } . */ private static String definitionListToHtml ( DefinitionList dl ) { } }
if ( dl == null ) { return "null" ; } StringBuilder result = new StringBuilder ( ) ; result . append ( "<table class=\"DefinitionList\">\n" + "<tr><th class=\"DefinitionList\">DefinitionList</th></tr>\n" + "<tr><td class=\"DefinitionList\">" ) ; if ( dl . getDefinedTerm ( ) != null ) { result . append ( contentElementToHtml ( dl . getDefinedTerm ( ) ) + "\n" ) ; } result . append ( "<ul>" ) ; for ( ContentElement ce : dl . getDefinitions ( ) ) { result . append ( "<li>" + contentElementToHtml ( ce ) + "</li>" ) ; } result . append ( "</ul>\n" ) ; result . append ( "</td></tr>\n" ) ; result . append ( "</table>\n" ) ; return result . toString ( ) ;
public class PickList { /** * Prohibits client - side manipulation of disabled entries , when CSS style - class ui - state - disabled is removed . See * < a href = " https : / / github . com / primefaces / primefaces / issues / 2127 " > https : / / github . com / primefaces / primefaces / issues / 2127 < / a > * @ param newEntries new / set entries of model source / target list * @ param oldEntries old / former entries of model source / target list */ protected void checkDisabled ( FacesContext facesContext , String label , List < ? > newEntries , List < ? > oldEntries ) { } }
if ( ! isValid ( ) ) { return ; } Map < String , Object > requestMap = facesContext . getExternalContext ( ) . getRequestMap ( ) ; String varName = getVar ( ) ; String clientId = getClientId ( facesContext ) ; Object originalItem = requestMap . get ( varName ) ; for ( int i = 0 ; i < newEntries . size ( ) ; i ++ ) { Object item = newEntries . get ( i ) ; // Set the current item in request map to get its properties via stateHelper ( ) . eval ( ) call requestMap . put ( varName , item ) ; boolean itemDisabled = isItemDisabled ( ) ; // Check if disabled item has been moved from its former / original list if ( itemDisabled && ! oldEntries . contains ( item ) ) { FacesMessage message = MessageFactory . getMessage ( UPDATE_MESSAGE_ID , FacesMessage . SEVERITY_ERROR , new Object [ ] { label } ) ; facesContext . addMessage ( clientId , message ) ; setValid ( false ) ; break ; } } // put the original value back requestMap . put ( varName , originalItem ) ;
public class HmlWriter { /** * Write the specified HML to the specified writer . * @ param data HML to write , must not be null * @ param writer writer to write to , must not be null * @ throws IOException if an I / O error occurs */ public static void write ( final Hml data , final Writer writer ) throws IOException { } }
checkNotNull ( data ) ; checkNotNull ( writer ) ; try { JAXBContext context = JAXBContext . newInstance ( Hml . class ) ; Marshaller marshaller = context . createMarshaller ( ) ; SchemaFactory schemaFactory = SchemaFactory . newInstance ( XMLConstants . W3C_XML_SCHEMA_NS_URI ) ; URL schemaURL = HmlReader . class . getResource ( "/org/nmdp/ngs/hml/xsd/hml-1.0.1.xsd" ) ; Schema schema = schemaFactory . newSchema ( schemaURL ) ; marshaller . setSchema ( schema ) ; marshaller . setProperty ( Marshaller . JAXB_FORMATTED_OUTPUT , true ) ; marshaller . marshal ( data , writer ) ; } catch ( JAXBException | SAXException e ) { e . printStackTrace ( ) ; throw new IOException ( "could not marshal HML" , e ) ; }
public class GVRCameraRig {
    /**
     * Sets the rotation and angular velocity data for the camera rig. This
     * should only be done in response to
     * {@link OvrRotationSensorListener#onRotationSensor(long, float, float, float, float, float, float, float)
     * OvrRotationSensorListener.onRotationSensor()}.
     *
     * @param timeStamp Clock-time when the data was received, in nanoseconds.
     * @param w The 'W' rotation component.
     * @param x The 'X' rotation component.
     * @param y The 'Y' rotation component.
     * @param z The 'Z' rotation component.
     * @param gyroX Angular velocity on the 'X' axis.
     * @param gyroY Angular velocity on the 'Y' axis.
     * @param gyroZ Angular velocity on the 'Z' axis.
     */
    void setRotationSensorData(long timeStamp, float w, float x, float y, float z, float gyroX, float gyroY, float gyroZ) {
        // Pure delegation to the native layer; no Java-side state is touched here.
        NativeCameraRig.setRotationSensorData(getNative(), timeStamp, w, x, y, z, gyroX, gyroY, gyroZ);
    }
}
public class GVRRigidBody {
    /**
     * Sets a linear factor [X, Y, Z] that influences forces acting on this
     * {@linkplain GVRRigidBody rigid body}.
     *
     * @param x factor on the 'X' axis.
     * @param y factor on the 'Y' axis.
     * @param z factor on the 'Z' axis.
     */
    public void setLinearFactor(float x, float y, float z) {
        // Pure delegation to the native physics body; no Java-side state is touched.
        Native3DRigidBody.setLinearFactor(getNative(), x, y, z);
    }
}
public class RectifyImageOps { /** * Creates an { @ link ImageDistort } for rectifying an image given its rectification matrix . * Lens distortion is assumed to have been previously removed . * @ param rectify Transform for rectifying the image . * @ param imageType Type of single band image the transform is to be applied to . * @ return ImageDistort for rectifying the image . */ public static < T extends ImageGray < T > > ImageDistort < T , T > rectifyImage ( FMatrixRMaj rectify , BorderType borderType , Class < T > imageType ) { } }
boolean skip = borderType == BorderType . SKIP ; if ( skip ) { borderType = BorderType . EXTENDED ; } InterpolatePixelS < T > interp = FactoryInterpolation . bilinearPixelS ( imageType , borderType ) ; FMatrixRMaj rectifyInv = new FMatrixRMaj ( 3 , 3 ) ; CommonOps_FDRM . invert ( rectify , rectifyInv ) ; PointTransformHomography_F32 rectifyTran = new PointTransformHomography_F32 ( rectifyInv ) ; // don ' t bother caching the results since it is likely to only be applied once and is cheap to compute ImageDistort < T , T > ret = FactoryDistort . distortSB ( false , interp , imageType ) ; ret . setRenderAll ( ! skip ) ; ret . setModel ( new PointToPixelTransform_F32 ( rectifyTran ) ) ; return ret ;
public class BlockHouseHolder_DDRB {
    /**
     * Computes the inner product of row vector 'rowA' against row vector 'rowB' while taking
     * in account leading zeros and one.<br>
     * <br>
     * ret = a<sup>T</sup>*b
     * <p>
     * Row A is assumed to be a householder vector. Element at 'colStartA' is one and previous
     * elements are zero.
     *
     * @param blockLength size of the blocks in the block-aligned matrices
     * @param A block aligned submatrix holding the householder row vector.
     * @param rowA Row index inside the sub-matrix of first row vector that has the leading zeros and one.
     * @param B block aligned submatrix holding the second row vector.
     * @param rowB Row index inside the sub-matrix of second row vector.
     * @param zeroOffset number of additional leading columns of A to treat as zero.
     * @return dot product of the two vectors.
     */
    public static double innerProdRow(int blockLength, DSubmatrixD1 A, int rowA, DSubmatrixD1 B, int rowB, int zeroOffset) {
        // first column that can contribute: everything before is an implicit zero in A
        int offset = rowA + zeroOffset;
        // if the implicit one lies at/past B's last column there is nothing to sum
        if (offset + B.col0 >= B.col1)
            return 0;
        // take in account the one in 'A': A[rowA][offset] == 1, so that term is just B's element
        double total = B.get(rowB, offset);
        // remaining terms are a plain dot product starting one past the implicit one
        total += VectorOps_DDRB.dot_row(blockLength, A, rowA, B, rowB, offset + 1, A.col1 - A.col0);
        return total;
    }
}
public class TypedPropertyDescriptor { /** * this class maintains its own copy of propertyType . */ public void setReadMethod ( Method getter ) throws IntrospectionException { } }
super . setReadMethod ( getter ) ; if ( _propertyClass == null ) { _propertyClass = super . getPropertyType ( ) ; }
public class ProducerSequenceFactory { /** * bitmap prepare producer - > inputProducer */ private synchronized Producer < CloseableReference < CloseableImage > > getBitmapPrepareSequence ( Producer < CloseableReference < CloseableImage > > inputProducer ) { } }
Producer < CloseableReference < CloseableImage > > bitmapPrepareProducer = mBitmapPrepareSequences . get ( inputProducer ) ; if ( bitmapPrepareProducer == null ) { bitmapPrepareProducer = mProducerFactory . newBitmapPrepareProducer ( inputProducer ) ; mBitmapPrepareSequences . put ( inputProducer , bitmapPrepareProducer ) ; } return bitmapPrepareProducer ;
public class DefaultHttp2LocalFlowController {
    /**
     * The window update ratio is used to determine when a window update must be sent. If the ratio
     * of bytes processed since the last update has met or exceeded this ratio then a window update will
     * be sent. This window update ratio will only be applied to {@code streamId}.
     * <p>
     * Note it is the responsibility of the caller to ensure that the
     * initial {@code SETTINGS} frame is sent before this is called. It would
     * be considered a {@link Http2Error#PROTOCOL_ERROR} if a {@code WINDOW_UPDATE}
     * was generated by this method before the initial {@code SETTINGS} frame is sent.
     *
     * @param stream the stream for which {@code ratio} applies to.
     * @param ratio the ratio to use when checking if a {@code WINDOW_UPDATE} is determined necessary.
     * @throws Http2Exception If a protocol-error occurs while generating {@code WINDOW_UPDATE} frames
     */
    public void windowUpdateRatio(Http2Stream stream, float ratio) throws Http2Exception {
        // flow-control state is confined to the event loop; must not be called elsewhere
        assert ctx != null && ctx.executor().inEventLoop();
        // validate before mutating any state
        checkValidRatio(ratio);
        FlowState state = state(stream);
        state.windowUpdateRatio(ratio);
        // the new ratio may make an update immediately due; check right away
        state.writeWindowUpdateIfNeeded();
    }
}
public class XmlUtils { /** * Builds a prettier exception message . * @ param ex the SAXParseException * @ return an easier to read exception message */ public static String getPrettyParseExceptionInfo ( SAXParseException ex ) { } }
final StringBuilder sb = new StringBuilder ( ) ; if ( ex . getSystemId ( ) != null ) { sb . append ( "systemId=" ) . append ( ex . getSystemId ( ) ) . append ( ", " ) ; } if ( ex . getPublicId ( ) != null ) { sb . append ( "publicId=" ) . append ( ex . getPublicId ( ) ) . append ( ", " ) ; } if ( ex . getLineNumber ( ) > 0 ) { sb . append ( "Line=" ) . append ( ex . getLineNumber ( ) ) ; } if ( ex . getColumnNumber ( ) > 0 ) { sb . append ( ", Column=" ) . append ( ex . getColumnNumber ( ) ) ; } sb . append ( ": " ) . append ( ex . getMessage ( ) ) ; return sb . toString ( ) ;
public class SeaGlassTextFieldUI {
    /**
     * Lazily creates and returns the border for this text field. The border is
     * built on the first call, using the style insets from the given context,
     * and cached in {@code textFieldBorder}; later calls return the cached
     * instance without consulting the context again.
     *
     * @param context the SeaGlass synth context used to look up style insets
     * @return the (possibly cached) text field border
     */
    protected TextFieldBorder createTextFieldBorder(SeaGlassContext context) {
        if (textFieldBorder == null) {
            textFieldBorder = new TextFieldBorder(this, context.getStyle().getInsets(context, null));
        }
        return textFieldBorder;
    }
}
public class KProgram { /** * Create a new { @ link KProcess } * @ param context * context linked to the program * @ return a new program . */ public KProcess newProcess ( Object context ) { } }
KProcess process = new KProcess ( this , context ) ; this . init ( process ) ; return process ;
public class ThrowUnchecked { /** * Throws the given exception if it is unchecked or an instance of any of * the given declared types . Otherwise , it is thrown as an * UndeclaredThrowableException . This method only returns normally if the * exception is null . * @ param t exception to throw * @ param declaredTypes if exception is checked and is not an instance of * any of these types , then it is thrown as an * UndeclaredThrowableException . */ public static void fireDeclared ( Throwable t , Class ... declaredTypes ) { } }
if ( t != null ) { if ( declaredTypes != null ) { for ( Class declaredType : declaredTypes ) { if ( declaredType . isInstance ( t ) ) { fire ( t ) ; } } } if ( t instanceof RuntimeException ) { throw ( RuntimeException ) t ; } if ( t instanceof Error ) { throw ( Error ) t ; } throw new UndeclaredThrowableException ( t ) ; }
public class Pattern {
    /**
     * Appends a new group pattern to the existing one. The new pattern enforces non-strict
     * temporal contiguity. This means that a matching event of this pattern and the
     * preceding matching event might be interleaved with other events which are ignored.
     *
     * @param group the pattern to append
     * @return A new pattern which is appended to this one
     */
    public GroupPattern<T, F> followedByAny(Pattern<T, F> group) {
        // SKIP_TILL_ANY encodes the non-strict contiguity; the current
        // after-match skip strategy is carried over to the combined pattern.
        return new GroupPattern<>(this, group, ConsumingStrategy.SKIP_TILL_ANY, afterMatchSkipStrategy);
    }
}
public class MinioClient {
    /**
     * Removes a bucket.
     * <p>
     * NOTE: All objects (including all object versions and delete markers) in the bucket
     * must be deleted prior; this API will not recursively delete objects.
     * </p>
     * <b>Example:</b><br>
     * <pre>{@code minioClient.removeBucket("my-bucketname");
     * System.out.println("my-bucketname is removed successfully"); }</pre>
     *
     * @param bucketName Bucket name.
     * @throws InvalidBucketNameException upon invalid bucket name is given
     * @throws NoSuchAlgorithmException upon requested algorithm was not found during signature calculation
     * @throws InsufficientDataException upon getting EOFException while reading given stream
     * @throws IOException upon connection error
     * @throws InvalidKeyException upon an invalid access key or secret key
     * @throws NoResponseException upon no response from server
     * @throws XmlPullParserException upon parsing response xml
     * @throws ErrorResponseException upon unsuccessful execution
     * @throws InternalException upon internal library error
     */
    public void removeBucket(String bucketName) throws InvalidBucketNameException, NoSuchAlgorithmException, InsufficientDataException, IOException, InvalidKeyException, NoResponseException, XmlPullParserException, ErrorResponseException, InternalException {
        // bucket removal is a plain DELETE on the bucket itself: no object key, no query params
        executeDelete(bucketName, null, null);
    }
}
public class StringCropper { /** * Return the rest of the string that is cropped the number of chars from * the beginning * @ param srcStr * @ param charCount * @ return */ public String removeHead ( String srcStr , int charCount ) { } }
return getRightOf ( srcStr , srcStr . length ( ) - charCount ) ;
public class ReactionManipulator { /** * Returns a new Reaction object which is the reverse of the given * Reaction . * @ param reaction the reaction being considered * @ return the reverse reaction */ public static IReaction reverse ( IReaction reaction ) { } }
IReaction reversedReaction = reaction . getBuilder ( ) . newInstance ( IReaction . class ) ; if ( reaction . getDirection ( ) == IReaction . Direction . BIDIRECTIONAL ) { reversedReaction . setDirection ( IReaction . Direction . BIDIRECTIONAL ) ; } else if ( reaction . getDirection ( ) == IReaction . Direction . FORWARD ) { reversedReaction . setDirection ( IReaction . Direction . BACKWARD ) ; } else if ( reaction . getDirection ( ) == IReaction . Direction . BACKWARD ) { reversedReaction . setDirection ( IReaction . Direction . FORWARD ) ; } IAtomContainerSet reactants = reaction . getReactants ( ) ; for ( int i = 0 ; i < reactants . getAtomContainerCount ( ) ; i ++ ) { double coefficient = reaction . getReactantCoefficient ( reactants . getAtomContainer ( i ) ) ; reversedReaction . addProduct ( reactants . getAtomContainer ( i ) , coefficient ) ; } IAtomContainerSet products = reaction . getProducts ( ) ; for ( int i = 0 ; i < products . getAtomContainerCount ( ) ; i ++ ) { double coefficient = reaction . getProductCoefficient ( products . getAtomContainer ( i ) ) ; reversedReaction . addReactant ( products . getAtomContainer ( i ) , coefficient ) ; } return reversedReaction ;
public class MetaClassImpl {
    /**
     * Create a CallSite for invoking a method on a POGO (plain old Groovy object).
     * Returns an optimized per-method site when the target method can be resolved
     * and cached up front; otherwise falls back to a generic meta-class site.
     */
    public CallSite createPogoCallSite(CallSite site, Object[] args) {
        // Fast path is only safe when no category is active on this thread and
        // this meta class does not adapt/intercept invocations.
        if (!GroovyCategorySupport.hasCategoryInCurrentThread() && !(this instanceof AdaptingMetaClass)) {
            Class[] params = MetaClassHelper.convertToTypeArray(args);
            CallSite tempSite = site;
            if (site.getName().equals("call") && GeneratedClosure.class.isAssignableFrom(theClass)) {
                // here, we want to point to a method named "doCall" instead of "call"
                // but we don't want to replace the original call site name, otherwise
                // we loose the fact that the original method name was "call" so instead
                // we will point to a metamethod called "doCall"
                // see GROOVY-5806 for details
                tempSite = new AbstractCallSite(site.getArray(), site.getIndex(), "doCall");
            }
            MetaMethod metaMethod = getMethodWithCachingInternal(theClass, tempSite, params);
            if (metaMethod != null)
                // resolved: emit a site bound directly to this meta method
                return PogoMetaMethodSite.createPogoMetaMethodSite(site, this, metaMethod, params, args);
        }
        // Slow path: generic site that resolves the method on each invocation.
        return new PogoMetaClassSite(site, this);
    }
}
public class ChannelUtils {
    /**
     * This method will compare the new configuration against a previous configuration
     * and handle changes between the two. Deleted objects will be removed from
     * runtime and updates will stop and restart those specific chains. New chains
     * will be handled as normal.
     * This is same as startConfig(Dictionary, boolean) as to what it returns.
     *
     * @param oldconfig previous configuration, or null if there was none
     * @param newconfig new configuration to apply
     * @return Map&lt;String, List&lt;String&gt;&gt; keyed by "factory"/"endpoint"/"channel"/"chain"/"group",
     *         each mapping to the names now running in that category
     * @see ChannelUtils#startConfig(Dictionary, boolean)
     */
    public static synchronized Map<String, List<String>> startConfig(Map<String, Object> oldconfig, Map<String, Object> newconfig) {
        if (null == oldconfig) {
            // if no old config, then assume this is not a restart situation. If "true" is passed for restart, then
            // other logic that throws an exception while trying to "unload" started chains needs to be modify not to throw
            // those exceptions, since sometimes the chains won't be loaded in the first place.
            return startConfig(newconfig, false);
        }
        final boolean bTrace = TraceComponent.isAnyTracingEnabled();
        if (bTrace && tc.isEventEnabled()) {
            Tr.event(tc, "startConfig(old,new)");
        }
        final ChannelFramework cf = ChannelFrameworkFactory.getChannelFramework();
        // normalize both configs into category -> (name -> attributes) form
        Map<String, Map<String, String[]>> oldc = extractConfig(oldconfig);
        Map<String, Map<String, String[]>> newc = extractConfig(newconfig);
        // TODO handle groups
        List<String> runningChains = new LinkedList<String>();
        Map<String, String[]> oldlist = oldc.get("chains");
        Map<String, String[]> newlist = newc.get("chains");
        List<String> chainsToDelete = new LinkedList<String>();
        List<String> chainsToStop = new LinkedList<String>();
        Map<String, String[]> chainsToStart = new HashMap<String, String[]>();
        // if an old chain name no longer exists, we need to stop it
        for (String oldname : oldlist.keySet()) {
            if (!newlist.containsKey(oldname)) {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Defunct chain; " + oldname);
                }
                chainsToDelete.add(oldname);
            }
        }
        // check for new or updated chain definitions
        for (Entry<String, String[]> entry : newlist.entrySet()) {
            final String newname = entry.getKey();
            final String[] oldobj = oldlist.get(newname);
            if (null == oldobj) {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "New chain; " + newname);
                }
                chainsToStart.put(newname, entry.getValue());
            } else {
                // compare attributes
                final String[] newobj = entry.getValue();
                if (hasChanged(oldobj, newobj)) {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Chain updated; " + newname);
                    }
                    // an updated chain must be stopped and then restarted
                    chainsToStop.add(newname);
                    chainsToStart.put(newname, newobj);
                } else {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Chain unchanged; " + newname);
                    }
                    runningChains.add(newname);
                }
            }
        }
        // check for new or updated channels
        List<String> runningChannels = new LinkedList<String>();
        Map<String, String[]> channelsToCreate = new HashMap<String, String[]>();
        oldlist = oldc.get("channels");
        newlist = newc.get("channels");
        for (String oldname : oldlist.keySet()) {
            if (!newlist.containsKey(oldname)) {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Defunct channel: " + oldname);
                }
                try {
                    // must delete chains that were using the defunct channel
                    ChainData[] existingchains = cf.getAllChains(oldname);
                    for (ChainData cd : existingchains) {
                        final String name = cd.getName();
                        if (bTrace && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Deleting chain; " + name);
                        }
                        chainsToDelete.add(name);
                        chainsToStart.remove(name);
                        runningChains.remove(name);
                    }
                } catch (ChannelException e) {
                    FFDCFilter.processException(e, "ChannelUtils.startConfig", "defunctChannel", new Object[] { oldname, cf });
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Unable to query defunct channel; " + e);
                    }
                }
            }
        }
        for (String newname : newlist.keySet()) {
            String[] oldobj = oldlist.get(newname);
            String[] newobj = newlist.get(newname);
            if (null == oldobj) {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "New channel; " + newname);
                }
                channelsToCreate.put(newname, newobj);
            } else {
                if (hasChanged(oldobj, newobj)) {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Channel updated; " + newname);
                    }
                    channelsToCreate.put(newname, newobj);
                    try {
                        // since we don't know if the channel supports runtime
                        // changes, stop chains using it
                        ChainData[] c = cf.getAllChains(newname);
                        for (ChainData cd : c) {
                            final String name = cd.getName();
                            if (runningChains.contains(name)) {
                                if (bTrace && tc.isDebugEnabled()) {
                                    Tr.debug(tc, "Restarting chain; " + name);
                                }
                                chainsToStop.add(name);
                                runningChains.remove(name);
                                // if it wasn't already flagged to start, make sure
                                // it restarts after the stop
                                if (!chainsToStart.containsKey(name)) {
                                    chainsToStart.put(name, newc.get("chains").get(name));
                                }
                            }
                        }
                    } catch (ChannelException e) {
                        FFDCFilter.processException(e, "ChannelUtils.startConfig", "updatedChannel", new Object[] { newname, cf });
                        if (bTrace && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Unable to query updated channel; " + e);
                        }
                    }
                } else {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Channel unchanged; " + newname);
                    }
                    runningChannels.add(newname);
                }
            }
        }
        // check factories
        List<String> runningFactories = new LinkedList<String>();
        Map<String, String[]> factoriesToCreate = new HashMap<String, String[]>();
        oldlist = oldc.get("factories");
        newlist = newc.get("factories");
        for (String oldname : oldlist.keySet()) {
            if (!newlist.containsKey(oldname)) {
                // NOTE: defunct factories are only logged, not unloaded here
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Defunct factory; " + oldname);
                }
            }
        }
        for (String newname : newlist.keySet()) {
            String[] oldobj = oldlist.get(newname);
            String[] newobj = newlist.get(newname);
            if (null == oldobj) {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "New factory; " + newname);
                }
                factoriesToCreate.put(newname, newobj);
            } else {
                if (hasChanged(oldobj, newobj)) {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Factory updated; " + newname);
                    }
                    factoriesToCreate.put(newname, newobj);
                } else {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Factory unchanged; " + newname);
                    }
                    runningFactories.add(newname);
                }
            }
        }
        // check for updated endpoints
        List<String> runningEndpoints = new LinkedList<String>();
        Map<String, String[]> endpointsToCreate = new HashMap<String, String[]>();
        List<String> updatedEndpoints = new LinkedList<String>();
        oldlist = oldc.get("endpoints");
        newlist = newc.get("endpoints");
        for (String oldname : oldlist.keySet()) {
            if (!newlist.containsKey(oldname)) {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Defunct endpoint; " + oldname);
                }
                EndPointMgrImpl.getRef().removeEndPoint(oldname);
            }
        }
        for (String newname : newlist.keySet()) {
            String[] oldobj = oldlist.get(newname);
            String[] newobj = newlist.get(newname);
            if (null == oldobj) {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "New endpoint; " + newname);
                }
                endpointsToCreate.put(newname, newobj);
            } else {
                if (hasChanged(oldobj, newobj)) {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Endpoint updated; " + newname);
                    }
                    endpointsToCreate.put(newname, newobj);
                    updatedEndpoints.add(newname);
                } else {
                    if (bTrace && tc.isDebugEnabled()) {
                        Tr.debug(tc, "Endpoint unchanged; " + newname);
                    }
                    runningEndpoints.add(newname);
                }
            }
        }
        // load new endpoint definitions
        runningEndpoints.addAll(loadEndPoints(endpointsToCreate));
        // TCP does not allow endpoint runtime changes, restart necessary chains
        if (!updatedEndpoints.isEmpty()) {
            final Map<String, String[]> newchains = newc.get("chains");
            final Map<String, String[]> newchannels = newc.get("channels");
            for (String chainname : runningChains) {
                ChainData cd = cf.getChain(chainname);
                ChannelData[] channels = (null != cd) ? cd.getChannelList() : null;
                if (null != channels && 0 < channels.length) {
                    // first channel in the chain carries the endpoint reference
                    String ep = (String) channels[0].getPropertyBag().get("endPointName");
                    if (null != ep && updatedEndpoints.contains(ep)) {
                        if (bTrace && tc.isDebugEnabled()) {
                            Tr.debug(tc, "Chain [" + chainname + "] using updated endpoint " + ep);
                        }
                        chainsToStop.add(chainname);
                        chainsToStart.put(chainname, newchains.get(chainname));
                        // the TCP channel itself must also be recreated
                        String tcp = channels[0].getExternalName();
                        channelsToCreate.put(tcp, newchannels.get(tcp));
                    }
                }
            }
        }
        Map<String, List<String>> rc = new HashMap<String, List<String>>();
        // stop chains that need it
        if (!chainsToDelete.isEmpty()) {
            unloadChains(chainsToDelete.iterator());
        }
        // first stop/quiesce the chains (AND WAIT FOR COMPLETION),
        // then destroy them for updates to happen correctly
        stopChains(chainsToStop, -1L, null);
        for (String chainname : chainsToStop) {
            runningChains.remove(chainname);
            try {
                ChainData cd = cf.getChain(chainname);
                if (null != cd) {
                    cf.destroyChain(cd);
                }
            } catch (Exception e) {
                FFDCFilter.processException(e, "ChannelUtils.startConfig", "stopChain", new Object[] { chainname, cf });
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Error destroying chain; " + chainname + " " + e);
                }
                // if destroy failed, do not attempt to restart this chain
                chainsToStart.remove(chainname);
            }
        }
        // apply the accumulated changes, factories first, then channels, then chains
        runningFactories.addAll(loadFactories(factoriesToCreate));
        rc.put("factory", runningFactories);
        rc.put("endpoint", runningEndpoints);
        runningChannels.addAll(loadChannels(channelsToCreate));
        rc.put("channel", runningChannels);
        runningChains.addAll(loadChains(chainsToStart, true, true));
        for (String chain : chainsToStart.keySet()) {
            try {
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Starting chain: " + chain);
                }
                cf.startChain(chain);
            } catch (RetryableChannelException rce) {
                // retryable failures are expected (e.g. port busy); just log
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Error starting chain; " + rce);
                }
            } catch (Exception e) {
                FFDCFilter.processException(e, "ChannelUtils.startConfig", "chain", new Object[] { chain, cf });
                if (bTrace && tc.isDebugEnabled()) {
                    Tr.debug(tc, "Error starting chain; " + e);
                }
            }
        }
        rc.put("chain", runningChains);
        // rc.put("group", loadGroups(newc.get("group"), true, false));
        rc.put("group", new LinkedList<String>());
        return rc;
    }
}
public class ActionDefinition { /** * syntactic sugar */ public Attachment addSupportingEvidence ( ) { } }
Attachment t = new Attachment ( ) ; if ( this . supportingEvidence == null ) this . supportingEvidence = new ArrayList < Attachment > ( ) ; this . supportingEvidence . add ( t ) ; return t ;
public class KerasLoss { /** * Get layer output type . * @ param inputType Array of InputTypes * @ return output type as InputType * @ throws InvalidKerasConfigurationException Invalid Keras config */ @ Override public InputType getOutputType ( InputType ... inputType ) throws InvalidKerasConfigurationException , UnsupportedKerasConfigurationException { } }
if ( inputType . length > 1 ) throw new InvalidKerasConfigurationException ( "Keras Loss layer accepts only one input (received " + inputType . length + ")" ) ; return this . getLossLayer ( inputType [ 0 ] ) . getOutputType ( - 1 , inputType [ 0 ] ) ;
public class AnnotationAnalyzer { /** * Returns the text information for a given field of a class . * @ param field * Field to inspect . * @ param locale * Locale to use . * @ param annotationClasz * Type of annotation to find . * @ return Label information - May be < code > null < / code > in case the annotation was not found . */ public final FieldTextInfo createFieldInfo ( @ NotNull final Field field , @ NotNull final Locale locale , @ NotNull final Class < ? extends Annotation > annotationClasz ) { } }
Contract . requireArgNotNull ( "field" , field ) ; Contract . requireArgNotNull ( "locale" , locale ) ; Contract . requireArgNotNull ( "annotationClasz" , annotationClasz ) ; final Annotation annotation = field . getAnnotation ( annotationClasz ) ; if ( annotation == null ) { return null ; } try { final ResourceBundle bundle = getResourceBundle ( annotation , locale , field . getDeclaringClass ( ) ) ; final String text = getText ( bundle , annotation , field . getName ( ) + "." + annotationClasz . getSimpleName ( ) ) ; return new FieldTextInfo ( field , text ) ; } catch ( final MissingResourceException ex ) { return new FieldTextInfo ( field , toNullableString ( getValue ( annotation ) ) ) ; }
public class OAuth1 { /** * Sends a request to the server and returns a token * @ param url the URL to send the request to * @ param method the HTTP request method * @ param token a token used for authorization ( may be null if the * app is not authorized yet ) * @ param additionalAuthParams additional parameters that should be * added to the < code > Authorization < / code > header * @ return the token * @ throws IOException if the request was not successful * @ throws RequestException if the server returned an error * @ throws UnauthorizedException if the request is not authorized */ private Token requestCredentials ( URL url , Method method , Token token , Map < String , String > additionalAuthParams ) throws IOException { } }
Response r = requestInternal ( url , method , token , additionalAuthParams , null ) ; InputStream is = r . getInputStream ( ) ; String response = CSLUtils . readStreamToString ( is , UTF8 ) ; // create token for temporary credentials Map < String , String > sr = splitResponse ( response ) ; return responseToToken ( sr ) ;
public class ProvFactory { /** * A factory method to create an instance of a usage { @ link Used } from another * @ param u an instance of a usage * @ return an instance of { @ link Used } equal ( in the sense of @ see Object . equals ( ) ) to the input */ public Used newUsed ( Used u ) { } }
Used u1 = newUsed ( u . getId ( ) , u . getActivity ( ) , u . getEntity ( ) ) ; u1 . setTime ( u . getTime ( ) ) ; u1 . getType ( ) . addAll ( u . getType ( ) ) ; u1 . getLabel ( ) . addAll ( u . getLabel ( ) ) ; u1 . getRole ( ) . addAll ( u . getRole ( ) ) ; u1 . getLocation ( ) . addAll ( u . getLocation ( ) ) ; u1 . getOther ( ) . addAll ( u . getOther ( ) ) ; return u1 ;
public class Radar {
    /**
     * Defines the position of the center of the radar by the
     * given coordinates as longitude and latitude (note the LON, LAT order).
     *
     * @param LON longitude of the new center
     * @param LAT latitude of the new center
     */
    public void setMyLocation(final double LON, final double LAT) {
        this.MY_LOCATION.setLocation(LON, LAT);
        // re-evaluate blips relative to the new center
        checkForBlips();
        // rebuild the radar at the current inner component size, then repaint
        init(getInnerBounds().width, getInnerBounds().height);
        repaint();
    }
}
public class ExtensionsInner {
    /**
     * Gets the status of Operations Management Suite (OMS) on the HDInsight cluster.
     *
     * @param resourceGroupName The name of the resource group.
     * @param clusterName The name of the cluster.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ClusterMonitoringResponseInner object
     */
    public Observable<ServiceResponse<ClusterMonitoringResponseInner>> getMonitoringStatusWithServiceResponseAsync(String resourceGroupName, String clusterName) {
        // Validate all required parameters up front so the service call never sees nulls.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call and map the raw HTTP response into a typed ServiceResponse.
        return service.getMonitoringStatus(this.client.subscriptionId(), resourceGroupName, clusterName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ClusterMonitoringResponseInner>>>() {
                @Override
                public Observable<ServiceResponse<ClusterMonitoringResponseInner>> call(Response<ResponseBody> response) {
                    try {
                        // The delegate deserializes the body and maps service errors to exceptions.
                        ServiceResponse<ClusterMonitoringResponseInner> clientResponse = getMonitoringStatusDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Surface deserialization/mapping failures through the observable.
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class EldaRouterRestletSupport { /** * Add the baseFilePath to the FileManager singleton . Only do it * once , otherwise the instance will get larger on each config load * ( at least that won ' t be once per query , though ) . Just possibly * there may be multiple servlet contexts so we add a new only only if * its not already in the instance ' s locator list . */ private static void addBaseFilepath ( String baseFilePath ) { } }
FileManager fm = EldaFileManager . get ( ) ; for ( Iterator < Locator > il = fm . locators ( ) ; il . hasNext ( ) ; ) { Locator l = il . next ( ) ; if ( l instanceof LocatorFile ) if ( ( ( LocatorFile ) l ) . getName ( ) . equals ( baseFilePath ) ) return ; } log . info ( "adding locator for " + baseFilePath ) ; EldaFileManager . get ( ) . addLocatorFile ( baseFilePath ) ;
public class AmazonConfigClient { /** * Returns the resource counts across accounts and regions that are present in your AWS Config aggregator . You can * request the resource counts by providing filters and GroupByKey . * For example , if the input contains accountID 12345678910 and region us - east - 1 in filters , the API returns the * count of resources in account ID 12345678910 and region us - east - 1 . If the input contains ACCOUNT _ ID as a * GroupByKey , the API returns resource counts for all source accounts that are present in your aggregator . * @ param getAggregateDiscoveredResourceCountsRequest * @ return Result of the GetAggregateDiscoveredResourceCounts operation returned by the service . * @ throws ValidationException * The requested action is not valid . * @ throws InvalidLimitException * The specified limit is outside the allowable range . * @ throws InvalidNextTokenException * The specified next token is invalid . Specify the < code > nextToken < / code > string that was returned in the * previous response to get the next page of results . * @ throws NoSuchConfigurationAggregatorException * You have specified a configuration aggregator that does not exist . * @ sample AmazonConfig . GetAggregateDiscoveredResourceCounts * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / config - 2014-11-12 / GetAggregateDiscoveredResourceCounts " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetAggregateDiscoveredResourceCountsResult getAggregateDiscoveredResourceCounts ( GetAggregateDiscoveredResourceCountsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetAggregateDiscoveredResourceCounts ( request ) ;
public class JenkinsBuildPropertyType {
    /**
     * Depends on the nature of the entity. Allowed to the ones who can create the associated entity.
     *
     * @param entity the project entity the property is attached to
     * @param securityService used to check the caller's project-level functions
     * @return {@code true} if the caller may edit this property on the given entity
     */
    @Override
    public boolean canEdit(ProjectEntity entity, SecurityService securityService) {
        switch (entity.getProjectEntityType()) {
            case BUILD:
                // Editing on a build requires the build-creation function.
                return securityService.isProjectFunctionGranted(entity, BuildCreate.class);
            case PROMOTION_RUN:
                return securityService.isProjectFunctionGranted(entity, PromotionRunCreate.class);
            case VALIDATION_RUN:
                return securityService.isProjectFunctionGranted(entity, ValidationRunCreate.class);
            default:
                // Any other entity type is not supported by this property type.
                throw new PropertyUnsupportedEntityTypeException(getClass().getName(), entity.getProjectEntityType());
        }
    }
}
public class Boxing {
    /**
     * Transforms any array into an array of {@code char}.
     *
     * @param src source array
     * @param srcPos start position
     * @param len length
     * @return char array
     */
    public static char[] unboxCharacters(Object src, int srcPos, int len) {
        // Delegate to the overload after normalizing src via array(src) —
        // presumably wraps the raw array into a boxed view; see that overload.
        return unboxCharacters(array(src), srcPos, len);
    }
}
public class AbstractViewQuery {
    /**
     * Click this view will trigger a CocoTask.
     *
     * @param task the task to run when the view is clicked
     * @return this query object (for chaining)
     */
    public T clicked(final CocoTask<?> task) {
        // Adapt the task into a standard click listener and register it via
        // the listener-based overload.
        return clicked(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Hand the task off to the query's task runner — presumably
                // executed asynchronously; TODO confirm.
                query.task(task);
            }
        });
    }
}
public class Auth {
    /**
     * Sets up Authenticator and Authorizer. No-op when authentication is
     * disabled (AllowAllAuthenticator).
     */
    public static void setup() {
        // Nothing to set up when authentication is effectively disabled.
        if (DatabaseDescriptor.getAuthenticator() instanceof AllowAllAuthenticator)
            return;
        setupAuthKeyspace();
        setupTable(USERS_CF, USERS_CF_SCHEMA);
        DatabaseDescriptor.getAuthenticator().setup();
        DatabaseDescriptor.getAuthorizer().setup();
        // register a custom MigrationListener for permissions cleanup after dropped keyspaces/cfs.
        MigrationManager.instance.register(new AuthMigrationListener());
        // the delay is here to give the node some time to see its peers - to reduce
        // "Skipped default superuser setup: some nodes were not ready" log spam.
        // It's the only reason for the delay.
        ScheduledExecutors.nonPeriodicTasks.schedule(new Runnable() {
            public void run() {
                setupDefaultSuperuser();
            }
        }, SUPERUSER_SETUP_DELAY, TimeUnit.MILLISECONDS);
        try {
            // Pre-parse the user-lookup statement once so later lookups skip parsing.
            String query = String.format("SELECT * FROM %s.%s WHERE name = ?", AUTH_KS, USERS_CF);
            selectUserStatement = (SelectStatement) QueryProcessor.parseStatement(query).prepare().statement;
        } catch (RequestValidationException e) {
            throw new AssertionError(e); // not supposed to happen
        }
    }
}
public class FrustumIntersection {
    /**
     * Determine whether the given axis-aligned box is partly or completely within or outside of the frustum
     * defined by <code>this</code> frustum culler and, if the box is not inside this frustum, return the index
     * of the plane that culled it. The box is specified via its min and max corner coordinates.
     * <p>
     * This method differs from {@link #intersectAab(float, float, float, float, float, float)} in that
     * it allows to mask-off planes that should not be calculated. For example, in order to only test a box
     * against the left frustum plane, use a mask of {@link #PLANE_MASK_NX}. Or in order to test all planes
     * <i>except</i> the left plane, use a mask of <code>(~0 ^ PLANE_MASK_NX)</code>.
     * <p>
     * The algorithm implemented by this method is conservative. This means that in certain circumstances a
     * <i>false positive</i> can occur, when the method returns <code>-1</code> for boxes that are actually not
     * visible/do not intersect the frustum.
     * See <a href="http://iquilezles.org/www/articles/frustumcorrect/frustumcorrect.htm">iquilezles.org</a>
     * for an examination of this problem.
     * <p>
     * Reference: <a href="http://old.cescg.org/CESCG-2002/DSykoraJJelinek/">Efficient View Frustum Culling</a>
     *
     * @param minX the x-coordinate of the minimum corner
     * @param minY the y-coordinate of the minimum corner
     * @param minZ the z-coordinate of the minimum corner
     * @param maxX the x-coordinate of the maximum corner
     * @param maxY the y-coordinate of the maximum corner
     * @param maxZ the z-coordinate of the maximum corner
     * @param mask contains as bitset all the planes that should be tested.
     *        This value can be any combination of
     *        {@link #PLANE_MASK_NX}, {@link #PLANE_MASK_PX},
     *        {@link #PLANE_MASK_NY}, {@link #PLANE_MASK_PY},
     *        {@link #PLANE_MASK_NZ} and {@link #PLANE_MASK_PZ}
     * @return the index of the first plane that culled the box, if the box does not intersect the frustum,
     *         or {@link #INTERSECT} if the box intersects the frustum, or {@link #INSIDE} if the box is fully
     *         inside of the frustum. The plane index is one of {@link #PLANE_NX}, {@link #PLANE_PX},
     *         {@link #PLANE_NY}, {@link #PLANE_PY}, {@link #PLANE_NZ} and {@link #PLANE_PZ}
     */
    public int intersectAab(float minX, float minY, float minZ, float maxX, float maxY, float maxZ, int mask) {
        /*
         * This is an implementation of the first algorithm in "2.5 Plane masking and coherency" of the mentioned site.
         *
         * In addition to the algorithm in the paper, this method also returns the index of the first plane that culled the box.
         *
         * For each plane, the first ternary-selected corner is the corner farthest along the plane normal
         * ("p-vertex"): if it is behind the plane, the whole box is outside and the plane index is returned.
         * The second selected corner is the nearest corner ("n-vertex"): `inside` stays true only while
         * every n-vertex is also in front, i.e. the box is fully inside all tested planes.
         */
        int plane = PLANE_NX;
        boolean inside = true;
        // Left plane (NX), unless masked off.
        if ((mask & PLANE_MASK_NX) == 0 || nxX * (nxX < 0 ? minX : maxX) + nxY * (nxY < 0 ? minY : maxY) + nxZ * (nxZ < 0 ? minZ : maxZ) >= -nxW) {
            plane = PLANE_PX;
            inside &= nxX * (nxX < 0 ? maxX : minX) + nxY * (nxY < 0 ? maxY : minY) + nxZ * (nxZ < 0 ? maxZ : minZ) >= -nxW;
            // Right plane (PX).
            if ((mask & PLANE_MASK_PX) == 0 || pxX * (pxX < 0 ? minX : maxX) + pxY * (pxY < 0 ? minY : maxY) + pxZ * (pxZ < 0 ? minZ : maxZ) >= -pxW) {
                plane = PLANE_NY;
                inside &= pxX * (pxX < 0 ? maxX : minX) + pxY * (pxY < 0 ? maxY : minY) + pxZ * (pxZ < 0 ? maxZ : minZ) >= -pxW;
                // Bottom plane (NY).
                if ((mask & PLANE_MASK_NY) == 0 || nyX * (nyX < 0 ? minX : maxX) + nyY * (nyY < 0 ? minY : maxY) + nyZ * (nyZ < 0 ? minZ : maxZ) >= -nyW) {
                    plane = PLANE_PY;
                    inside &= nyX * (nyX < 0 ? maxX : minX) + nyY * (nyY < 0 ? maxY : minY) + nyZ * (nyZ < 0 ? maxZ : minZ) >= -nyW;
                    // Top plane (PY).
                    if ((mask & PLANE_MASK_PY) == 0 || pyX * (pyX < 0 ? minX : maxX) + pyY * (pyY < 0 ? minY : maxY) + pyZ * (pyZ < 0 ? minZ : maxZ) >= -pyW) {
                        plane = PLANE_NZ;
                        inside &= pyX * (pyX < 0 ? maxX : minX) + pyY * (pyY < 0 ? maxY : minY) + pyZ * (pyZ < 0 ? maxZ : minZ) >= -pyW;
                        // Near plane (NZ).
                        if ((mask & PLANE_MASK_NZ) == 0 || nzX * (nzX < 0 ? minX : maxX) + nzY * (nzY < 0 ? minY : maxY) + nzZ * (nzZ < 0 ? minZ : maxZ) >= -nzW) {
                            plane = PLANE_PZ;
                            inside &= nzX * (nzX < 0 ? maxX : minX) + nzY * (nzY < 0 ? maxY : minY) + nzZ * (nzZ < 0 ? maxZ : minZ) >= -nzW;
                            // Far plane (PZ).
                            if ((mask & PLANE_MASK_PZ) == 0 || pzX * (pzX < 0 ? minX : maxX) + pzY * (pzY < 0 ? minY : maxY) + pzZ * (pzZ < 0 ? minZ : maxZ) >= -pzW) {
                                inside &= pzX * (pzX < 0 ? maxX : minX) + pzY * (pzY < 0 ? maxY : minY) + pzZ * (pzZ < 0 ? maxZ : minZ) >= -pzW;
                                // Survived every plane test: fully inside or straddling.
                                return inside ? INSIDE : INTERSECT;
                            }
                        }
                    }
                }
            }
        }
        // The box was culled; `plane` holds the index of the culling plane.
        return plane;
    }
}
public class SimpleLog { /** * Log an error with fatal log level . */ public final void fatal ( Object message , Throwable t ) { } }
if ( isLevelEnabled ( SimpleLog . LOG_LEVEL_FATAL ) ) { log ( SimpleLog . LOG_LEVEL_FATAL , message , t ) ; }
public class DeleteAssessmentRunRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param deleteAssessmentRunRequest the request to marshall; must not be null
     * @param protocolMarshaller the destination marshaller receiving the bound fields
     */
    public void marshall(DeleteAssessmentRunRequest deleteAssessmentRunRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteAssessmentRunRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Bind the request's single field to its wire representation.
            protocolMarshaller.marshall(deleteAssessmentRunRequest.getAssessmentRunArn(), ASSESSMENTRUNARN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ModuleInfo {
    /**
     * Add annotations found in a module descriptor classfile.
     *
     * @param moduleAnnotations the module annotations
     */
    void addAnnotations(final AnnotationInfoList moduleAnnotations) {
        // Currently only class annotations are used in the module-info.class file;
        // nothing to record when none were found.
        if (moduleAnnotations == null || moduleAnnotations.isEmpty()) {
            return;
        }
        if (this.annotationInfo == null) {
            this.annotationInfo = new AnnotationInfoList(moduleAnnotations);
        } else {
            this.annotationInfo.addAll(moduleAnnotations);
        }
    }
}
public class DatasetUtils { public static String getSelect ( final SQLiteDataset dataset ) { } }
final Class < ? > klass = dataset . getClass ( ) ; try { return getSelect ( klass ) ; } catch ( final Exception e ) { Logger . ex ( e ) ; return "" ; }
public class BoxApiCollaboration { /** * A request to delete a collaboration with given collaboration id . * @ param collaborationId id of the collaboration to delete * @ return request to delete a collaboration */ public BoxRequestsShare . DeleteCollaboration getDeleteRequest ( String collaborationId ) { } }
BoxRequestsShare . DeleteCollaboration collab = new BoxRequestsShare . DeleteCollaboration ( collaborationId , getCollaborationInfoUrl ( collaborationId ) , mSession ) ; return collab ;
public class FilesImpl {
    /**
     * Returns the content of the specified compute node file.
     *
     * @param poolId The ID of the pool that contains the compute node.
     * @param nodeId The ID of the compute node that contains the file.
     * @param filePath The path to the compute node file that you want to get the content of.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<InputStream> getFromComputeNodeAsync(String poolId, String nodeId, String filePath, final ServiceCallback<InputStream> serviceCallback) {
        // Delegate to the observable-based overload and wire the callback to it.
        return ServiceFuture.fromHeaderResponse(getFromComputeNodeWithServiceResponseAsync(poolId, nodeId, filePath), serviceCallback);
    }
}
public class LocalisationManager {
    /**
     * Reallocates the messages on every transmission stream except the given one.
     *
     * @param ignoredStream the stream to skip during reallocation (may be null to process all)
     */
    public void reallocateTransmissionStreams(PtoPXmitMsgsItemStream ignoredStream) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "reallocateTransmissionStreams", ignoredStream);
        if (_xmitQueuePoints != null) {
            // Serialize with other reallocators via the destination-wide reallocation lock.
            LockManager lockManager = _baseDestinationHandler.getReallocationLockManager();
            lockManager.lockExclusive();
            try {
                // Re-check under the lock — presumably another thread may clear the field; TODO confirm.
                if (_xmitQueuePoints != null) {
                    // PK57432 Cant hold queuePoints lock while we call reallocate
                    HashMap clonedXmitQueuePoints;
                    synchronized (_xmitQueuePoints) {
                        // It's possible that while we're iterating over the existing xmit streams and
                        // their messages that we inadvertently create a new xmit stream (while in
                        // searchForPtoPOutputHandler). This obviously invalidates the list, so instead
                        // we take a copy up front, safe in the knowledge that any newly created xmit
                        // stream can't possibly need its messages reallocated.
                        clonedXmitQueuePoints = (HashMap) _xmitQueuePoints.clone();
                    }
                    Iterator itr = clonedXmitQueuePoints.values().iterator();
                    while (itr.hasNext()) {
                        PtoPXmitMsgsItemStream xmitQueue = (PtoPXmitMsgsItemStream) itr.next();
                        if (xmitQueue != ignoredStream)
                            xmitQueue.reallocateMsgs();
                    }
                }
            } finally {
                lockManager.unlockExclusive();
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "reallocateTransmissionStreams");
    }
}
public class Property_Builder { /** * Resets the state of this builder . * @ return this { @ code Builder } object */ public org . inferred . freebuilder . processor . property . Property . Builder clear ( ) { } }
Property_Builder defaults = new org . inferred . freebuilder . processor . property . Property . Builder ( ) ; type = defaults . type ; boxedType = defaults . boxedType ; name = defaults . name ; capitalizedName = defaults . capitalizedName ; allCapsName = defaults . allCapsName ; usingBeanConvention = defaults . usingBeanConvention ; getterName = defaults . getterName ; fullyCheckedCast = defaults . fullyCheckedCast ; clearAccessorAnnotations ( ) ; _unsetProperties . clear ( ) ; _unsetProperties . addAll ( defaults . _unsetProperties ) ; return ( org . inferred . freebuilder . processor . property . Property . Builder ) this ;
public class TypeCheck {
    /**
     * Visits a CALL node.
     *
     * @param t The node traversal object that supplies context, such as the
     *          scope chain to use in name lookups as well as error reporting.
     * @param n The node being visited.
     */
    private void visitCall(NodeTraversal t, Node n) {
        checkCallConventions(t, n);
        Node child = n.getFirstChild();
        // Callee type, with null/undefined stripped (calling those is reported separately).
        JSType childType = getJSType(child).restrictByNotNullOrUndefined();
        if (!childType.canBeCalled()) {
            report(n, NOT_CALLABLE, childType.toString());
            ensureTyped(n);
            return;
        }
        // A couple of types can be called as if they were functions.
        // If it is a function type, then validate parameters.
        if (childType.isFunctionType()) {
            FunctionType functionType = childType.toMaybeFunctionType();
            // Non-native constructors should not be called directly
            // unless they specify a return type
            if (functionType.isConstructor() && !functionType.isNativeObjectType()
                    && (functionType.getReturnType().isUnknownType() || functionType.getReturnType().isVoidType())
                    && !n.getFirstChild().isSuper()) {
                report(n, CONSTRUCTOR_NOT_CALLABLE, childType.toString());
            }
            // Functions with explicit 'this' types must be called in a GETPROP or GETELEM.
            if (functionType.isOrdinaryFunction() && !NodeUtil.isGet(child)) {
                JSType receiverType = functionType.getTypeOfThis();
                if (receiverType.isUnknownType() || receiverType.isAllType() || receiverType.isVoidType()
                        || (receiverType.isObjectType() && receiverType.toObjectType().isNativeObjectType())) {
                    // Allow these special cases.
                } else {
                    report(n, EXPECTED_THIS_TYPE, functionType.toString());
                }
            }
            visitArgumentList(n, functionType);
            ensureTyped(n, functionType.getReturnType());
        } else {
            // Callable but not a function type (e.g. the unknown type): nothing more to validate.
            ensureTyped(n);
        }
        // TODO(nicksantos): Add something to check for calls of RegExp objects,
        // which is not supported by IE. Either say something about the return type
        // or warn about the non-portability of the call or both.
    }
}
public class CmsXmlCategoryValue { /** * Creates the String value for this category value element . < p > * @ param cms the cms context * @ return the String value for this category value element */ private String createStringValue ( CmsObject cms ) { } }
Attribute enabled = m_element . attribute ( CmsXmlPage . ATTRIBUTE_ENABLED ) ; String content = "" ; if ( ( enabled == null ) || Boolean . valueOf ( enabled . getText ( ) ) . booleanValue ( ) ) { List < CmsLink > links = getLinks ( cms ) ; int i = 0 ; for ( CmsLink link : links ) { if ( link != null ) { String uri = "" ; uri += link . getUri ( ) ; if ( cms != null ) { uri = cms . getRequestContext ( ) . removeSiteRoot ( link . getUri ( ) ) ; } if ( i > 0 ) { content += "," ; } content += uri ; i ++ ; } } } return content ;
public class AdGroupCriterion {
    /**
     * Gets the labels value for this AdGroupCriterion.
     *
     * @return labels attached to the AdGroupCriterion. To associate an existing
     *         {@link Label} with an existing {@link AdGroupCriterion}, use
     *         {@link AdGroupCriterionService#mutateLabel} with the ADD operator; to
     *         remove one, use the REMOVE operator. To filter on {@link Label}s, use one
     *         of {@link Predicate.Operator#CONTAINS_ALL}, {@link Predicate.Operator#CONTAINS_ANY},
     *         {@link Predicate.Operator#CONTAINS_NONE} with a list of {@link Label} ids.
     *         <span class="constraint CampaignType">This field may not be set for campaign
     *         channel subtype UNIVERSAL_APP_CAMPAIGN.</span>
     *         <span class="constraint ReadOnly">This field is read only and will be ignored
     *         when sent to the API for the following {@link Operator}s: REMOVE and SET.</span>
     */
    public com.google.api.ads.adwords.axis.v201809.cm.Label[] getLabels() {
        // Plain accessor: returns the internal array, not a defensive copy.
        return labels;
    }
}
public class CmsFlexBucketConfiguration { /** * Gets the bucket bit index for the given bucket name . < p > * @ param bucketName a bucket name * @ return the bit index for the bucket */ int getBucketIndex ( String bucketName ) { } }
if ( bucketName . equals ( BUCKET_OTHER ) ) { return 0 ; } for ( int i = 0 ; i < m_bucketNames . size ( ) ; i ++ ) { if ( m_bucketNames . get ( i ) . equals ( bucketName ) ) { return 1 + i ; } } return - 1 ;
public class AWSCognitoIdentityProviderClient { /** * Deletes the specified Amazon Cognito user pool . * @ param deleteUserPoolRequest * Represents the request to delete a user pool . * @ return Result of the DeleteUserPool operation returned by the service . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ throws UserImportInProgressException * This exception is thrown when you are trying to modify a user pool while a user import job is in progress * for that pool . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ sample AWSCognitoIdentityProvider . DeleteUserPool * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / DeleteUserPool " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeleteUserPoolResult deleteUserPool ( DeleteUserPoolRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteUserPool ( request ) ;
public class ProcyonDecompiler {
    /**
     * Decompiles the given .class file and creates the specified output source file.
     *
     * @param rootDir the root directory of the classes (package root) used to derive the type name
     * @param classFilePath the .class file to be decompiled.
     * @param outputDir The directory where decompiled .java files will be placed.
     * @return a result recording either the decompiled file or the failure, so batch runs continue
     * @throws DecompilationException if the input files/directories fail the up-front checks
     */
    @Override
    public DecompilationResult decompileClassFile(Path rootDir, Path classFilePath, Path outputDir) throws DecompilationException {
        Checks.checkDirectoryToBeRead(rootDir.toFile(), "Classes root dir");
        File classFile = classFilePath.toFile();
        Checks.checkFileToBeRead(classFile, "Class file");
        Checks.checkDirectoryToBeFilled(outputDir.toFile(), "Output directory");
        log.info("Decompiling .class '" + classFilePath + "' to '" + outputDir + "' from: '" + rootDir + "'");
        // Path relative to the root, minus the ".class" suffix, is Procyon's type name.
        String name = classFilePath.normalize().toAbsolutePath().toString().substring(rootDir.toAbsolutePath().toString().length() + 1);
        final String typeName = StringUtils.removeEnd(name, ".class"); // . replace ( '/' , '.' ) ;
        DecompilationResult result = new DecompilationResult();
        try {
            DecompilerSettings settings = getDefaultSettings(outputDir.toFile());
            this.procyonConf.setDecompilerSettings(settings); // TODO : This is horrible mess .
            // Resolve types from the application's own classes first, then from the classpath.
            final ITypeLoader typeLoader = new CompositeTypeLoader(new WindupClasspathTypeLoader(rootDir.toString()), new ClasspathTypeLoader());
            WindupMetadataSystem metadataSystem = new WindupMetadataSystem(typeLoader);
            File outputFile = this.decompileType(settings, metadataSystem, typeName);
            result.addDecompiled(Collections.singletonList(classFilePath.toString()), outputFile.getAbsolutePath());
        } catch (Throwable e) {
            // Record the failure in the result instead of propagating, so other classes still get decompiled.
            DecompilationFailure failure = new DecompilationFailure("Error during decompilation of " + classFilePath.toString() + ":\n " + e.getMessage(), Collections.singletonList(name), e);
            log.severe(failure.getMessage());
            result.addFailure(failure);
        }
        return result;
    }
}
public class Configuration { /** * Saves the configuration to { @ code configFile } * @ param configFile The file to save the configuration to . * @ throws IOException if an I / O error occurs */ private void save ( OneOrOther < File , Path > configFile ) throws IOException { } }
if ( configFile . isOne ( ) ) { File config = configFile . getOne ( ) ; this . saveToWriter ( getFileWriter ( config ) ) ; } else { Path config = configFile . getOther ( ) ; this . saveToWriter ( getPathWriter ( config ) ) ; }
public class KubernetesResponseComposer { /** * This is a reimplementation of Java 8 ' s String . join . */ private static String join ( String sep , Collection < String > collection ) { } }
StringBuilder builder = new StringBuilder ( ) ; boolean first = true ; for ( String element : collection ) { if ( first ) { first = false ; } else { builder . append ( sep ) ; } builder . append ( element ) ; } return builder . toString ( ) ;
public class AbstractUserAgentStringParser {
    /**
     * Examines the user agent string whether it is a browser.
     *
     * @param builder Builder for an user agent information; supplies the user agent
     *                string and receives the detected browser and version
     * @param data detection data holding the pattern-to-browser mappings
     */
    private static void examineAsBrowser(final UserAgent.Builder builder, final Data data) {
        Matcher matcher;
        VersionNumber version = VersionNumber.UNKNOWN;
        for (final Entry<BrowserPattern, Browser> entry : data.getPatternToBrowserMap().entrySet()) {
            matcher = entry.getKey().getPattern().matcher(builder.getUserAgentString());
            if (matcher.find()) {
                // First matching pattern wins; copy the browser's fields into the builder.
                entry.getValue().copyTo(builder);
                // try to get the browser version from the first subgroup
                if (matcher.groupCount() > ZERO_MATCHING_GROUPS) {
                    version = VersionNumber.parseVersion(matcher.group(1) != null ? matcher.group(1) : "");
                }
                builder.setVersionNumber(version);
                break;
            }
        }
    }
}
public class JsonToken {
    /**
     * Get the whole slice as a string, resolving JSON escape sequences.
     *
     * @return Slice decoded as UTF_8 string.
     */
    public String decodeJsonLiteral() {
        // This decodes the string from UTF_8 bytes.
        // The slice [off, off+len) appears to include the surrounding quotes,
        // which the loop bounds skip — TODO confirm against the tokenizer.
        StringBuilder out = new StringBuilder(len);
        boolean esc = false;
        final int end = off + len - 1;
        for (int i = off + 1; i < end; ++i) {
            char ch = fb[i];
            if (esc) {
                esc = false;
                switch (ch) {
                    case 'b': out.append('\b'); break;
                    case 'f': out.append('\f'); break;
                    case 'n': out.append('\n'); break;
                    case 'r': out.append('\r'); break;
                    case 't': out.append('\t'); break;
                    case '\"':
                    case '\\':
                    case '/':
                        out.append(ch);
                        break;
                    case 'u':
                        // \ u must be followed by exactly four hex digits.
                        int endU = i + 5;
                        if (end < endU) {
                            // Escape truncated by the end of the literal: emit a placeholder.
                            out.append('?');
                        } else {
                            int n = 0;
                            int pos = i + 1;
                            for (; pos < endU; ++pos) {
                                ch = fb[pos];
                                if (ch >= '0' && ch <= '9') {
                                    n = (n << 4) + (ch - '0');
                                } else if (ch >= 'a' && ch <= 'f') {
                                    n = (n << 4) + (ch - ('a' - 10));
                                } else if (ch >= 'A' && ch <= 'F') {
                                    n = (n << 4) + (ch - ('A' - 10));
                                } else {
                                    // Invalid hex digit: the whole escape decodes to '?'.
                                    n = '?';
                                    break;
                                }
                            }
                            out.append((char) n);
                        }
                        i += 4; // skipping 4 more characters .
                        break;
                    default:
                        // Unknown escape: emit a placeholder.
                        out.append('?');
                        break;
                }
            } else if (ch == '\\') {
                esc = true;
            } else {
                out.append(ch);
            }
        }
        return out.toString();
    }
}
public class FreemarkerCall { /** * Render the template to a String * @ return */ @ Override public String process ( ) { } }
final StringWriter sw = new StringWriter ( ) ; process ( sw ) ; return sw . toString ( ) ;
public class JdbcWriter {
    /**
     * Flushes JdbcWriterCommands and commits.
     * {@inheritDoc}
     *
     * @see org.apache.gobblin.writer.DataWriter#commit()
     */
    @Override
    public void commit() throws IOException {
        try {
            LOG.info("Flushing pending insert.");
            this.commands.flush();
            LOG.info("Commiting transaction.");
            this.conn.commit();
        } catch (Exception e) {
            // Mark the writer as failed so later cleanup can act on it.
            this.failed = true;
            // NOTE(review): wraps in RuntimeException even though the signature declares
            // IOException — callers catching IOException will not see this failure;
            // confirm whether this is intentional.
            throw new RuntimeException(e);
        }
    }
}
public class DiagnosticsInner {
    /**
     * List Site Detector Responses.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;DetectorResponseInner&gt; object
     */
    public Observable<Page<DetectorResponseInner>> listSiteDetectorResponsesSlotNextAsync(final String nextPageLink) {
        // Unwrap the ServiceResponse envelope, exposing just the page of detector responses.
        return listSiteDetectorResponsesSlotNextWithServiceResponseAsync(nextPageLink)
            .map(new Func1<ServiceResponse<Page<DetectorResponseInner>>, Page<DetectorResponseInner>>() {
                @Override
                public Page<DetectorResponseInner> call(ServiceResponse<Page<DetectorResponseInner>> response) {
                    return response.body();
                }
            });
    }
}
public class AwsClientBuilder {
    /**
     * Get the current value of an advanced config option.
     *
     * @param key Key of value to get.
     * @param <T> Type of value to get.
     * @return Value if set, otherwise null.
     */
    protected final <T> T getAdvancedConfig(AdvancedConfig.Key<T> key) {
        // Simple typed lookup; the key's type parameter pins the return type.
        return advancedConfig.get(key);
    }
}
public class InternalService { /** * Send ' user is typing ' . * @ param conversationId ID of a conversation . * @ return Observable to send event . */ public Observable < ComapiResult < Void > > isTyping ( @ NonNull final String conversationId ) { } }
final String token = getToken ( ) ; if ( sessionController . isCreatingSession ( ) || TextUtils . isEmpty ( token ) ) { return Observable . error ( getSessionStateErrorDescription ( ) ) ; } else { return doIsTyping ( token , conversationId , true ) ; }
public class HuberLoss { /** * Computes the HuberLoss loss * @ param pred the predicted value * @ param y the true value * @ param c the threshold value * @ return the HuberLoss loss */ public static double loss ( double pred , double y , double c ) { } }
final double x = y - pred ; if ( Math . abs ( x ) <= c ) return x * x * 0.5 ; else return c * ( Math . abs ( x ) - c / 2 ) ;
public class FormatUtilities { /** * Returns the given time formatted as a number of days , hours and minutes . * @ param dt The time to be parsed * @ return The given time formatted as a number of days , hours and minutes */ static public String getLongFormattedDays ( long dt ) { } }
StringBuffer ret = new StringBuffer ( ) ; long days = dt / 86400000L ; long millis = dt - ( days * 86400000L ) ; if ( days > 0 ) { ret . append ( Long . toString ( days ) ) ; ret . append ( " day" ) ; if ( days > 1 ) ret . append ( "s" ) ; } long hours = millis / 3600000L ; millis = millis - ( hours * 3600000L ) ; if ( hours > 0 ) { if ( ret . length ( ) > 0 ) ret . append ( " " ) ; ret . append ( Long . toString ( hours ) ) ; ret . append ( " hour" ) ; if ( hours > 1 ) ret . append ( "s" ) ; } long minutes = millis / 60000L ; millis = millis - ( minutes * 60000L ) ; if ( minutes > 0 ) { if ( ret . length ( ) > 0 ) ret . append ( " " ) ; ret . append ( Long . toString ( minutes ) ) ; ret . append ( " minute" ) ; if ( minutes > 1 ) ret . append ( "s" ) ; } long seconds = millis / 1000L ; if ( seconds > 0 ) { if ( ret . length ( ) > 0 ) ret . append ( " " ) ; ret . append ( Long . toString ( seconds ) ) ; ret . append ( " second" ) ; if ( seconds > 1 ) ret . append ( "s" ) ; } return ret . toString ( ) ;
public class OutputStreamInterceptingFilter {

    /**
     * Handles a single "blob" instruction, decoding its base64 data,
     * sending that data to the associated OutputStream, and ultimately
     * dropping the "blob" instruction such that the client never receives
     * it. If no OutputStream is associated with the stream index within
     * the "blob" instruction, the instruction is passed through untouched.
     *
     * @param instruction
     *     The "blob" instruction being handled.
     *
     * @return
     *     The originally-provided "blob" instruction, if that instruction
     *     should be passed through to the client, or null if the "blob"
     *     instruction should be dropped.
     */
    private GuacamoleInstruction handleBlob(GuacamoleInstruction instruction) {

        // Verify all required arguments are present (stream index + data)
        List<String> args = instruction.getArgs();
        if (args.size() < 2)
            return instruction;

        // Pull associated stream; if this index is not intercepted, pass
        // the instruction through untouched
        String index = args.get(0);
        InterceptedStream<OutputStream> stream = getInterceptedStream(index);
        if (stream == null)
            return instruction;

        // Decode blob; drop the instruction if the data is not valid base64
        byte[] blob;
        try {
            String data = args.get(1);
            blob = BaseEncoding.base64().decode(data);
        }
        catch (IllegalArgumentException e) {
            logger.warn("Received base64 data for intercepted stream was invalid.");
            logger.debug("Decoding base64 data for intercepted stream failed.", e);
            return null;
        }

        try {

            // Attempt to write data to stream
            stream.getStream().write(blob);

            // Force client to respond with their own "ack" if we need to
            // confirm that they are not falling behind with respect to the
            // graphical session: forward an empty blob instead of acking here
            if (!acknowledgeBlobs) {
                acknowledgeBlobs = true;
                return new GuacamoleInstruction("blob", index, "");
            }

            // Otherwise, acknowledge the blob on the client's behalf
            sendAck(index, "OK", GuacamoleStatus.SUCCESS);

        }
        catch (IOException e) {
            // Report the failed write back to the sender of the blob
            sendAck(index, "FAIL", GuacamoleStatus.SERVER_ERROR);
            logger.debug("Write failed for intercepted stream.", e);
        }

        // Instruction was handled purely internally
        return null;
    }
}
public class IndexScanPlanNode {

    /**
     * Explains the search-key expressions used to start the index scan
     * at a particular key, possibly compound.
     *
     * @param asIndexed human-readable names of the indexed columns/expressions
     * @param nCovered  number of index components covered by search keys
     * @return a description such as "(A = 1) AND (B &lt; 2)", or "start" when
     *         the scan simply begins at the start of the index
     */
    private String explainSearchKeys(String[] asIndexed, int nCovered) {
        // By default, indexing starts at the start of the index.
        if (m_searchkeyExpressions.isEmpty()) {
            return "start";
        }
        String conjunction = "";
        String result = "(";
        int prefixSize = nCovered - 1;
        // All components before the last are equality comparisons
        // (NOT DISTINCT being the null-tolerant form of equality).
        for (int ii = 0; ii < prefixSize; ++ii) {
            result += conjunction + asIndexed[ii]
                    + (m_compareNotDistinct.get(ii) ? " NOT DISTINCT " : " = ")
                    + m_searchkeyExpressions.get(ii).explain(getTableNameForExplain());
            conjunction = ") AND (";
        }
        // last element: rendered with the lookup type's comparison operator
        result += conjunction + asIndexed[prefixSize] + " ";
        if (m_lookupType == IndexLookupType.EQ && m_compareNotDistinct.get(prefixSize)) {
            result += "NOT DISTINCT";
        } else {
            result += m_lookupType.getSymbol();
        }
        result += " " + m_searchkeyExpressions.get(prefixSize).explain(getTableNameForExplain());
        // For range lookups, note when NULLs are deliberately included.
        if (m_lookupType != IndexLookupType.EQ && m_compareNotDistinct.get(prefixSize)) {
            result += ", including NULLs";
        }
        result += ")";
        return result;
    }
}
public class CoordinatesUtils { /** * Returns the 3D length of the geometry * @ param geom * @ return */ public static double length3D ( Geometry geom ) { } }
double sum = 0 ; for ( int i = 0 ; i < geom . getNumGeometries ( ) ; i ++ ) { Geometry subGeom = geom . getGeometryN ( i ) ; if ( subGeom instanceof Polygon ) { sum += length3D ( ( Polygon ) subGeom ) ; } else if ( subGeom instanceof LineString ) { sum += length3D ( ( LineString ) subGeom ) ; } } return sum ;
public class MemoryManager {

    /**
     * Releases all memory segments for the given owner.
     *
     * @param owner The owner memory segments are to be released for; a null
     *              owner is silently ignored.
     * @throws IllegalStateException if the memory manager has been shut down
     */
    public void releaseAll(Object owner) {
        if (owner == null) {
            return;
        }
        // ----- BEGIN CRITICAL SECTION -----
        synchronized (lock) {
            if (isShutDown) {
                throw new IllegalStateException("Memory manager has been shut down.");
            }
            // get all segments registered for this owner
            final Set<MemorySegment> segments = allocatedSegments.remove(owner);
            // all segments may have been freed previously individually
            if (segments == null || segments.isEmpty()) {
                return;
            }
            // free each segment: pre-allocated segments are recycled into the
            // pool, otherwise the memory is released and the page budget restored
            if (isPreAllocated) {
                for (MemorySegment seg : segments) {
                    memoryPool.returnSegmentToPool(seg);
                }
            } else {
                for (MemorySegment seg : segments) {
                    seg.free();
                }
                numNonAllocatedPages += segments.size();
            }
            segments.clear();
        }
        // ----- END CRITICAL SECTION -----
    }
}
public class AbstractCommandLineRunner {

    /**
     * Creates JS source code inputs from a list of files.
     *
     * @param jsModuleSpecs module specs; non-emptiness affects ijs deduplication
     * @param files JS source flag entries; falls back to stdin ("-") when
     *              empty and no JSON files are given
     * @param jsonFiles optional JSON file specs, may be null
     * @param moduleRoots module roots used when deduplicating .i.js files
     * @return the created source inputs; in test mode, the test supplier's
     *         result (which may be null)
     * @throws IOException if reading an input fails
     */
    @GwtIncompatible("Unnecessary")
    private List<SourceFile> createSourceInputs(List<JsModuleSpec> jsModuleSpecs,
            List<FlagEntry<JsSourceType>> files, List<JsonFileSpec> jsonFiles,
            List<String> moduleRoots) throws IOException {
        if (isInTestMode()) {
            return inputsSupplierForTesting != null ? inputsSupplierForTesting.get() : null;
        }
        if (files.isEmpty() && jsonFiles == null) {
            // Request to read from stdin.
            files = ImmutableList.of(new FlagEntry<JsSourceType>(JsSourceType.JS, "-"));
        }
        // Report (but do not abort on) problems found while deduplicating
        // .i.js files against their implementation files.
        for (JSError error : deduplicateIjsFiles(files, moduleRoots, !jsModuleSpecs.isEmpty())) {
            compiler.report(error);
        }
        try {
            if (jsonFiles != null) {
                return createInputs(files, jsonFiles, jsModuleSpecs);
            } else {
                return createInputs(files, true, jsModuleSpecs);
            }
        } catch (FlagUsageException e) {
            // NOTE(review): the original cause is dropped here; only its
            // message survives -- confirm FlagUsageException accepts a cause.
            throw new FlagUsageException("Bad --js flag. " + e.getMessage());
        }
    }
}
public class JolokiaHttpHandler { /** * Used for checking origin or referer is an origin policy is enabled */ private String extractOriginOrReferer ( HttpExchange pExchange ) { } }
Headers headers = pExchange . getRequestHeaders ( ) ; String origin = headers . getFirst ( "Origin" ) ; if ( origin == null ) { origin = headers . getFirst ( "Referer" ) ; } return origin != null ? origin . replaceAll ( "[\\n\\r]*" , "" ) : null ;
public class FieldDescriptor {

    /**
     * Serializes this field descriptor to its repository-XML representation
     * as a field-descriptor element. Optional attributes (primary key,
     * nullable, autoincrement, locking, conversion, length, precision,
     * scale) are only written when they differ from their defaults.
     *
     * @see XmlCapable#toXML()
     */
    public String toXML() {
        RepositoryTags tags = RepositoryTags.getInstance();
        String eol = SystemUtils.LINE_SEPARATOR;

        // opening tag + attributes
        StringBuffer result = new StringBuffer(1024);
        result.append(" ");
        result.append(tags.getOpeningTagNonClosingById(FIELD_DESCRIPTOR));
        result.append(" ");
        result.append(eol);

        // // id
        // String id = new Integer(getColNo()).toString();
        // result += /* " " + */ tags.getAttribute(ID, id) + eol;

        // name
        result.append(" ");
        result.append(tags.getAttribute(FIELD_NAME, this.getAttributeName()));
        result.append(eol);

        // table not yet implemented

        // column
        result.append(" ");
        result.append(tags.getAttribute(COLUMN_NAME, this.getColumnName()));
        result.append(eol);

        // jdbc-type
        result.append(" ");
        result.append(tags.getAttribute(JDBC_TYPE, this.getColumnType()));
        result.append(eol);

        // primarykey: only written when true
        if (this.isPrimaryKey()) {
            result.append(" ");
            result.append(tags.getAttribute(PRIMARY_KEY, "true"));
            result.append(eol);
        }

        // nullable: only written (as false) when the field is required
        if (this.isRequired()) {
            result.append(" ");
            result.append(tags.getAttribute(NULLABLE, "false"));
            result.append(eol);
        }

        // indexed not yet implemented

        // autoincrement
        if (this.isAutoIncrement()) {
            result.append(" ");
            result.append(tags.getAttribute(AUTO_INCREMENT, "true"));
            result.append(eol);
        }

        // locking
        if (this.isLocking()) {
            result.append(" ");
            result.append(tags.getAttribute(LOCKING, "true"));
            result.append(eol);
        }

        // updateLock: default is true so only write if false
        if (!this.isUpdateLock()) {
            result.append(" ");
            result.append(tags.getAttribute(UPDATE_LOCK, "false"));
            result.append(eol);
        }

        // default-fetch not yet implemented

        // conversion: only written when a non-default converter is configured
        if (this.getFieldConversion().getClass() != FieldConversionDefaultImpl.class) {
            result.append(" ");
            result.append(tags.getAttribute(FIELD_CONVERSION, getFieldConversion().getClass().getName()));
            result.append(eol);
        }

        // length
        if (this.isLengthSpecified()) {
            result.append(" ");
            result.append(tags.getAttribute(LENGTH, "" + getLength()));
            result.append(eol);
        }

        // precision
        if (this.isPrecisionSpecified()) {
            result.append(" ");
            result.append(tags.getAttribute(PRECISION, "" + getPrecision()));
            result.append(eol);
        }

        // scale
        if (this.isScaleSpecified()) {
            result.append(" ");
            result.append(tags.getAttribute(SCALE, "" + getScale()));
            result.append(eol);
        }

        // access
        result.append(" ");
        result.append(tags.getAttribute(ACCESS, this.getAccess()));
        result.append(eol);

        // close the element
        result.append(" />");
        result.append(eol);
        return result.toString();
    }
}
public class MultiMetaDataRequest { /** * Append any specified parameters to the provided WebTarget . * @ param webTarget a web target used by the Jersey Client API , not null * @ return the WebTarget with any path and query parameters appended , not null */ public WebTarget appendPathAndQueryParameters ( final WebTarget webTarget ) { } }
ArgumentChecker . notNull ( webTarget , "webTarget" ) ; WebTarget resultTarget = webTarget ; resultTarget = resultTarget . path ( MULTI_SET_NAME + EXTENSION ) ; resultTarget = resultTarget . queryParam ( COLUMNS_PARAM , buildCodeList ( _quandlCodes ) ) ; // This is a hack that stops Quandl from returning all the data as part of the query resultTarget = resultTarget . queryParam ( EXCLUDE_DATA_PARAM , INFINITE_FUTURE ) ; return resultTarget ;
public class PApplicationException { /** * The application exception type . * @ return Optional of the < code > id < / code > field value . */ @ javax . annotation . Nonnull public java . util . Optional < net . morimekta . providence . PApplicationExceptionType > optionalId ( ) { } }
return java . util . Optional . ofNullable ( mId ) ;
public class DataTableCore { /** * Optional parameter defining which rows are selected when the datatable is initially rendered . If this attribute is an integer , it ' s the row index . If it ' s a string , it ' s a jQuery expression . If it ' s another object , it ' s compared to the loop var . Automatically sets selection = ' true ' and selected - items = ' row ' . < P > * @ return Returns the value of the attribute , or null , if it hasn ' t been set by the JSF file . */ public java . lang . Object getSelectedRow ( ) { } }
return ( java . lang . Object ) getStateHelper ( ) . eval ( PropertyKeys . selectedRow ) ;
public class WorkerConfigurationUtils {

    /**
     * Resolves the minimum size and the actual size of the worker's in-buffer
     * from configuration, with thread-count-based defaults.
     *
     * @param executionThreadsCount number of execution threads
     * @return pair of (minimum in-buffer size, in-buffer size)
     * @throws IllegalStateException when the resolved in-buffer size is not
     *         strictly greater than the resolved minimum
     */
    public Pair<Integer, Integer> getMinSizeAndSizeOfInBuffer(int executionThreadsCount) {
        // Default minimum: 20 percent of executionThreadsCount, but at least 1
        int defaultMinInBufferSize = Math.max(1, executionThreadsCount / 5);
        int minInBufferSizeLocal = getInteger(WORKER_INBUFFER_MIN_SIZE, defaultMinInBufferSize);
        // Non-positive configured minimum falls back to the default.
        int minInBufferSize = (minInBufferSizeLocal > 0) ? minInBufferSizeLocal : defaultMinInBufferSize;
        // Default size: 2 for a single thread, otherwise 1.5x the thread count.
        int defaultNewInBufferSize = (executionThreadsCount == 1) ? 2 : ((3 * executionThreadsCount) / 2);
        int newInBufferSizeLocal = getInteger(WORKER_INBUFFER_SIZE, defaultNewInBufferSize);
        // A configured size not above the minimum falls back to the default...
        int newInBufferSize = (newInBufferSizeLocal > minInBufferSize) ? newInBufferSizeLocal : defaultNewInBufferSize;
        // ...and if even the default does not exceed the minimum, fail fast.
        if (newInBufferSize <= minInBufferSize) {
            throw new IllegalStateException(format("Value of property \"%s\" must be greater than the value of property \"%s\".", WORKER_INBUFFER_SIZE, WORKER_INBUFFER_MIN_SIZE));
        }
        return new ImmutablePair<>(minInBufferSize, newInBufferSize);
    }
}
public class ExportImportHelper {

    /**
     * Stores the given rows in the data-set map under the given name and
     * logs the row count.
     *
     * @param name key under which the rows are registered
     * @param rows the rows to register; must not be null (size is logged)
     */
    public void exportBySql(String name, List<? extends Map<String, Object>> rows) {
        dataSets.put(name, rows);
        logger.info("Exported: name=[{}] count=[{}]", name, rows.size());
    }
}
public class CommonDatabaseMetaData {

    /**
     * Retrieves a description of the stored procedures available in the given
     * catalog. This implementation does not expose stored procedures, so an
     * empty result set is always returned regardless of the arguments.
     *
     * @param catalog a catalog name; "" retrieves those without a catalog;
     *        <code>null</code> means the catalog name should not be used to
     *        narrow the search
     * @param schemaPattern a schema name pattern; "" retrieves those without
     *        a schema; <code>null</code> means the schema name should not be
     *        used to narrow the search
     * @param procedureNamePattern a procedure name pattern
     * @return <code>ResultSet</code> - always empty for this implementation
     * @throws java.sql.SQLException if a database access error occurs
     * @see #getSearchStringEscape
     */
    public ResultSet getProcedures(final String catalog, final String schemaPattern,
            final String procedureNamePattern) throws SQLException {
        log.info("getting empty result set, procedures");
        return getEmptyResultSet();
    }
}
public class RecurlyClient { /** * Lookup a transaction * @ param transactionId recurly transaction id * @ return the transaction if found , null otherwise */ public Transaction getTransaction ( final String transactionId ) { } }
if ( transactionId == null || transactionId . isEmpty ( ) ) throw new RuntimeException ( "transactionId cannot be empty!" ) ; return doGET ( Transactions . TRANSACTIONS_RESOURCE + "/" + transactionId , Transaction . class ) ;
public class ErrorFactory { /** * Creates a DecodingException object . * @ param errorId * reference to the error identifier * @ param message * additional message * @ return DecodingException */ public static DecodingException createDecodingException ( final ErrorKeys errorId , final String message ) { } }
return new DecodingException ( errorId . toString ( ) + ":\r\n" + message ) ;
public class WeakSet {

    /**
     * Puts the specified value in the set.
     *
     * @param value the value; must not be null (its hashCode is taken).
     * @return the value of any previous put or {@code null} if there was no
     *         such value.
     */
    public T put(final T value) {
        // Drop entries whose referents have been garbage-collected first.
        expungeStaleEntries();
        final int hash = value.hashCode();
        // Mask the sign bit so the slot index is non-negative.
        int index = (hash & 0x7FFFFFFF) % elementData.length;
        // Walk the collision chain looking for an equal element.
        Entry<T> entry = elementData[index];
        while (entry != null && !eq(value, entry.get())) {
            entry = entry.nextInSlot;
        }
        if (entry == null) {
            // Not present: grow the table if over the threshold (recomputing
            // the slot against the new length), then insert a new entry.
            if (++elementCount > threshold) {
                expandElementArray(elementData.length);
                index = (hash & 0x7FFFFFFF) % elementData.length;
            }
            entry = createHashedEntry(value, index);
            return null;
        }
        // Present: return the existing (weakly held) value.
        final T result = entry.get();
        return result;
    }
}
public class CodeGenerator {

    /**
     * Loads all CLDR data and invokes the code generators for each data type.
     * Output is a series of Java classes under the outputDir.
     *
     * @param outputDir root directory the generated classes are written to
     * @throws IOException if reading CLDR data or writing a class fails
     */
    public static void generate(Path outputDir) throws IOException {
        DataReader reader = DataReader.get();

        // Run the per-data-type generators. The calendar and number
        // generators return the per-locale classes they produced so those
        // can be registered in the CLDR index class built below.
        CalendarCodeGenerator datetimeGenerator = new CalendarCodeGenerator();
        Map<LocaleID, ClassName> dateClasses = datetimeGenerator.generate(outputDir, reader);
        PluralCodeGenerator pluralGenerator = new PluralCodeGenerator();
        pluralGenerator.generate(outputDir, reader);
        NumberCodeGenerator numberGenerator = new NumberCodeGenerator();
        Map<LocaleID, ClassName> numberClasses = numberGenerator.generate(outputDir, reader);
        LanguageCodeGenerator languageGenerator = new LanguageCodeGenerator();
        languageGenerator.generate(outputDir, reader);

        // Assemble the "CLDR" index class: registration methods, a private
        // constructor, and a static singleton accessor.
        MethodSpec registerCalendars = indexFormatters("registerCalendars", "registerCalendarFormatter", dateClasses);
        MethodSpec registerNumbers = indexFormatters("registerNumbers", "registerNumberFormatter", numberClasses);
        MethodSpec constructor = MethodSpec.constructorBuilder().addModifiers(PRIVATE).build();
        FieldSpec instance = FieldSpec.builder(CLDR, "instance", PRIVATE, STATIC, FINAL).build();
        MethodSpec getter = MethodSpec.methodBuilder("get")
                .addModifiers(PUBLIC, STATIC)
                .returns(CLDR)
                .addStatement("return instance")
                .build();
        TypeSpec.Builder type = TypeSpec.classBuilder("CLDR")
                .addModifiers(PUBLIC)
                .superclass(CLDR_BASE)
                .addMethod(constructor)
                .addMethod(getter)
                .addMethod(registerCalendars)
                .addMethod(registerNumbers);

        // Add locale tables, alias maps, likely subtags, currencies and
        // plural rules to the index class.
        Set<LocaleID> availableLocales = reader.availableLocales().stream()
                .map(s -> LocaleID.parse(s))
                .collect(Collectors.toSet());
        createLocales(type, reader.defaultContent(), availableLocales);
        createLanguageAliases(type, reader.languageAliases());
        createTerritoryAliases(type, reader.territoryAliases());
        createLikelySubtags(type, reader.likelySubtags());
        createCurrencies(type, numberGenerator.getCurrencies(reader));
        addPluralRules(type);

        // Initialize all static maps.
        type.addStaticBlock(CodeBlock.builder()
                .addStatement("registerCalendars()")
                .addStatement("registerNumbers()")
                .addStatement("registerDefaultContent()")
                .addStatement("registerLanguageAliases()")
                .addStatement("registerTerritoryAliases()")
                .addStatement("registerLikelySubtags()")
                .addStatement("instance = new CLDR()")
                .build());
        type.addField(instance);
        saveClass(outputDir, PACKAGE_CLDR, "CLDR", type.build());
    }
}
public class HeapDisk {

    /**
     * Frees the last {@code count} blocks from the given file.
     */
    public synchronized void free(RegularFile file, int count) {
        // Recycle as many of the freed blocks into the block cache as fit...
        int remainingCacheSpace = maxCachedBlockCount - blockCache.blockCount();
        if (remainingCacheSpace > 0) {
            file.copyBlocksTo(blockCache, Math.min(count, remainingCacheSpace));
        }
        // ...then drop them from the file and update the allocation count.
        file.truncateBlocks(file.blockCount() - count);
        allocatedBlockCount -= count;
    }
}
public class DefaultGrailsDomainClassInjector { /** * Tests whether the ClassNode implements the specified method name * @ param classNode The ClassNode * @ param methodName The method name * @ param argTypes * @ return True if it implements the method */ private static boolean implementsMethod ( ClassNode classNode , String methodName , Class [ ] argTypes ) { } }
List methods = classNode . getMethods ( ) ; if ( argTypes == null || argTypes . length == 0 ) { for ( Iterator i = methods . iterator ( ) ; i . hasNext ( ) ; ) { MethodNode mn = ( MethodNode ) i . next ( ) ; boolean methodMatch = mn . getName ( ) . equals ( methodName ) ; if ( methodMatch ) return true ; // TODO Implement further parameter analysis } } return false ;
public class DocumentAndOp { /** * Set the instance to be an update operation . * @ param doc * @ param term */ public void setUpdate ( Document doc , Term term ) { } }
this . op = Op . UPDATE ; this . doc = doc ; this . term = term ;
public class AmazonIdentityManagementClient {

    /**
     * Updates the password policy settings for the AWS account.
     * <p>
     * Note that this operation does not support partial updates: parameters
     * that are not specified revert to their default values.
     * <p>
     * For more information about using a password policy, see "Managing an
     * IAM Password Policy" in the IAM User Guide.
     *
     * @param request the update request
     * @return Result of the UpdateAccountPasswordPolicy operation returned by
     *         the service.
     * @throws NoSuchEntityException the request referenced a resource entity
     *         that does not exist
     * @throws MalformedPolicyDocumentException the policy document was
     *         malformed
     * @throws LimitExceededException the request attempted to create
     *         resources beyond the current AWS account limits
     * @throws ServiceFailureException the request processing failed because
     *         of an unknown error, exception or failure
     * @sample AmazonIdentityManagement.UpdateAccountPasswordPolicy
     */
    @Override
    public UpdateAccountPasswordPolicyResult updateAccountPasswordPolicy(UpdateAccountPasswordPolicyRequest request) {
        // Standard SDK dispatch: apply request handlers, then execute.
        request = beforeClientExecution(request);
        return executeUpdateAccountPasswordPolicy(request);
    }
}
public class MkCoPTree { /** * Creates a new directory entry representing the specified node . * @ param node the node to be represented by the new entry * @ param routingObjectID the id of the routing object of the node * @ param parentDistance the distance from the routing object of the node to * the routing object of the parent node */ @ Override protected MkCoPEntry createNewDirectoryEntry ( MkCoPTreeNode < O > node , DBID routingObjectID , double parentDistance ) { } }
return new MkCoPDirectoryEntry ( routingObjectID , parentDistance , node . getPageID ( ) , node . coveringRadiusFromEntries ( routingObjectID , this ) , null ) ; // node . conservativeKnnDistanceApproximation ( k _ max ) ) ;
public class PooledService {

    /**
     * Helper method to ensure a minimum number of endpoints is enabled.
     * Creates, registers and asynchronously connects new endpoints until
     * the pool holds at least the configured minimum.
     */
    private void ensureMinimum() {
        int belowMin = minEndpoints - endpoints.size();
        if (belowMin > 0) {
            LOGGER.debug(logIdent(hostname, this) + "Service is {} below minimum, filling up.", belowMin);
            synchronized (epMutex) {
                for (int i = 0; i < belowMin; i++) {
                    Endpoint endpoint = endpointFactory.create(hostname, bucket, username, password, port, ctx);
                    endpoints.add(endpoint);
                    endpointStates.register(endpoint, endpoint);
                    // Connect asynchronously; lifecycle emissions are ignored
                    // because state is tracked through endpointStates, but
                    // connection errors are still logged.
                    endpoint.connect().subscribe(new Subscriber<LifecycleState>() {
                        @Override
                        public void onCompleted() {
                            /* ignored on purpose */
                        }

                        @Override
                        public void onError(Throwable e) {
                            LOGGER.warn(logIdent(hostname, PooledService.this) + "Got an error while connecting endpoint!", e);
                        }

                        @Override
                        public void onNext(LifecycleState state) {
                            /* ignored on purpose */
                        }
                    });
                }
                LOGGER.debug(logIdent(hostname, PooledService.this) + "New number of endpoints is {}", endpoints.size());
            }
        }
    }
}
public class ApacheHttpTransport { /** * Returns a new instance of the default HTTP parameters we use . */ static HttpParams newDefaultHttpParams ( ) { } }
HttpParams params = new BasicHttpParams ( ) ; // Turn off stale checking . Our connections break all the time anyway , // and it ' s not worth it to pay the penalty of checking every time . HttpConnectionParams . setStaleCheckingEnabled ( params , false ) ; HttpConnectionParams . setSocketBufferSize ( params , 8192 ) ; ConnManagerParams . setMaxTotalConnections ( params , 200 ) ; ConnManagerParams . setMaxConnectionsPerRoute ( params , new ConnPerRouteBean ( 20 ) ) ; return params ;
public class BlogPreProcessor { /** * Given a blog file , read through each line and extract the content and * updated date , printing these as one line to the result file . */ public void processFile ( File blogFile ) throws IOException { } }
BufferedReader br = new BufferedReader ( new FileReader ( blogFile ) ) ; String line = null ; String date = null ; String id = null ; StringBuilder content = new StringBuilder ( ) ; boolean needMoreContent = false ; while ( ( line = br . readLine ( ) ) != null ) { if ( line . contains ( "<id>" ) ) { int startIndex = line . indexOf ( ">" ) + 1 ; int endIndex = line . lastIndexOf ( "<" ) ; id = line . substring ( startIndex , endIndex ) ; } else if ( line . contains ( "<content>" ) ) { // Extract the start of a content node . If the previous content , // updated pair was incomplete , i . e . updated had no value , this will // overwrite the previous content value . int startIndex = line . indexOf ( ">" ) + 1 ; int endIndex = line . lastIndexOf ( "<" ) ; content = new StringBuilder ( ) ; if ( endIndex > startIndex ) content . append ( line . substring ( startIndex , endIndex ) ) ; else { content . append ( line . substring ( startIndex ) ) ; needMoreContent = true ; } } else if ( needMoreContent ) { // The content node might span several lines , so consider all lines read // until the next close bracket to be part of the current content . int endIndex = ( line . contains ( "</content>" ) ) ? line . lastIndexOf ( "<" ) : - 1 ; if ( endIndex > 0 ) { content . append ( line . substring ( 0 , endIndex ) ) ; needMoreContent = false ; } else content . append ( line ) ; } else if ( line . contains ( "<updated>" ) ) { // The updated timestamp only spans one line . int startIndex = line . indexOf ( ">" ) + 1 ; int endIndex = line . lastIndexOf ( "<" ) ; date = line . substring ( startIndex , endIndex ) ; if ( date . equals ( "" ) ) date = null ; } else if ( content != null && date != null ) { // Cleand and print out the content and date . long dateTime = Timestamp . valueOf ( date ) . getTime ( ) ; if ( dateTime < beginTime || dateTime > endTime ) { needMoreContent = false ; date = null ; continue ; } String cleanedContent = processor . process ( content . 
toString ( ) ) ; if ( ! cleanedContent . equals ( "" ) ) { synchronized ( pw ) { pw . format ( "%d %s\n" , dateTime , cleanedContent ) ; pw . flush ( ) ; } } LOGGER . info ( String . format ( "Processed blog %s with timestamp %d" , id , dateTime ) ) ; needMoreContent = false ; date = null ; } } br . close ( ) ;
public class Scene { /** * Iterates over the list of { @ link Layer } and draws them all . */ @ Override public final Scene draw ( ) { } }
final NFastArrayList < Layer > layers = getChildNodes ( ) ; if ( null != layers ) { final int size = layers . size ( ) ; for ( int i = 0 ; i < size ; i ++ ) { final Layer layer = layers . get ( i ) ; if ( null != layer ) { layer . draw ( ) ; } } } return this ;
public class ExceptionDestinationHandlerImpl {

    /**
     * Wrapper method for handleUndeliverableMessage. This version will be
     * called from an external component via the
     * com.ibm.ws.sib.processor.ExceptionDestinationHandler interface, e.g.
     * from the MQLink in the comms component.
     *
     * @param msg the undeliverable message
     * @param alternateUser user under whose authority the message is handled
     * @param tran the transaction that the message was delivered under
     * @param exceptionReason the reason why the message could not be delivered
     * @param exceptionStrings a list of inserts to place into an error message
     * @return a code indicating what we did with the message
     */
    @Override
    public UndeliverableReturnCode handleUndeliverableMessage(SIBusMessage msg, String alternateUser,
            TransactionCommon tran, int exceptionReason, String[] exceptionStrings) {
        // F001333-14610
        // Delegate down onto the new method passing a null subscription ID.
        return handleUndeliverableMessage(msg, alternateUser, tran, exceptionReason, exceptionStrings, null);
    }
}
public class ConfigurationContext {

    /**
     * Register bundles resolved by lookup mechanism. {@link GuiceyBundleLookup}
     * used as context.
     *
     * @param bundles bundles resolved by lookup mechanism
     * @see GuiceyBundleLookup
     */
    public void registerLookupBundles(final List<GuiceyBundle> bundles) {
        // Register each bundle within the bundle-lookup configuration scope.
        setScope(ConfigScope.BundleLookup.getType());
        for (GuiceyBundle bundle : bundles) {
            register(ConfigItem.Bundle, bundle);
        }
        closeScope();
        // Fire lifecycle events: first for the lookup result itself, then
        // for the overall enabled/disabled bundle resolution.
        lifecycle().bundlesFromLookupResolved(bundles);
        lifecycle().bundlesResolved(getEnabledBundles(), getDisabledBundles());
    }
}
public class ZipUtils { /** * Replace the file denoted by the zipFile with the provided data . The zipFile specifies * both the zip and the file inside the zip using ' ! ' as separator . * @ param zipFile The zip - file to process * @ param data The string - data to replace * @ param encoding The encoding that should be used when writing the string data to the file * @ throws IOException Thrown if files can not be read or any other error occurs while handling the Zip - files */ public static void replaceInZip ( String zipFile , String data , String encoding ) throws IOException { } }
if ( ! isFileInZip ( zipFile ) ) { throw new IOException ( "Parameter should specify a file inside a ZIP file, but had: " + zipFile ) ; } File zip = new File ( zipFile . substring ( 0 , zipFile . indexOf ( ZIP_DELIMITER ) ) ) ; String zipOut = zipFile . substring ( zipFile . indexOf ( ZIP_DELIMITER ) + 1 ) ; logger . info ( "Updating containing Zip " + zip + " to " + zipOut ) ; // replace in zip ZipUtils . replaceInZip ( zip , zipOut , data , encoding ) ;
public class RequireJavaVersion {

    /**
     * Enforces that the detected JDK version satisfies the configured
     * version requirement.
     *
     * @param helper the enforcer rule helper supplying the log
     * @throws EnforcerRuleException when the detected JDK version does not
     *         satisfy the required version
     * @see org.apache.maven.enforcer.rule.api.EnforcerRule#execute(org.apache.maven.enforcer.rule.api.EnforcerRuleHelper)
     */
    public void execute(EnforcerRuleHelper helper) throws EnforcerRuleException {
        String javaVersion = SystemUtils.JAVA_VERSION_TRIMMED;
        Log log = helper.getLog();
        log.debug("Detected Java String: " + javaVersion);
        // Normalize vendor-specific version strings before parsing.
        javaVersion = normalizeJDKVersion(javaVersion);
        log.debug("Normalized Java String: " + javaVersion);
        ArtifactVersion detectedJdkVersion = new DefaultArtifactVersion(javaVersion);
        log.debug("Parsed Version: Major: " + detectedJdkVersion.getMajorVersion()
                + " Minor: " + detectedJdkVersion.getMinorVersion()
                + " Incremental: " + detectedJdkVersion.getIncrementalVersion()
                + " Build: " + detectedJdkVersion.getBuildNumber()
                + " Qualifier: " + detectedJdkVersion.getQualifier());
        enforceVersion(helper.getLog(), "JDK", getVersion(), detectedJdkVersion);
    }
}
public class YggdrasilAuthenticator { /** * Creates a < code > YggdrasilAuthenticator < / code > with a customized * { @ link AuthenticationService } and the given client token , and initializes * it with password . * @ param service the customized { @ link AuthenticationService } * @ param passwordProvider the password provider * @ param clientToken the client token * @ return a YggdrasilAuthenticator * @ throws AuthenticationException If an exception occurs during the * authentication */ public static YggdrasilAuthenticator password ( AuthenticationService service , PasswordProvider passwordProvider , String clientToken ) throws AuthenticationException { } }
Objects . requireNonNull ( service ) ; Objects . requireNonNull ( passwordProvider ) ; Objects . requireNonNull ( clientToken ) ; YggdrasilAuthenticator auth = new YggdrasilAuthenticator ( service ) ; auth . refreshWithPassword ( passwordProvider ) ; return auth ;
public class HttpDirectoryService { /** * Checks if the first pattern can be included in the second one and resolves directive conflicts if needed * @ param patterns * @ param specificPattern * @ param generalPattern */ private void checkPatternMatching ( Map < String , PatternCacheControl > patterns , String specificPattern , String generalPattern ) { } }
if ( PatternMatcherUtils . caseInsensitiveMatch ( specificPattern , generalPattern ) ) { PatternCacheControl specificPatternDirective = patterns . get ( specificPattern ) ; PatternCacheControl generalPatternDirective = patterns . get ( generalPattern ) ; generalPatternDirective . incrementMatchingPatternCount ( ) ; ConflictResolverUtils . resolveConflicts ( specificPatternDirective , generalPatternDirective ) ; }
public class D6CrudHelperBase { /** * Returns Set of columName of model class * @ return */ final Set < String > getAllColumnNames ( ) { } }
final Set < String > columnNameSet = mColumnNameFieldInfoMap . keySet ( ) ; // Returns clone of columnNameSet because { @ see D6Inex } directly // manipulats ( almost delete ) columnNameSet , so it effects // mColumnNameFieldInfoMap . // So , prevent original columnNameSet from getting edit . return new LinkedHashSet < String > ( columnNameSet ) ;
public class ClassPathBuilder {
    /**
     * Scans the given codebase in order to:
     * <ul>
     * <li>check the codebase for nested archives (adding any found to the worklist)</li>
     * <li>build a list of class resources found in the codebase</li>
     * </ul>
     *
     * @param classPath the classpath being built (not referenced in this method body)
     * @param workList the worklist of codebases still to be scanned
     * @param discoveredCodeBase the codebase to scan
     * @throws InterruptedException if the scan is interrupted
     */
    private void scanCodebase(IClassPath classPath, LinkedList<WorkListItem> workList, DiscoveredCodeBase discoveredCodeBase) throws InterruptedException {
        if (DEBUG) {
            System.out.println("Scanning " + discoveredCodeBase.getCodeBase().getCodeBaseLocator());
        }

        IScannableCodeBase codeBase = (IScannableCodeBase) discoveredCodeBase.getCodeBase();

        ICodeBaseIterator i = codeBase.iterator();
        while (i.hasNext()) {
            ICodeBaseEntry entry = i.next();

            if (VERBOSE) {
                System.out.println("Entry: " + entry.getResourceName());
            }

            // Eagerly parse class names from application-codebase class resources,
            // except single-file codebase entries (their class name is derived elsewhere).
            if (!NO_PARSE_CLASS_NAMES && codeBase.isApplicationCodeBase()
                    && DescriptorFactory.isClassResource(entry.getResourceName())
                    && !(entry instanceof SingleFileCodeBaseEntry)) {
                parseClassName(entry);
            }

            // Note the resource exists in this codebase
            discoveredCodeBase.addCodeBaseEntry(entry);

            // If resource is a nested archive, add it to the worklist.
            // Only application codebases and directory codebases are searched
            // for nested libraries (guarded overall by scanNestedArchives).
            if (scanNestedArchives && (codeBase.isApplicationCodeBase() || codeBase instanceof DirectoryCodeBase)
                    && Archive.isLibraryFileName(entry.getResourceName())) {
                if (VERBOSE) {
                    // Message text (including the "an library" typo) is runtime output
                    // and is deliberately left unchanged here.
                    System.out.println("Entry is an library!");
                }
                ICodeBaseLocator nestedArchiveLocator = classFactory.createNestedArchiveCodeBaseLocator(codeBase, entry.getResourceName());
                // A nested archive inherits the application flag from its container.
                addToWorkList(workList, new WorkListItem(nestedArchiveLocator, codeBase.isApplicationCodeBase(), ICodeBase.Discovered.NESTED));
            }
        }
    }
}
public class PoolManager { /** * Can we take slots from this pool when preempting tasks ? */ public boolean canBePreempted ( String pool ) { } }
Boolean result = canBePreempted . get ( pool ) ; return result == null ? true : result ; // Default is true
public class ProfilePoint {
    /**
     * Return last visible point data for a profile points list.
     *
     * <p>For the profile the min and max azimuth angles of "sight" are
     * calculated. The min azimuth angle represents the "upper" line of sight,
     * as thought from the zenith; the max azimuth angle represents the "below
     * the earth" line of sight (think of a viewer looking in direction nadir).
     *
     * <p>The return values are in an array of doubles containing:
     * <ul>
     * <li>[0] min point elev,</li>
     * <li>[1] min point x,</li>
     * <li>[2] min point y,</li>
     * <li>[3] min point progressive,</li>
     * <li>[4] min point azimuth,</li>
     * <li>[5] max point elev,</li>
     * <li>[6] max point x,</li>
     * <li>[7] max point y,</li>
     * <li>[8] max point progressive,</li>
     * <li>[9] max point azimuth</li>
     * </ul>
     *
     * @param profile the profile to analyze; must contain at least 2 points
     * @return the last visible point parameters, or {@code null} if every point
     *         after the first has a novalue elevation
     * @throws IllegalArgumentException if the profile has fewer than 2 points
     */
    public static double[] getLastVisiblePointData(List<ProfilePoint> profile) {
        if (profile.size() < 2) {
            throw new IllegalArgumentException("A profile needs to have at least 2 points.");
        }

        // The first profile point is the reference: elevations are taken
        // relative to it and it sits at progressive 0.
        ProfilePoint first = profile.get(0);
        double baseElev = first.getElevation();
        Coordinate baseCoord = new Coordinate(0, 0);

        double minAzimuthAngle = Double.POSITIVE_INFINITY;
        double maxAzimuthAngle = Double.NEGATIVE_INFINITY;
        ProfilePoint minAzimuthPoint = null;
        ProfilePoint maxAzimuthPoint = null;

        for (int i = 1; i < profile.size(); i++) {
            ProfilePoint currentPoint = profile.get(i);
            double currentElev = currentPoint.getElevation();
            // Points without elevation data are skipped entirely.
            if (HMConstants.isNovalue(currentElev)) {
                continue;
            }
            // Elevation relative to the base point.
            currentElev = currentElev - baseElev;
            double currentProg = currentPoint.getProgressive();
            // Azimuth is computed in the (progressive, relative-elevation) plane.
            Coordinate currentCoord = new Coordinate(currentProg, currentElev);
            double azimuth = GeometryUtilities.azimuth(baseCoord, currentCoord);
            // <= and >= are deliberate: for equal angles the FARTHEST point
            // along the profile wins, since later points overwrite earlier ones.
            if (azimuth <= minAzimuthAngle) {
                minAzimuthAngle = azimuth;
                minAzimuthPoint = currentPoint;
            }
            if (azimuth >= maxAzimuthAngle) {
                maxAzimuthAngle = azimuth;
                maxAzimuthPoint = currentPoint;
            }
        }

        // All candidate points had novalue elevations: no visibility result.
        if (minAzimuthPoint == null || maxAzimuthPoint == null) {
            return null;
        }
        return new double[]{minAzimuthPoint.elevation, minAzimuthPoint.position.x, minAzimuthPoint.position.y,
                minAzimuthPoint.progressive, minAzimuthAngle, maxAzimuthPoint.elevation, maxAzimuthPoint.position.x,
                maxAzimuthPoint.position.y, maxAzimuthPoint.progressive, maxAzimuthAngle,};
    }
}
public class GVRKUms { /** * Gibt alle Transaktionsdatensätze in einer " flachen " Struktur zurück . * D . h . nicht in einzelne Buchungstage unterteilt , sondern in einer Liste * analog zu einem " normalen " Kontoauszug . * @ return Liste mit Transaktionsdaten ( { @ link GVRKUms . UmsLine } ) */ public List < UmsLine > getFlatData ( ) { } }
verifyMT94xParsing ( "getFlatData()" ) ; List < UmsLine > result = new ArrayList < UmsLine > ( ) ; for ( Iterator < BTag > i = tageMT940 . iterator ( ) ; i . hasNext ( ) ; ) { BTag tag = i . next ( ) ; result . addAll ( tag . lines ) ; } return result ;
public class HttpGetRequester { /** * Gets rest api response as object . * @ param < T > the type parameter * @ param uri the uri * @ param typeReference the type reference * @ return the rest api response as object */ public static < T > T getRestApiResponseAsObject ( String uri , TypeReference < T > typeReference ) { } }
return JMJson . withJsonResource ( getResponseAsString ( uri ) , typeReference ) ;
public class ModelsImpl { /** * Get one entity role for a given entity . * @ param appId The application ID . * @ param versionId The version ID . * @ param cEntityId The composite entity extractor ID . * @ param roleId entity role ID . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the EntityRole object if successful . */ public EntityRole getCompositeEntityRole ( UUID appId , String versionId , UUID cEntityId , UUID roleId ) { } }
return getCompositeEntityRoleWithServiceResponseAsync ( appId , versionId , cEntityId , roleId ) . toBlocking ( ) . single ( ) . body ( ) ;
public class ArgumentsBuilder { /** * Converts profile options into corresponding arguments ( - - profiles ) . * @ param profiles profile options * @ return converted Pax Runner collection of arguments */ private static String extractArguments ( final ProfileOption [ ] profiles ) { } }
final StringBuilder argument = new StringBuilder ( ) ; if ( profiles != null && profiles . length > 0 ) { for ( ProfileOption profile : profiles ) { if ( profile != null && profile . getProfile ( ) != null && profile . getProfile ( ) . length ( ) > 0 ) { if ( argument . length ( ) == 0 ) { argument . append ( "--profiles=" ) ; } else { argument . append ( "," ) ; } argument . append ( profile . getProfile ( ) ) ; } } } return argument . toString ( ) ;
public class LeaderBoardItem {
    /**
     * Recomputes the layout of this leaderboard item: resizes the backing pane
     * and repositions the trend triangle, name/value texts and the separator
     * line, then triggers a redraw.
     */
    private void resize() {
        // Usable area inside this control, excluding insets.
        width = getWidth() - getInsets().getLeft() - getInsets().getRight();
        height = getHeight() - getInsets().getTop() - getInsets().getBottom();
        // NOTE(review): size is derived from the PARENT dimensions while
        // width/height come from this control's own bounds — confirm this mix
        // is intentional.
        size = parentWidth < parentHeight ? parentWidth : parentHeight;

        // Constrain width/height to the fixed aspect ratio.
        if (ASPECT_RATIO * width > height) {
            width = 1 / (ASPECT_RATIO / height); // i.e. height / ASPECT_RATIO
        } else if (1 / (ASPECT_RATIO / height) > width) {
            height = ASPECT_RATIO * width;
        }

        if (width > 0 && height > 0) {
            // pane.setMaxSize(width, height);
            // pane.setPrefSize(width, height);
            // The pane spans the parent's full width at 12% of the item height.
            pane.setMaxSize(parentWidth, height * 0.12);
            pane.setPrefSize(parentWidth, height * 0.12);

            drawTriangle();
            triangle.setLayoutX(size * 0.05);
            triangle.setLayoutY((height - triangle.getBoundsInLocal().getHeight()) * 0.25);

            nameText.setFont(Fonts.latoRegular(size * 0.06));
            nameText.setX(size * 0.12);
            nameText.setY(0);

            valueText.setFont(Fonts.latoRegular(size * 0.06));
            // Right-align the value against the parent's right edge with a margin.
            valueText.setX((parentWidth - size * 0.05) - valueText.getLayoutBounds().getWidth());
            valueText.setY(0);

            // Horizontal separator under the texts, inset by 5% on both sides.
            separator.setStartX(size * 0.05);
            separator.setStartY(size * 0.1);
            separator.setEndX(parentWidth - size * 0.05);
            separator.setEndY(size * 0.1);

            redraw();
        }
    }
}