signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Authorizer { /** * A list of the Amazon Cognito user pool ARNs for the < code > COGNITO _ USER _ POOLS < / code > authorizer . Each element is * of this format : < code > arn : aws : cognito - idp : { region } : { account _ id } : userpool / { user _ pool _ id } < / code > . For a * < code > TOKEN < / code > or < code > REQUEST < / code > authorizer , this is not defined . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setProviderARNs ( java . util . Collection ) } or { @ link # withProviderARNs ( java . util . Collection ) } if you want to * override the existing values . * @ param providerARNs * A list of the Amazon Cognito user pool ARNs for the < code > COGNITO _ USER _ POOLS < / code > authorizer . Each * element is of this format : < code > arn : aws : cognito - idp : { region } : { account _ id } : userpool / { user _ pool _ id } < / code > . * For a < code > TOKEN < / code > or < code > REQUEST < / code > authorizer , this is not defined . * @ return Returns a reference to this object so that method calls can be chained together . */ public Authorizer withProviderARNs ( String ... providerARNs ) { } }
if ( this . providerARNs == null ) { setProviderARNs ( new java . util . ArrayList < String > ( providerARNs . length ) ) ; } for ( String ele : providerARNs ) { this . providerARNs . add ( ele ) ; } return this ;
public class DefaultProgression { /** * Notify listeners about value change . */ protected void fireValueChange ( ) { } }
if ( this . listeners != null ) { final ProgressionEvent event = new ProgressionEvent ( this , isRootModel ( ) ) ; for ( final ProgressionListener listener : this . listeners . getListeners ( ProgressionListener . class ) ) { listener . onProgressionValueChanged ( event ) ; } }
public class DateTileSkin {
    /**
     * Builds the tile's scene graph: the weekday title, a large centered
     * day-of-month label and the month/year text, each colored from the tile's
     * current color settings, then adds all three to the tile's pane.
     */
    @Override
    protected void initGraphics() {
        super.initGraphics();
        final ZonedDateTime TIME = tile.getTime();
        // Weekday name (formatted by DAY_FORMATTER) as the tile title.
        titleText = new Text(DAY_FORMATTER.format(TIME));
        titleText.setFill(tile.getTitleColor());
        // Large day-of-month number, centered and ellipsized if it cannot fit.
        description = new Label(Integer.toString(TIME.getDayOfMonth()));
        description.setAlignment(Pos.CENTER);
        description.setTextAlignment(TextAlignment.CENTER);
        description.setWrapText(true);
        description.setTextOverrun(OverrunStyle.WORD_ELLIPSIS);
        description.setTextFill(tile.getTextColor());
        // Sized and fonted relative to the preferred tile dimensions.
        description.setPrefSize(PREFERRED_WIDTH * 0.9, PREFERRED_HEIGHT * 0.72);
        description.setFont(Fonts.latoLight(PREFERRED_HEIGHT * 0.65));
        // Month and year line below the day number.
        text = new Text(MONTH_YEAR_FORMATTER.format(TIME));
        text.setFill(tile.getTextColor());
        getPane().getChildren().addAll(titleText, text, description);
    }
}
public class AbstractMenu { /** * Check , if the user of the context has access to this user interface * object . First , the instance method checks , if some access configuration * exists for this menu instance object . If the user has access for this * menu , it is test , if the context user has access to minimum one sub * command command / menu . If yes , the user is allowed to access this menu * instance , other the user is not allowed to access this menu . * @ param _ targetMode TargetMode of the Command * @ param _ instance the field will represent , e . g . on edit mode * @ return < i > true < / i > if context user has access , otherwise < i > false < / i > is * returned * @ throws EFapsException on error */ @ Override public boolean hasAccess ( final TargetMode _targetMode , final Instance _instance ) throws EFapsException { } }
boolean ret = super . hasAccess ( _targetMode , _instance ) ; if ( ret && getCommands ( ) . size ( ) > 0 && ! AppAccessHandler . excludeMode ( ) ) { ret = false ; for ( final AbstractCommand cmd : getCommands ( ) ) { if ( cmd . hasAccess ( _targetMode , _instance ) ) { ret = true ; break ; } } } return ret ;
public class JsonOutput { /** * Adds an object if object is not null . * @ param writer * object writer . * @ param field * field writer . * @ param value * value writer . * @ param objWriter * object value writer . * @ throws JSONException * if io error occurs . */ public static < T > void addIfNotNull ( JSONWriter writer , String field , T value , JsonObjectWriter < T > objWriter ) throws JSONException { } }
if ( value == null ) return ; writer . key ( field ) ; writer . object ( ) ; objWriter . write ( writer , value ) ; writer . endObject ( ) ;
public class MediaConfiguration {
    /**
     * Sets up a Configuration with the specified AMS account URI and Azure AD
     * token provider, storing both as configuration properties.
     *
     * @param configuration        the target configuration (mutated in place)
     * @param apiServer            the AMS account URI
     * @param azureAdTokenProvider the token provider
     * @return the same, now configured, {@code Configuration} instance
     */
    public static Configuration configureWithAzureAdTokenProvider(Configuration configuration, URI apiServer, TokenProvider azureAdTokenProvider) {
        configuration.setProperty(AZURE_AD_API_SERVER, apiServer.toString());
        configuration.setProperty(AZURE_AD_TOKEN_PROVIDER, azureAdTokenProvider);
        return configuration;
    }
}
public class CommandLineRunnerMirrorImpl {
    /**
     * Runs the command-line runner with the given arguments, fanning run events
     * out to both the Ant logger monitor and the recorder monitor.
     *
     * @param args command-line arguments forwarded to {@link CommandLineRunner#run}
     * @throws java.lang.Exception if the run fails
     */
    public void run(String[] args) throws Exception {
        CommandLineRunner runner = new CommandLineRunner();
        // Composite monitor so both the Ant logger and the recorder see events.
        CompositeSpecificationRunnerMonitor aggregateMonitor = new CompositeSpecificationRunnerMonitor();
        aggregateMonitor.add(new AntSpecificationRunnerMonitor(logger));
        aggregateMonitor.add(recorderMonitor);
        runner.setMonitor(aggregateMonitor);
        runner.run(args);
    }
}
public class DecimalWithUoMType { /** * { @ inheritDoc } */ @ Override protected Object readValue ( final Object _object ) { } }
final BigDecimal ret ; if ( _object instanceof BigDecimal ) { ret = ( BigDecimal ) _object ; } else if ( _object != null ) { ret = new BigDecimal ( _object . toString ( ) ) ; } else { ret = null ; } return ret ;
public class DeviceImpl {
    /**
     * Re-initializes the device: calls delete ({@code deleteDevice}) before init
     * ({@code doInit}), guarded so that a request arriving while an init is
     * already in progress is ignored.
     *
     * @throws DevFailed if delete or init fails
     */
    @Command(name = INIT_CMD)
    public synchronized void initCmd() throws DevFailed {
        xlogger.entry();
        // if init is already in progress, do nothing
        if (!isInitializing) {
            isInitializing = true;
            // Mark the device as not (yet) correctly initialized.
            isCorrectlyInit.set(false);
            deleteDevice();
            doInit();
            // NOTE(review): isInitializing is not reset in this method and the
            // delete/init calls are not wrapped in try/finally — presumably
            // doInit() clears the flag; confirm, otherwise a failure here would
            // block all future init commands.
            try {
                pushInterfaceChangeEvent(false);
            } catch (final DevFailed e) {
                // Best effort: a failed event push must not fail the init command.
                logger.error("error pushing event", e);
            }
        }
        xlogger.exit();
    }
}
public class RecastMeshDetail {
    /**
     * Calculates the minimum extent of the polygon: for every edge, the maximum
     * distance from that edge to any non-endpoint vertex is computed, and the
     * smallest of those maxima is returned.
     *
     * @param verts  packed vertex coordinates (3 floats per vertex)
     * @param nverts number of vertices in the polygon
     * @return the minimum extent of the polygon
     */
    private static float polyMinExtent(float[] verts, int nverts) {
        float minDist = Float.MAX_VALUE;
        for (int i = 0; i < nverts; i++) {
            // Edge from vertex i to its cyclic successor ni.
            int ni = (i + 1) % nverts;
            int p1 = i * 3;
            int p2 = ni * 3;
            // Maximum distance from this edge to every other vertex.
            float maxEdgeDist = 0;
            for (int j = 0; j < nverts; j++) {
                if (j == i || j == ni) {
                    continue;
                }
                float d = distancePtSeg2d(verts, j * 3, verts, p1, p2);
                maxEdgeDist = Math.max(maxEdgeDist, d);
            }
            minDist = Math.min(minDist, maxEdgeDist);
        }
        // distancePtSeg2d presumably returns a squared distance — hence the final
        // sqrt; TODO confirm against its definition.
        return (float) Math.sqrt(minDist);
    }
}
public class Mapper {
    /**
     * Called once for each key/value pair in the input split. Most applications
     * should override this; the default is the identity function, writing the
     * input pair straight to the output context.
     *
     * @param key     the input key
     * @param value   the input value
     * @param context the context to write output to
     * @throws IOException          on failure writing the output
     * @throws InterruptedException if the write is interrupted
     */
    @SuppressWarnings("unchecked")
    protected void map(KEYIN key, VALUEIN value, Context context) throws IOException, InterruptedException {
        // Identity mapping; the unchecked casts assume KEYIN/VALUEIN are
        // compatible with KEYOUT/VALUEOUT when this default is not overridden.
        context.write((KEYOUT) key, (VALUEOUT) value);
    }
}
public class CheckRangeHandler {
    /**
     * Sets the cloned listener to the same state as this listener.
     *
     * @param field       the field this new listener will be added to
     * @param listener    the new (cloned) listener to sync to this one
     * @param bInitCalled true if the clone's init method has already been called
     * @return true if init was called (always reported as called to the superclass)
     */
    public boolean syncClonedListener(BaseField field, FieldListener listener, boolean bInitCalled) {
        // Propagate this handler's range bounds to the clone unless it is already initialized.
        if (!bInitCalled)
            ((CheckRangeHandler) listener).init(null, m_dStartRange, m_dEndRange);
        return super.syncClonedListener(field, listener, true);
    }
}
public class BigFileSearcher {
    /**
     * Progress callback invoked from each worker thread. Aggregates per-worker
     * progress into {@code progressCache} and forwards the combined progress —
     * and, when a realtime listener is registered, the pointers found so far —
     * to the registered listeners.
     *
     * @param workerNumber index of the reporting worker
     * @param workerSize   total number of workers (used to size the cache)
     * @param pointerList  match positions found by this worker so far
     * @param progress     this worker's progress fraction — assumed 0..1, TODO confirm
     */
    private void onProgress(final int workerNumber, final int workerSize, final List<Long> pointerList, final float progress) {
        // Lazily create the shared cache on the first report.
        // NOTE(review): this check-then-act is unsynchronized; if worker threads
        // call this concurrently, two caches could be created and updates lost —
        // confirm the callers serialize these callbacks.
        if (progressCache == null) {
            progressCache = new ProgressCache(workerSize, (onRealtimeResultListener != null));
        }
        progressCache.setProgress(workerNumber, progress, pointerList);
        if (onProgressListener != null) {
            onProgressListener.onProgress(progressCache.getProgress());
        }
        if (onRealtimeResultListener != null) {
            onRealtimeResultListener.onRealtimeResultListener(progressCache.getProgress(), progressCache.getResultPointers());
        }
    }
}
public class AdditionalNumberImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . map . api . primitives . MAPAsnPrimitive # encodeData * ( org . mobicents . protocols . asn . AsnOutputStream ) */ public void encodeData ( AsnOutputStream asnOs ) throws MAPException { } }
if ( this . mSCNumber == null && this . sGSNNumber == null ) throw new MAPException ( "Error when encoding " + _PrimitiveName + ": both mscNumber and sgsnNumber must not be null" ) ; if ( this . mSCNumber != null && this . sGSNNumber != null ) throw new MAPException ( "Error when encoding " + _PrimitiveName + ": both mscNumber and sgsnNumber must not be not null" ) ; if ( this . mSCNumber != null ) { ( ( ISDNAddressStringImpl ) this . mSCNumber ) . encodeData ( asnOs ) ; } else { ( ( ISDNAddressStringImpl ) this . sGSNNumber ) . encodeData ( asnOs ) ; }
public class Headers {
    /* ----- [Internal-Helpers] ----- */

    /**
     * Creates a new {@link Header} with the given name and appends it to the
     * tail of the header list.
     * NOTE(review): the tail-append relies on the list being circular (first.prev
     * pointing at the last element) — presumably the Header constructor links
     * prev/next to itself; confirm, otherwise first.prev would be null right
     * after the first insertion.
     *
     * @param name the header name
     * @return the newly created and linked header
     */
    private Header newHeader(AsciiString name) {
        Header header = new Header(name);
        if (first == null)
            first = header;
        else {
            // Splice the new header in before 'first', i.e. at the tail.
            header.prev = first.prev;
            first.prev.next = header;
            first.prev = header;
        }
        return header;
    }
}
public class Table {
    /**
     * Compares two strings stored in the buffer. Each offset points at a 32-bit
     * relative offset (indirection) to a length-prefixed byte array.
     *
     * @param offset_1 an 'int' index of the first string's indirection into the bb
     * @param offset_2 an 'int' index of the second string's indirection into the bb
     * @param bb       a {@code ByteBuffer} holding both strings
     * @return a negative value, zero, or a positive value as the first string is
     *         byte-wise less than, equal to, or greater than the second
     */
    protected static int compareStrings(int offset_1, int offset_2, ByteBuffer bb) {
        // Follow the relative indirections to the actual string data.
        offset_1 += bb.getInt(offset_1);
        offset_2 += bb.getInt(offset_2);
        // Strings are length-prefixed: the first SIZEOF_INT bytes hold the length.
        int len_1 = bb.getInt(offset_1);
        int len_2 = bb.getInt(offset_2);
        int startPos_1 = offset_1 + SIZEOF_INT;
        int startPos_2 = offset_2 + SIZEOF_INT;
        int len = Math.min(len_1, len_2);
        for (int i = 0; i < len; i++) {
            // NOTE(review): bytes are compared as signed values; for data with
            // high-bit bytes this differs from unsigned lexicographic order —
            // confirm this matches the writer's sort order.
            if (bb.get(i + startPos_1) != bb.get(i + startPos_2))
                return bb.get(i + startPos_1) - bb.get(i + startPos_2);
        }
        // Common prefix equal: the shorter string sorts first.
        return len_1 - len_2;
    }
}
public class DataStream {
    /**
     * Assigns timestamps to the elements in the data stream and creates
     * watermarks based purely on the stream elements themselves (punctuated
     * watermarks): for each element the assigner extracts a timestamp and may
     * emit a new watermark. This is useful when the stream embeds watermark
     * elements or markers; overly aggressive watermark generation (hundreds per
     * second) can cost performance. For regular, interval-based watermarks use
     * {@link AssignerWithPeriodicWatermarks} instead.
     *
     * @param timestampAndWatermarkAssigner the timestamp assigner and watermark generator
     * @return the stream after the transformation, with assigned timestamps and watermarks
     * @see AssignerWithPunctuatedWatermarks
     * @see AssignerWithPeriodicWatermarks
     * @see #assignTimestampsAndWatermarks(AssignerWithPeriodicWatermarks)
     */
    public SingleOutputStreamOperator<T> assignTimestampsAndWatermarks(AssignerWithPunctuatedWatermarks<T> timestampAndWatermarkAssigner) {
        // match parallelism to input, otherwise dop=1 sources could lead to some strange
        // behaviour: the watermark will creep along very slowly because the elements
        // from the source go to each extraction operator round robin.
        final int inputParallelism = getTransformation().getParallelism();
        // clean() presumably runs the closure cleaner on the user function — confirm.
        final AssignerWithPunctuatedWatermarks<T> cleanedAssigner = clean(timestampAndWatermarkAssigner);
        TimestampsAndPunctuatedWatermarksOperator<T> operator = new TimestampsAndPunctuatedWatermarksOperator<>(cleanedAssigner);
        return transform("Timestamps/Watermarks", getTransformation().getOutputType(), operator).setParallelism(inputParallelism);
    }
}
public class JShell { /** * Returns the active method snippets . * This convenience method is equivalent to { @ code snippets ( ) } filtered for * { @ link jdk . jshell . Snippet . Status # isActive ( ) status ( snippet ) . isActive ( ) } * { @ code & & snippet . kind ( ) = = Kind . METHOD } * and cast to MethodSnippet . * @ return the active declared methods . */ public Stream < MethodSnippet > methods ( ) { } }
return snippets ( ) . filter ( sn -> status ( sn ) . isActive ( ) && sn . kind ( ) == Snippet . Kind . METHOD ) . map ( sn -> ( MethodSnippet ) sn ) ;
public class DeregisterWebhookWithThirdPartyRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol marshaller.
     *
     * @param deregisterWebhookWithThirdPartyRequest the request to marshall; must not be null
     * @param protocolMarshaller                     the target protocol marshaller
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DeregisterWebhookWithThirdPartyRequest deregisterWebhookWithThirdPartyRequest, ProtocolMarshaller protocolMarshaller) {
        if (deregisterWebhookWithThirdPartyRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(deregisterWebhookWithThirdPartyRequest.getWebhookName(), WEBHOOKNAME_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class WorkflowTemplateServiceClient {
    /**
     * Instantiates a template and begins execution.
     * <p>The returned Operation can be used to track execution of the workflow by
     * polling operations.get; it completes when the entire workflow is finished.
     * The running workflow can be aborted via operations.cancel, which cancels
     * in-flight jobs and deletes workflow-owned clusters. Operation.metadata will
     * be WorkflowMetadata; on success, Operation.response will be Empty.
     *
     * @param request the request object containing all of the parameters for the API call
     * @return a future tracking the long-running operation
     * @throws com.google.api.gax.rpc.ApiException if the remote call fails
     */
    @BetaApi("The surface for long-running operations is not stable yet and may change in the future.")
    public final OperationFuture<Empty, WorkflowMetadata> instantiateWorkflowTemplateAsync(InstantiateWorkflowTemplateRequest request) {
        return instantiateWorkflowTemplateOperationCallable().futureCall(request);
    }
}
public class Ifc2x3tc1PackageImpl {
    /**
     * Returns the {@link EClass} for IfcMember, lazily resolved from the globally
     * registered Ifc2x3tc1 package (classifier index 320) and cached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcMember() {
        if (ifcMemberEClass == null) {
            ifcMemberEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(320);
        }
        return ifcMemberEClass;
    }
}
public class CassandraCounter { /** * { @ inheritDoc } */ @ Override public DataPoint get ( long timestampMs ) { } }
long _key = toTimeSeriesPoint ( timestampMs ) ; Map < Long , DataPoint > row = _getRowWithCache ( timestampMs ) ; DataPoint result = row != null ? row . get ( _key ) : null ; return result != null ? result : new DataPoint ( Type . NONE , _key , 0 , RESOLUTION_MS ) ;
public class Query { /** * < code > * Used internally by the bridge object to generate the JSON that is sent to the search service . * < / code > * @ param clientKey * The client key used to authenticate this request . * @ return A JSON representation of this query object . */ public String getBridgeRefinementsJson ( String clientKey , String navigationName ) { } }
RefinementsRequest request = new RefinementsRequest ( ) ; request . setOriginalQuery ( populateRequest ( clientKey ) ) ; request . setNavigationName ( navigationName ) ; return requestToJson ( request ) ;
public class IbanUtil {
    /**
     * Formats an IBAN into blocks of {@code BLOCK_LENGTH} characters separated by
     * {@code SEPARATOR}, keeping the caller's cursor position consistent with the
     * characters that are inserted (separators) and dropped (non-alphanumerics).
     *
     * @param pentry string to format together with the cursor position; may be null
     * @return the same {@code ValueWithPos} with the formatted value and the
     *         adjusted, clamped cursor position; null when {@code pentry} is null
     */
    public static ValueWithPos<String> ibanFormatWithPos(final ValueWithPos<String> pentry) {
        if (pentry == null) {
            return null;
        }
        if (StringUtils.isNotEmpty(pentry.getValue())) {
            final StringBuilder ibanSb = new StringBuilder(pentry.getValue().length());
            int pos = 0;           // count of accepted (alphanumeric) characters
            int posformated = 0;   // position in the formatted output, separators included
            for (final char charCode : pentry.getValue().toCharArray()) {
                if (CharUtils.isAsciiAlphaUpper(charCode) || CharUtils.isAsciiNumeric(charCode)) {
                    // Start of a new block (but not the very first): insert a separator.
                    if (pos > 0 && pos % BLOCK_LENGTH == 0) {
                        ibanSb.append(SEPARATOR);
                        // Separator inserted at or before the cursor: shift cursor right.
                        if (posformated <= pentry.getPos()) {
                            pentry.setPos(pentry.getPos() + 1);
                        }
                        posformated++;
                    }
                    ibanSb.append(charCode);
                    pos++;
                    posformated++;
                } else {
                    // Character dropped before the cursor: shift cursor left.
                    if (posformated < pentry.getPos()) {
                        pentry.setPos(pentry.getPos() - 1);
                    }
                }
            }
            pentry.setValue(ibanSb.toString());
            // Clamp the cursor into [0, formatted length].
            if (pentry.getPos() < 0) {
                pentry.setPos(0);
            } else if (pentry.getPos() >= ibanSb.length()) {
                pentry.setPos(ibanSb.length());
            }
        }
        return pentry;
    }
}
public class LongSerializer { /** * = = = = = These methods apply to all whole numbers with minor modifications = = = = = * = = = = = byte , short , int , long = = = = = */ @ Override public Long convert ( Object value ) { } }
if ( value instanceof Number ) { double d = ( ( Number ) value ) . doubleValue ( ) ; if ( Double . isNaN ( d ) || Math . round ( d ) != d ) throw new IllegalArgumentException ( "Not a valid long: " + value ) ; return ( ( Number ) value ) . longValue ( ) ; } else if ( value instanceof String ) { return Long . parseLong ( ( String ) value ) ; } else if ( value instanceof Idfiable ) { return ( ( Idfiable ) value ) . longId ( ) ; } else return null ;
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertFNCYfrUnitsToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class MPPUtility { /** * This method converts between the duration units representation * used in the MPP file , and the standard MPX duration units . * If the supplied units are unrecognised , the units default to days . * @ param type MPP units * @ param projectDefaultDurationUnits default duration units for this project * @ return MPX units */ public static final TimeUnit getDurationTimeUnits ( int type , TimeUnit projectDefaultDurationUnits ) { } }
TimeUnit units ; switch ( type & DURATION_UNITS_MASK ) { case 3 : { units = TimeUnit . MINUTES ; break ; } case 4 : { units = TimeUnit . ELAPSED_MINUTES ; break ; } case 5 : { units = TimeUnit . HOURS ; break ; } case 6 : { units = TimeUnit . ELAPSED_HOURS ; break ; } case 8 : { units = TimeUnit . ELAPSED_DAYS ; break ; } case 9 : { units = TimeUnit . WEEKS ; break ; } case 10 : { units = TimeUnit . ELAPSED_WEEKS ; break ; } case 11 : { units = TimeUnit . MONTHS ; break ; } case 12 : { units = TimeUnit . ELAPSED_MONTHS ; break ; } case 19 : { units = TimeUnit . PERCENT ; break ; } case 20 : { units = TimeUnit . ELAPSED_PERCENT ; break ; } case 7 : { units = TimeUnit . DAYS ; break ; } case 21 : { units = projectDefaultDurationUnits == null ? TimeUnit . DAYS : projectDefaultDurationUnits ; break ; } default : { units = TimeUnit . DAYS ; break ; } } return ( units ) ;
public class BoltNeo4jSequenceGenerator {
    /**
     * Adds the statement that creates a unique constraint on the sequence-name
     * property of nodes labeled {@code SEQUENCE}.
     * NOTE(review): the original javadoc claimed this creates the sequence nodes
     * with their initial values, but the body only adds the uniqueness-constraint
     * statement and the {@code sequences} parameter is unused — verify node
     * creation happens elsewhere in the caller's transaction.
     *
     * @param statements the statement list to append to (all statements are
     *                   presumably executed inside one transaction by the caller)
     * @param sequences  the sequences to initialize (currently unused here)
     */
    private void createSequencesConstraintsStatements(List<Statement> statements, Iterable<Sequence> sequences) {
        Statement statement = createUniqueConstraintStatement(SEQUENCE_NAME_PROPERTY, NodeLabel.SEQUENCE.name());
        statements.add(statement);
    }
}
public class Normalizer2Impl { /** * NFD without an NFD Normalizer2 instance . */ public Appendable decompose ( CharSequence s , StringBuilder dest ) { } }
decompose ( s , 0 , s . length ( ) , dest , s . length ( ) ) ; return dest ;
public class CmsJspTagContentCheck { /** * Sets the list of elements to check for . < p > * @ param elementList the list of elements to check for */ public void setIfexistsall ( String elementList ) { } }
if ( elementList != null ) { m_elementList = elementList ; m_checkall = true ; m_checknone = false ; }
public class InstanceBasedSecurityManager { /** * Determine if the current user has the specified role . Delegates to * { @ link # hasRoleName ( ActionBean , Method , String ) } to determine if the user has a role without * using the expression ; this method handles evaluating the expression . * @ param bean the current action bean * @ param handler the current event handler * @ param role the role to check * @ return { @ code true } if the user has the role , and { @ code false } otherwise */ @ Override protected Boolean hasRole ( ActionBean bean , Method handler , String role ) { } }
LOG . debug ( "Checking role " + role + " using " + bean ) ; String roleName ; String roleExpression ; Matcher ifTrigger = Pattern . compile ( "\\bif\\b" ) . matcher ( role ) ; if ( ifTrigger . find ( ) ) { roleName = role . substring ( 0 , ifTrigger . start ( ) ) . trim ( ) ; // Lose leading and trailing whitespace . roleExpression = role . substring ( ifTrigger . end ( ) ) . trim ( ) ; // Lose leading and trailing whitespace . } else // The role does not contain a condition . { roleName = role ; roleExpression = null ; } LOG . debug ( "The role name and its expression are " + roleName + " & " + String . valueOf ( roleExpression ) ) ; // Check if the user has the required role . Boolean hasRole = hasRoleName ( bean , handler , roleName ) ; // If there is a limiting expression , restrict the role to cases where the expression evaluates to true . if ( hasRole != null && hasRole && roleExpression != null ) { LOG . debug ( "Checking expression " + roleExpression ) ; Object value = evaluateRoleExpression ( bean , roleExpression ) ; hasRole = value == null ? null : Boolean . TRUE . equals ( value ) ; } LOG . debug ( "Done checking role " + role + ": access is " + ( hasRole ? "allowed" : "denied" ) + '.' ) ; return hasRole ;
public class JsScopeContext { /** * Returns the JavaScript statement to declare the scope declaration ( eg . the list of * variables declared in the JavaScript function ) . */ CharSequence scopeDeclaration ( ) { } }
if ( scopeVariables . length == 0 ) { return "" ; } StringBuilder scopeDeclaration = new StringBuilder ( ) ; scopeDeclaration . append ( scopeVariables [ 0 ] ) ; for ( int i = 1 ; i < scopeVariables . length ; i ++ ) { scopeDeclaration . append ( ", " ) ; scopeDeclaration . append ( scopeVariables [ i ] ) ; } return scopeDeclaration ;
public class AwsSecurityFindingFilters { /** * The protocol of network - related information about a finding . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNetworkProtocol ( java . util . Collection ) } or { @ link # withNetworkProtocol ( java . util . Collection ) } if you * want to override the existing values . * @ param networkProtocol * The protocol of network - related information about a finding . * @ return Returns a reference to this object so that method calls can be chained together . */ public AwsSecurityFindingFilters withNetworkProtocol ( StringFilter ... networkProtocol ) { } }
if ( this . networkProtocol == null ) { setNetworkProtocol ( new java . util . ArrayList < StringFilter > ( networkProtocol . length ) ) ; } for ( StringFilter ele : networkProtocol ) { this . networkProtocol . add ( ele ) ; } return this ;
public class HttpSessionsPanel { /** * A new Site was selected . * @ param site the site */ private void siteSelected ( String site ) { } }
if ( ! site . equals ( currentSite ) ) { this . sessionsModel = extension . getHttpSessionsSite ( site ) . getModel ( ) ; this . getHttpSessionsTable ( ) . setModel ( this . sessionsModel ) ; this . setSessionsTableColumnSizes ( ) ; currentSite = site ; }
public class HllSketch {
    /**
     * Wraps the given WritableMemory, which must be an image of a valid updatable
     * (non-compact) sketch, and may have data. What remains on the Java heap is a
     * thin wrapper that reads and writes the given WritableMemory, which —
     * depending on how the user configures it — may actually reside on the Java
     * heap or off-heap.
     * <p>The given memory is checked for the required capacity as determined by
     * {@link #getMaxUpdatableSerializationBytes(int, TgtHllType)}.
     *
     * @param wmem a writable image of a valid sketch with data
     * @return an HllSketch whose data resides in the given memory
     * @throws SketchesArgumentException if the image is compact or the memory too small
     */
    public static final HllSketch writableWrap(final WritableMemory wmem) {
        // A compact image is a read-only layout and cannot back a writable sketch.
        final boolean compact = extractCompactFlag(wmem);
        if (compact) {
            throw new SketchesArgumentException("Cannot perform a writableWrap of a writable sketch image that is in compact form.");
        }
        final int lgConfigK = extractLgK(wmem);
        final TgtHllType tgtHllType = extractTgtHllType(wmem);
        // Verify the memory can hold an updatable image of this configuration.
        final long minBytes = getMaxUpdatableSerializationBytes(lgConfigK, tgtHllType);
        final long capBytes = wmem.getCapacity();
        HllUtil.checkMemSize(minBytes, capBytes);
        final CurMode curMode = checkPreamble(wmem);
        // Select the direct (memory-backed) implementation matching the image's mode.
        final HllSketch directSketch;
        if (curMode == CurMode.HLL) {
            if (tgtHllType == TgtHllType.HLL_4) {
                directSketch = new HllSketch(new DirectHll4Array(lgConfigK, wmem));
            } else if (tgtHllType == TgtHllType.HLL_6) {
                directSketch = new HllSketch(new DirectHll6Array(lgConfigK, wmem));
            } else { // HLL_8
                directSketch = new HllSketch(new DirectHll8Array(lgConfigK, wmem));
            }
        } else if (curMode == CurMode.LIST) {
            directSketch = new HllSketch(new DirectCouponList(lgConfigK, tgtHllType, curMode, wmem));
        } else {
            directSketch = new HllSketch(new DirectCouponHashSet(lgConfigK, tgtHllType, wmem));
        }
        return directSketch;
    }
}
public class ObservationTree {
    /**
     * See {@link #addState(Object, Word, Object)}. Convenience method that stores
     * all information that the traces of the given {@link ADTNode} hold.
     *
     * @param state   the hypothesis state for which information should be stored
     * @param adtNode the {@link ADTNode} whose traces should be stored
     */
    public void addTrace(final S state, final ADTNode<S, I, O> adtNode) {
        final FastMealyState<O> internalState = this.nodeToObservationMap.get(state);
        // Walk from the given node through its chain of enclosing ADSs (via the
        // parent of each ADS start node), storing each one's input/output trace.
        ADTNode<S, I, O> adsIter = adtNode;
        while (adsIter != null) {
            final Pair<Word<I>, Word<O>> trace = ADTUtil.buildTraceForNode(adsIter);
            this.addTrace(internalState, trace.getFirst(), trace.getSecond());
            adsIter = ADTUtil.getStartOfADS(adsIter).getParent();
        }
    }
}
public class DatabaseStoreService {
    /**
     * Declarative Services method for setting the
     * EmbeddableWebSphereTransactionManager service reference.
     *
     * @param ref reference to the service object; the type of the service object
     *            is verified by the DS runtime
     */
    protected void setEmbeddableWebSphereTransactionManager(ServiceReference<EmbeddableWebSphereTransactionManager> ref) {
        // Trace the incoming reference only when FINE-level session logging is on.
        if (TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
            LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, "setEmbeddableWebSphereTransactionManager", "setting " + ref);
        }
        embeddableWebSphereTransactionManagerRef.setReference(ref);
    }
}
public class DownloadRequestQueue { /** * Perform construction . * @ param callbackHandler */ private void initialize ( Handler callbackHandler ) { } }
int processors = Runtime . getRuntime ( ) . availableProcessors ( ) ; mDownloadDispatchers = new DownloadDispatcher [ processors ] ; mDelivery = new CallBackDelivery ( callbackHandler ) ;
public class SystemObserver { /** * Get the package name for this application . * @ param context Context . * @ return { @ link String } with value as package name . Empty String in case of error */ static String getPackageName ( Context context ) { } }
String packageName = "" ; if ( context != null ) { try { final PackageInfo packageInfo = context . getPackageManager ( ) . getPackageInfo ( context . getPackageName ( ) , 0 ) ; packageName = packageInfo . packageName ; } catch ( Exception e ) { PrefHelper . LogException ( "Error obtaining PackageName" , e ) ; } } return packageName ;
public class SQLInLoop { /** * implements the visitor to clear the collections , and report the query locations that are in loops * @ param obj * the context object for the currently parsed code block */ @ Override public void visitCode ( Code obj ) { } }
queryLocations . clear ( ) ; loops . clear ( ) ; super . visitCode ( obj ) ; for ( Integer qLoc : queryLocations ) { for ( LoopLocation lLoc : loops ) { if ( lLoc . isInLoop ( qLoc . intValue ( ) ) ) { bugReporter . reportBug ( new BugInstance ( this , BugType . SIL_SQL_IN_LOOP . name ( ) , NORMAL_PRIORITY ) . addClass ( this ) . addMethod ( this ) . addSourceLine ( this , qLoc . intValue ( ) ) ) ; break ; } } }
public class OnePlusNLayoutHelperEx {
    /**
     * Validates the child range handled by this layout helper, rejecting ranges
     * outside the supported child count.
     *
     * @param start start position of items handled by this layoutHelper
     * @param end   end position of items handled by this layoutHelper
     * @throws IllegalArgumentException when the range is too small or too large
     */
    @Override
    public void onRangeChange(int start, int end) {
        // Reject ranges spanning fewer than 5 items (end - start < 4).
        if (end - start < 4) {
            throw new IllegalArgumentException("pls use OnePlusNLayoutHelper instead of OnePlusNLayoutHelperEx which childcount <= 5");
        }
        // Reject ranges spanning more than 7 items (end - start > 6).
        // NOTE(review): the exception messages and the original javadoc
        // ("maximum children up to 5", "end - start > 4 throws") disagree with
        // the bounds actually enforced here (5..7 items) — confirm the intended limits.
        if (end - start > 6) {
            throw new IllegalArgumentException("OnePlusNLayoutHelper only supports maximum 7 children now");
        }
    }
}
public class JsonUtil {
    /**
     * Writes a JCR property as a JSON object: { name: ..., value: ..., type: ..., multi: ... },
     * or as a bare named value when the mapping scope is 'value'.
     * Binary properties are skipped entirely when the mapping says so.
     *
     * @param writer   the JSON writer object (with the JSON state)
     * @param node     the node owning the property (passed through to value writing)
     * @param property the JCR property to write; ignored when null
     * @param mapping  the format rules controlling scope and binary handling
     * @throws javax.jcr.RepositoryException error on accessing JCR
     * @throws java.io.IOException error on write JSON
     */
    public static void writeJsonProperty(JsonWriter writer, Node node, Property property, MappingRules mapping)
            throws RepositoryException, IOException {
        // skip null properties, and binary properties when the rules say 'skip'
        if (property != null && (PropertyType.BINARY != property.getType()
                || mapping.propertyFormat.binary != MappingRules.PropertyFormat.Binary.skip)) {
            String name = property.getName();
            int type = property.getType();
            if (mapping.propertyFormat.scope == MappingRules.PropertyFormat.Scope.value) {
                // 'value' scope: emit "name": <value> directly
                writer.name(name);
            } else {
                // richer scopes: open an object carrying name/value/type/multi
                writer.beginObject();
                writer.name("name").value(name);
                writer.name("value");
            }
            if (property.isMultiple()) {
                // multi-valued property: emit a JSON array of values
                writer.beginArray();
                for (Value value : property.getValues()) {
                    JsonUtil.writeJsonValue(writer, node, name, value, type, mapping);
                }
                writer.endArray();
            } else {
                JsonUtil.writeJsonValue(writer, node, name, property.getValue(), type, mapping);
            }
            if (mapping.propertyFormat.scope != MappingRules.PropertyFormat.Scope.value) {
                writer.name("type").value(PropertyType.nameFromValue(type));
                writer.name("multi").value(property.isMultiple());
                if (mapping.propertyFormat.scope == MappingRules.PropertyFormat.Scope.definition) {
                    // 'definition' scope additionally exposes the definition flags
                    PropertyDefinition definition = property.getDefinition();
                    writer.name("auto").value(definition.isAutoCreated());
                    writer.name("protected").value(definition.isProtected());
                }
                writer.endObject();
            }
        }
    }
}
public class ClassPathUtils { /** * Get all of the directory paths in a zip / jar file * @ param pathToJarFile location of the jarfile . can also be a zipfile * @ return set of directory paths relative to the root of the jar */ public static Set < Path > getDirectoriesFromJar ( Path pathToJarFile ) throws IOException { } }
Set < Path > result = new HashSet < Path > ( ) ; ZipFile jarfile = new ZipFile ( pathToJarFile . toFile ( ) ) ; try { final Enumeration < ? extends ZipEntry > entries = jarfile . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry entry = entries . nextElement ( ) ; if ( entry . isDirectory ( ) ) { result . add ( Paths . get ( entry . getName ( ) ) ) ; } } jarfile . close ( ) ; } finally { IOUtils . closeQuietly ( jarfile ) ; } return result ;
public class IdentityAssertionLoginModule {
    /**
     * Sets the subject with the temporary subject contents that were not set
     * already from the shared state.  Principals and both credential sets are
     * merged; Set.addAll leaves entries that are already present untouched.
     */
    protected void updateSubjectWithTemporarySubjectContents() {
        subject.getPrincipals().addAll(temporarySubject.getPrincipals());
        subject.getPublicCredentials().addAll(temporarySubject.getPublicCredentials());
        subject.getPrivateCredentials().addAll(temporarySubject.getPrivateCredentials());
    }
}
public class BeanMappingFactoryHelper {
    /**
     * Looks up the supplied header for the given column number.
     *
     * @param suppliedHeaders the externally supplied headers; a zero-length
     *                        array when none were supplied
     * @param columnNumber    the 1-based column number
     * @return the header for that column, or empty when none exists
     */
    private static Optional<String> getSuppliedHeaders(final String[] suppliedHeaders, final int columnNumber) {
        final int length = suppliedHeaders.length;
        // guard against an empty array and out-of-range column numbers
        if (length == 0 || columnNumber < 1) {
            return Optional.empty();
        }
        // FIX: the original used 'columnNumber < length', which wrongly
        // rejected the last column — columnNumber == length maps to the
        // valid index length - 1 under the documented 1-based contract.
        if (columnNumber <= length) {
            return Optional.ofNullable(suppliedHeaders[columnNumber - 1]);
        }
        return Optional.empty();
    }
}
public class SafeCast {
    /**
     * Narrows an 8-byte signed long to an int, failing loudly instead of
     * silently truncating.
     *
     * @param l the value to narrow
     * @return the same value as an int
     * @throws IllegalStateException if the value is outside the int range
     */
    public static int safeLongToInt(long l) {
        if (l >= Integer.MIN_VALUE && l <= Integer.MAX_VALUE) {
            return (int) l;
        }
        throw new IllegalStateException("Cannot convert long to int: " + l);
    }
}
public class ApiOvhDedicatedCloud { /** * Get this object properties * REST : GET / dedicatedCloud / { serviceName } / federation / activeDirectory / { activeDirectoryId } * @ param serviceName [ required ] Domain of the service * @ param activeDirectoryId [ required ] Id of the Active Directory */ public OvhFederationAccessNetwork serviceName_federation_activeDirectory_activeDirectoryId_GET ( String serviceName , Long activeDirectoryId ) throws IOException { } }
String qPath = "/dedicatedCloud/{serviceName}/federation/activeDirectory/{activeDirectoryId}" ; StringBuilder sb = path ( qPath , serviceName , activeDirectoryId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhFederationAccessNetwork . class ) ;
public class FingerprintDialog { /** * Check if a fingerprint scanner is available and if at least one finger is enrolled in the phone . * @ param context A context * @ return True is authentication is available , False otherwise */ public static boolean isAvailable ( Context context ) { } }
FingerprintManager manager = ( FingerprintManager ) context . getSystemService ( Context . FINGERPRINT_SERVICE ) ; return ( manager != null && manager . isHardwareDetected ( ) && manager . hasEnrolledFingerprints ( ) ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcAnnotationCurveOccurrence ( ) { } }
if ( ifcAnnotationCurveOccurrenceEClass == null ) { ifcAnnotationCurveOccurrenceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 13 ) ; } return ifcAnnotationCurveOccurrenceEClass ;
public class LogWriter { /** * Delete obsolete log files , which is by default 2 days . */ public static void cleanObsolete ( ) { } }
HandlerThreadFactory . getWriteLogThreadHandler ( ) . post ( new Runnable ( ) { @ Override public void run ( ) { long now = System . currentTimeMillis ( ) ; File [ ] f = BlockCanaryInternals . getLogFiles ( ) ; if ( f != null && f . length > 0 ) { synchronized ( SAVE_DELETE_LOCK ) { for ( File aF : f ) { if ( now - aF . lastModified ( ) > OBSOLETE_DURATION ) { aF . delete ( ) ; } } } } } } ) ;
public class ServletRegistrationComponent { /** * The method to use when dependencies are invalidated . * @ throws Exception in case of critical error */ public void stopping ( ) throws Exception { } }
// Update the HTTP service this . logger . fine ( "iPojo unregisters REST and icons servlets related to Roboconf's DM." ) ; if ( this . httpService != null ) { this . httpService . unregister ( CONTEXT ) ; } else { this . logger . fine ( "The HTTP service is gone. The servlets were already unregistered." ) ; } // Unregister the preference if ( this . manager != null ) { this . manager . preferencesMngr ( ) . removeFromList ( IPreferencesMngr . WEB_EXTENSIONS , CONTEXT ) ; }
public class IsolationForestModelV3 { /** * Version & Schema - specific filling into the impl */ @ Override public IsolationForestModel createImpl ( ) { } }
IsolationForestV3 . IsolationForestParametersV3 p = this . parameters ; IsolationForestModel . IsolationForestParameters parms = p . createImpl ( ) ; return new IsolationForestModel ( model_id . key ( ) , parms , new IsolationForestModel . IsolationForestOutput ( null ) ) ;
public class XMLUpdateShredder {
    /**
     * Delete node: removes nodes from the Treetank storage until the
     * transaction points at the node whose key equals {@code mKeyMatches},
     * then repositions the cursor so the shredder can continue.
     *
     * @throws TTException In case any exception occurs while moving the
     *             cursor or deleting nodes in Treetank.
     */
    private void deleteNode() throws TTException {
        /*
         * If found in one of the rightsiblings in the current shreddered
         * structure remove all nodes until the transaction points to the found
         * node (keyMatches).
         */
        if (mInserted && !mMovedToRightSibling) {
            mInserted = false;
            if (((ITreeStructData) mWtx.getNode()).hasRightSibling()) {
                // Cursor is on the inserted node, so move to right sibling.
                mWtx.moveTo(((ITreeStructData) mWtx.getNode()).getRightSiblingKey());
            }
        }
        // // Check if transaction is on the last node in the shreddered file.
        // checkIfLastNode(true);
        // Determines if transaction has moved to the parent node after a delete
        // operation.
        boolean movedToParent = false;
        // Determines if the last node in a subtree is going to be deleted.
        boolean isLast = false;
        do {
            if (mWtx.getNode().getDataKey() != mKeyMatches) {
                final ITreeStructData node = (ITreeStructData) mWtx.getNode();
                if (!node.hasRightSibling() && !node.hasLeftSibling()) {
                    // if (mDelete == EDelete.ATSTARTMIDDLE) {
                    // // If the delete occurs right before an end tag the
                    // // level hasn't been incremented.
                    // mLevelInShreddered--;
                    /*
                     * Node has no right and no left sibling, so the transaction
                     * moves to the parent after the delete.
                     */
                    movedToParent = true;
                } else if (!node.hasRightSibling()) {
                    // Last node has been reached, which means that the
                    // transaction moves to the left sibling.
                    isLast = true;
                }
                mWtx.remove();
            }
        } while (mWtx.getNode().getDataKey() != mKeyMatches && !movedToParent && !isLast);
        if (movedToParent) {
            if (mDelete == EDelete.ATBOTTOM) {
                /*
                 * Deleted right before an end tag has been parsed, thus don't
                 * move transaction to next node in processEndTag().
                 */
                mRemovedNode = true;
            }
            /*
             * Treetank transaction has been moved to parent, because all child
             * nodes have been deleted, thus to right sibling.
             */
            mWtx.moveTo(((ITreeStructData) mWtx.getNode()).getRightSiblingKey());
        } else {
            if (((ITreeStructData) mWtx.getNode()).hasFirstChild()) {
                if (mDelete == EDelete.ATBOTTOM && isLast) {
                    /*
                     * Deleted right before an end tag has been parsed, thus
                     * don't move transaction to next node in processEndTag().
                     */
                    mRemovedNode = true;
                }
                if (isLast) {
                    // If last node of a subtree has been removed, move to
                    // parent and right sibling.
                    mWtx.moveTo(mWtx.getNode().getParentKey());
                    mWtx.moveTo(((ITreeStructData) mWtx.getNode()).getRightSiblingKey());
                    // // If the delete occurs right before an end tag the level
                    // // hasn't been incremented.
                    // if (mDelete == EDelete.ATSTARTMIDDLE) {
                    //     mLevelInShreddered--;
                }
            }
        }
        // Check if transaction is on the last node in the shreddered file.
        // checkIfLastNode(true);
        mInsert = EInsert.NOINSERT;
    }
}
public class CompRowMatrix { /** * Finds the insertion index */ private int getIndex ( int row , int column ) { } }
int i = no . uib . cipr . matrix . sparse . Arrays . binarySearch ( columnIndex , column , rowPointer [ row ] , rowPointer [ row + 1 ] ) ; if ( i >= 0 && columnIndex [ i ] == column ) return i ; else throw new IndexOutOfBoundsException ( "Entry (" + ( row + 1 ) + ", " + ( column + 1 ) + ") is not in the matrix structure" ) ;
public class AbstractPrincipalAttributesRepository { /** * Gets principal attributes . * @ param principal the principal * @ return the principal attributes */ @ JsonIgnore protected Map < String , List < Object > > getPrincipalAttributes ( final Principal principal ) { } }
if ( ignoreResolvedAttributes ) { return new HashMap < > ( 0 ) ; } return convertPrincipalAttributesToPersonAttributes ( principal . getAttributes ( ) ) ;
public class JsJmsMessageImpl { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . mfp . JsJmsMessage # uncheckedSetDeliveryDelay ( long ) */ @ Override public void uncheckedSetDeliveryDelay ( long value ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "uncheckedSetDeliveryDelay" , Long . valueOf ( value ) ) ; getHdr2 ( ) . setLongField ( JsHdr2Access . DELIVERYDELAY_DATA , value ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "uncheckedSetDeliveryDelay" ) ;
public class ShogunCoreAuthenticationProvider {
    /**
     * Authenticates the given username/password pair against the user DAO.
     * This method has to be {@link Transactional} to allow that associated
     * entities can be fetched lazily.
     * <p>
     * A single generic failure message is used for every failure mode so that
     * callers cannot distinguish "unknown user" from "wrong password".
     *
     * @see org.springframework.security.authentication.AuthenticationProvider#authenticate(org.springframework.security.core.Authentication)
     */
    @Override
    @Transactional(value = "transactionManager", readOnly = true)
    public Authentication authenticate(Authentication authentication) throws AuthenticationException {
        // prepare an exception
        final String exceptionMessage = "User and password do not match.";
        // get username / password
        String accountName = authentication.getName();
        String rawPassword = (String) authentication.getCredentials();
        LOG.debug("Trying to authenticate User '" + accountName + "'");
        User user = userDao.findByAccountName(accountName);
        // prepare set of authorities
        Set<GrantedAuthority> grantedAuthorities = new HashSet<GrantedAuthority>();
        String encryptedPassword = null;
        if (user == null) {
            LOG.warn("No user for account name '" + accountName + "' could be found.");
            throw new UsernameNotFoundException(exceptionMessage);
        } else if (!user.isActive()) {
            LOG.warn("The user with the account name '" + accountName + "' is not active.");
            throw new DisabledException(exceptionMessage);
        } else {
            encryptedPassword = user.getPassword();
            // check if rawPassword matches the hash from db
            if (passwordEncoder.matches(rawPassword, encryptedPassword)) {
                Set<Role> allUserRoles = getAllUserRoles(user);
                // create granted authorities for the security context
                for (Role role : allUserRoles) {
                    grantedAuthorities.add(new SimpleGrantedAuthority(role.getName()));
                }
            } else {
                LOG.warn("The given password for user '" + accountName + "' does not match.");
                throw new BadCredentialsException(exceptionMessage);
            }
        }
        // Create corresponding token to forward in Spring Security's filter
        // chain. We will use the SHOGun-Core user as the principal.
        Authentication authResult = null;
        if (grantedAuthorities.isEmpty()) {
            // if the user has no authorities, we will build the
            // UsernamePasswordAuthenticationToken without authorities, which
            // leads to an unauthenticated user, i.e. isAuthenticated() of
            // authenticationToken will return false afterwards.
            LOG.warn("The user '" + accountName
                    + "' has no authorities and will thereby NOT be authenticated.");
            authResult = new UsernamePasswordAuthenticationToken(user, encryptedPassword);
        } else {
            // if we pass some grantedAuthorities, isAuthenticated() of
            // authenticationToken will return true afterwards
            authResult = new UsernamePasswordAuthenticationToken(user, encryptedPassword, grantedAuthorities);
            LOG.debug("The user '" + accountName + "' got the following (explicit) roles: "
                    + StringUtils.join(getRawRoleNames(grantedAuthorities), ", "));
        }
        final boolean isAuthenticated = authResult.isAuthenticated();
        final String authLog = isAuthenticated ? "has succesfully" : "has NOT";
        LOG.info("The user '" + accountName + "' " + authLog + " been authenticated.");
        return authResult;
    }
}
public class AbstractIndirectionHandler { /** * Calls afterMaterialization on all registered listeners in the reverse * order of registration . */ protected void afterMaterialization ( ) { } }
if ( _listeners != null ) { MaterializationListener listener ; // listeners may remove themselves during the afterMaterialization // callback . // thus we must iterate through the listeners vector from back to // front // to avoid index problems . for ( int idx = _listeners . size ( ) - 1 ; idx >= 0 ; idx -- ) { listener = ( MaterializationListener ) _listeners . get ( idx ) ; listener . afterMaterialization ( this , _realSubject ) ; } }
public class AptProperty {
    /**
     * Returns the class name of the property editor class, or null when no
     * editor is declared.
     * <p>
     * The editorClass attribute cannot be read through the regular annotation
     * API because APT does not allow access to Class-valued annotation members
     * (the referenced type may not have been compiled yet), so the annotation
     * mirrors are walked and matched by their string names instead.
     */
    public String getEditorClass() {
        PropertyInfo pi = getPropertyInfo();
        if (pi == null)
            return null;
        // This is trickier, because APT doesn't allow access to Class-valued annotations,
        // because the type may not yet have been compiled.
        Collection<AnnotationMirror> annotMirrors = _propDecl.getAnnotationMirrors();
        for (AnnotationMirror am : annotMirrors) {
            if (am.getAnnotationType().toString().equals("org.apache.beehive.controls.api.packaging.PropertyInfo")) {
                Map<AnnotationTypeElementDeclaration, AnnotationValue> avs = am.getElementValues();
                for (AnnotationTypeElementDeclaration ated : avs.keySet()) {
                    if (ated.toString().equals("editorClass()")) {
                        // Get the annotation value, and ignore the default value which implies
                        // no editor class (because 'null' cannot be a default value)
                        String editorClass = avs.get(ated).getValue().toString();
                        if (editorClass.equals("org.apache.beehive.controls.api.packaging.PropertyInfo.NoEditor.class"))
                            return null;
                        return editorClass;
                    }
                }
                // the PropertyInfo mirror was found but carried no explicit
                // editorClass value — stop scanning the remaining mirrors
                break;
            }
        }
        return null;
    }
}
public class MusicController {
    /**
     * Shows the complete list of registered musics in JSON format: fetches
     * every record via the DAO and serializes it into the result.
     */
    @Public
    @Path("/musics/list/json")
    public void showAllMusicsAsJSON() {
        result.use(json()).from(musicDao.listAll()).serialize();
    }
}
public class Strman {
    // Compiled once instead of on every call (String.replaceAll recompiles
    // its pattern each invocation): inserts '_' between a lower-case letter
    // or digit and the following run of upper-case letters.
    private static final Pattern CAMEL_BOUNDARY = Pattern.compile("([a-z\\d])([A-Z]+)");
    // Collapses runs of hyphens and whitespace into a single '_'.
    private static final Pattern SEPARATORS = Pattern.compile("[-\\s]+");

    /**
     * Changes passed in string to all lower case and adds underscore between
     * words.
     *
     * @param input The input string; null or empty yields ""
     * @return the input string in all lower case with underscores between words
     */
    public static String underscored(final String input) {
        if (input == null || input.length() == 0) {
            return "";
        }
        String result = CAMEL_BOUNDARY.matcher(input.trim()).replaceAll("$1_$2");
        result = SEPARATORS.matcher(result).replaceAll("_");
        // NOTE(review): uses the default locale, matching the original
        // behavior; consider toLowerCase(Locale.ROOT) if locale-independent
        // output is wanted (e.g. Turkish dotless-i).
        return result.toLowerCase();
    }
}
public class Page { /** * Returns the minimum bounding box that contains all the TextElements on this Page */ public Rectangle getTextBounds ( ) { } }
List < TextElement > texts = this . getText ( ) ; if ( ! texts . isEmpty ( ) ) { return Utils . bounds ( texts ) ; } else { return new Rectangle ( ) ; }
public class Cells { /** * Given the table name , returns the List of Cell object associated to that table . * @ param nameSpace the table name . * @ return the List of Cell object associated to that table . */ private List < Cell > getCellsByTable ( String nameSpace ) { } }
String tName = StringUtils . isEmpty ( nameSpace ) ? this . nameSpace : nameSpace ; List < Cell > res = cells . get ( tName ) ; if ( res == null ) { res = new ArrayList < > ( ) ; cells . put ( tName , res ) ; } return res ;
public class DbxRequestUtil { /** * Convenience function for making HTTP POST requests . */ public static HttpRequestor . Response startPostNoAuth ( DbxRequestConfig requestConfig , String sdkUserAgentIdentifier , String host , String path , /* @ Nullable */ String /* @ Nullable */ [ ] params , /* @ Nullable */ List < HttpRequestor . Header > headers ) throws NetworkIOException { } }
byte [ ] encodedParams = StringUtil . stringToUtf8 ( encodeUrlParams ( requestConfig . getUserLocale ( ) , params ) ) ; headers = copyHeaders ( headers ) ; headers . add ( new HttpRequestor . Header ( "Content-Type" , "application/x-www-form-urlencoded; charset=utf-8" ) ) ; return startPostRaw ( requestConfig , sdkUserAgentIdentifier , host , path , encodedParams , headers ) ;
public class DBaseFileAttributePool { /** * Get an attribute container that corresponds to the specified file . * @ param dbaseFile is the file to read * @ param recordNumber is the index of the record inside the file ( { @ code 0 . . size - 1 } ) . * @ return a container or < code > null < / code > on error */ @ Pure public static DBaseFileAttributeProvider getContainer ( URL dbaseFile , int recordNumber ) { } }
final DBaseFileAttributePool pool = getPool ( dbaseFile ) ; if ( pool != null ) { final DBaseFileAttributeAccessor accessor = pool . getAccessor ( recordNumber ) ; if ( accessor != null ) { return new DBaseFileAttributeProvider ( accessor ) ; } } return null ;
public class ChemSequenceManipulator { /** * Returns a List of all IChemObject inside a ChemSequence . * @ return A List of all ChemObjects . */ public static List < IChemObject > getAllChemObjects ( IChemSequence sequence ) { } }
List < IChemObject > list = new ArrayList < IChemObject > ( ) ; // list . add ( sequence ) ; for ( int i = 0 ; i < sequence . getChemModelCount ( ) ; i ++ ) { list . add ( sequence . getChemModel ( i ) ) ; List < IChemObject > current = ChemModelManipulator . getAllChemObjects ( sequence . getChemModel ( i ) ) ; for ( IChemObject chemObject : current ) { if ( ! list . contains ( chemObject ) ) list . add ( chemObject ) ; } } return list ;
public class LasUtils {
    /**
     * Creates a builder for las data.
     * The attributes are:
     * <ul>
     * <li>the_geom: a point geometry</li>
     * <li>elev</li>
     * <li>intensity</li>
     * <li>classification</li>
     * <li>impulse</li>
     * <li>numimpulse</li>
     * </ul>
     * <p>
     * NOTE(review): the builder is cached in a static field, so the CRS passed
     * on the first call wins — later calls with a different CRS silently reuse
     * the first builder.  The lazy initialization is also not thread-safe.
     * Confirm both are intended.
     *
     * @param crs the {@link CoordinateReferenceSystem}.
     * @return the {@link SimpleFeatureBuilder builder}.
     */
    public static SimpleFeatureBuilder getLasFeatureBuilder(CoordinateReferenceSystem crs) {
        if (lasSimpleFeatureBuilder == null) {
            SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();
            b.setName("lasdata");
            b.setCRS(crs);
            b.add(THE_GEOM, Point.class);
            b.add(ID, Integer.class);
            b.add(ELEVATION, Double.class);
            b.add(INTENSITY, Double.class);
            b.add(CLASSIFICATION, Integer.class);
            b.add(IMPULSE, Double.class);
            b.add(NUM_OF_IMPULSES, Double.class);
            final SimpleFeatureType featureType = b.buildFeatureType();
            lasSimpleFeatureBuilder = new SimpleFeatureBuilder(featureType);
        }
        return lasSimpleFeatureBuilder;
    }
}
public class OpenTok { /** * Creates a token for connecting to an OpenTok session , using the default settings . The default * settings are the following : * < ul > * < li > The token is assigned the role of publisher . < / li > * < li > The token expires 24 hours after it is created . < / li > * < li > The token includes no connection data . < / li > * < / ul > * The following example shows how to generate a token that has the default settings : * < pre > * import com . opentok . OpenTok ; * class Test { * public static void main ( String argv [ ] ) throws OpenTokException { * int API _ KEY = 0 ; / / Replace with your OpenTok API key ( see https : / / tokbox . com / account ) . * String API _ SECRET = " " ; / / Replace with your OpenTok API secret . * OpenTok sdk = new OpenTok ( API _ KEY , API _ SECRET ) ; * / / Generate a basic session . Or you could use an existing session ID . * String sessionId = System . out . println ( sdk . createSession ( ) . getSessionId ( ) ) ; * String token = sdk . generateToken ( sessionId ) ; * System . out . println ( token ) ; * < / pre > * @ param sessionId The session ID corresponding to the session to which the user will connect . * @ return The token string . * @ see # generateToken ( String , TokenOptions ) */ public String generateToken ( String sessionId ) throws OpenTokException { } }
return generateToken ( sessionId , new TokenOptions . Builder ( ) . build ( ) ) ;
public class BackPropagationNet {
    /**
     * Computes the delta between the network's output for a sample and its
     * true value, writing the back-propagation delta into {@code delta_out}.
     * For classification the target vector is f.max() - targetBump at the true
     * class and f.min() + targetBump elsewhere; for regression the single
     * target value is rescaled into the activation function's output range.
     *
     * @param dataSet   the data set we are learning from
     * @param idx       the index into the data set for the current data point
     * @param delta_out the place to store the delta, may already be initialized with random noise
     * @param a_i       the activation of the final output layer for the data point
     * @param d_i       the derivative of the activation of the final output layer
     * @return the (squared) error that occurred in predicting this data point
     */
    private double computeOutputDelta(DataSet dataSet, final int idx, Vec delta_out, Vec a_i, Vec d_i) {
        double error = 0;
        if (dataSet instanceof ClassificationDataSet) {
            ClassificationDataSet cds = (ClassificationDataSet) dataSet;
            final int ct = cds.getDataPointCategory(idx);
            // first fill delta_out with the per-class target vector
            for (int i = 0; i < outputSize; i++)
                if (i == ct)
                    delta_out.set(i, f.max() - targetBump);
                else
                    delta_out.set(i, f.min() + targetBump);
            // then turn each target in place into the gradient term,
            // accumulating the squared error along the way
            for (int j = 0; j < delta_out.length(); j++) {
                double val = delta_out.get(j);
                error += pow((val - a_i.get(j)), 2);
                val = -(val - a_i.get(j)) * d_i.get(j);
                delta_out.set(j, val);
            }
        } else if (dataSet instanceof RegressionDataSet) {
            RegressionDataSet rds = (RegressionDataSet) dataSet;
            double val = rds.getTargetValue(idx);
            // rescale the raw target into the activation function's range
            val = f.min() + targetBump + targetMultiplier * (val - targetMin);
            error += pow((val - a_i.get(0)), 2);
            delta_out.set(0, -(val - a_i.get(0)) * d_i.get(0));
        } else {
            throw new RuntimeException("BUG: please report");
        }
        return error;
    }
}
public class Resource {
    /**
     * GET method: delegates to {@code request} with the default options.
     *
     * @param path the resource path
     * @param data the response holder
     * @return response from API.
     */
    public String get(String path, Response data) {
        return request("GET", path, data, getDefaultOptions());
    }
}
public class Annotate {
    /**
     * Annotates morphological information (POS tags and lemmas) into a NAF
     * document, sentence by sentence.
     *
     * @param kaf the NAF document
     */
    public final void annotatePOSToKAF(final KAFDocument kaf) {
        final List<List<WF>> sentences = kaf.getSentences();
        for (final List<WF> wfs : sentences) {
            // one single-WF span per token; spans are rewritten by
            // getMultiWordSpans() when multiword matching is enabled
            final List<ixa.kaflib.Span<WF>> tokenSpans = new ArrayList<ixa.kaflib.Span<WF>>();
            List<Morpheme> morphemes = null;
            final String[] tokens = new String[wfs.size()];
            for (int i = 0; i < wfs.size(); i++) {
                tokens[i] = wfs.get(i).getForm();
                final List<WF> wfTarget = new ArrayList<WF>();
                wfTarget.add(wfs.get(i));
                tokenSpans.add(KAFDocument.newWFSpan(wfTarget));
            }
            if (this.multiwords) {
                // join multiword expressions before tagging and lemmatizing
                final String[] multiWordTokens = this.multiWordMatcher.getTokensWithMultiWords(tokens);
                List<String> posTags = this.posTagger.posAnnotate(multiWordTokens);
                String[] posTagsArray = new String[posTags.size()];
                posTagsArray = posTags.toArray(posTagsArray);
                morphemes = this.lemmatizer.getMorphemes(multiWordTokens, posTagsArray);
                getMultiWordSpans(tokens, wfs, tokenSpans);
            } else {
                List<String> posTags = this.posTagger.posAnnotate(tokens);
                String[] posTagsArray = new String[posTags.size()];
                posTagsArray = posTags.toArray(posTagsArray);
                morphemes = this.lemmatizer.getMorphemes(tokens, posTagsArray);
            }
            for (int i = 0; i < morphemes.size(); i++) {
                final Term term = kaf.newTerm(tokenSpans.get(i));
                if (this.dictag || multiwords) {
                    // the dictionary-based tagger may override the
                    // statistically predicted tag
                    final String dictPosTag = this.dictMorphoTagger.tag(morphemes.get(i).getWord(),
                            morphemes.get(i).getTag());
                    morphemes.get(i).setTag(dictPosTag);
                }
                final String posId = Resources.getKafTagSet(morphemes.get(i).getTag(), lang);
                final String type = Resources.setTermType(posId);
                // dictionary lemmatizer overwrites probabilistic predictions if
                // lemma is not equal to "O"
                if (this.dictLemmatizer != null) {
                    final String lemma = this.dictLemmatizer.apply(morphemes.get(i).getWord(),
                            morphemes.get(i).getTag());
                    if (!lemma.equalsIgnoreCase("O")) {
                        morphemes.get(i).setLemma(lemma);
                    }
                }
                term.setType(type);
                term.setLemma(morphemes.get(i).getLemma());
                term.setPos(posId);
                term.setMorphofeat(morphemes.get(i).getTag());
            }
        }
    }
}
public class TraceEventHelper { /** * ToC : Connections * @ param data The data * @ return The events * @ exception Exception If an error occurs */ public static Map < String , List < TraceEvent > > tocConnections ( List < TraceEvent > data ) throws Exception { } }
Map < String , List < TraceEvent > > result = new TreeMap < String , List < TraceEvent > > ( ) ; for ( TraceEvent te : data ) { if ( te . getType ( ) == TraceEvent . GET_CONNECTION ) { List < TraceEvent > l = result . get ( te . getPayload1 ( ) ) ; if ( l == null ) l = new ArrayList < TraceEvent > ( ) ; l . add ( te ) ; result . put ( te . getPayload1 ( ) , l ) ; } } return result ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcRelConnectsElements ( ) { } }
if ( ifcRelConnectsElementsEClass == null ) { ifcRelConnectsElementsEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 534 ) ; } return ifcRelConnectsElementsEClass ;
public class MetadataMapper { /** * 将byte映射为对象 * @ param content * @ param charset * @ return */ public static Set < MetaData > fromByte ( byte [ ] content , Charset charset ) { } }
Set < MetaData > mdSet = new HashSet < MetaData > ( ) ; if ( null == content ) { return mdSet ; } String meta_buff = new String ( content , charset ) ; String [ ] rows = meta_buff . split ( OtherConstants . FDFS_RECORD_SEPERATOR ) ; for ( int i = 0 ; i < rows . length ; i ++ ) { String [ ] cols = rows [ i ] . split ( OtherConstants . FDFS_FIELD_SEPERATOR , 2 ) ; MetaData md = new MetaData ( cols [ 0 ] ) ; if ( cols . length == 2 ) { md . setValue ( cols [ 1 ] ) ; } mdSet . add ( md ) ; } return mdSet ;
public class STCImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public void eUnset ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . STC__FRGCOLOR : setFRGCOLOR ( FRGCOLOR_EDEFAULT ) ; return ; case AfplibPackage . STC__PRECSION : setPRECSION ( PRECSION_EDEFAULT ) ; return ; } super . eUnset ( featureID ) ;
public class MyTreebankReader {
    /**
     * Splits a parse tree into several new trees, using commas, semicolons
     * and other sentence punctuation found among the children of the root's
     * first child as split markers (the punctuation token stays as the last
     * child of its segment).
     * (Original javadoc, in Chinese: split the tree into several new syntax
     * trees at commas or semicolons.)
     *
     * @param inst the tree to split; default nodes are removed first
     * @return the new trees; when no splitting punctuation is found the list
     *         contains the original tree, and when the tree shape precondition
     *         fails the list is empty
     */
    private static List<Tree<Node>> getNewTree(Tree<Node> inst) {
        delDefault(inst);
        List<Tree<Node>> newTreeList = new ArrayList<Tree<Node>>();
        List<Tree<Node>> children = new ArrayList<Tree<Node>>();
        if (!inst.isLeaf() && !inst.getFirstChild().isLeaf()) {
            boolean hasPu = false;
            Tree<Node> newInst = null;
            for (int i = 0; i < inst.getFirstChild().children.size(); i++) {
                children.add(inst.getFirstChild().getChild(i));
                String tag = inst.getFirstChild().getLabel().getTag();
                String flag0 = inst.getFirstChild().getChild(i).getLabel().getTag();
                String data0 = inst.getFirstChild().getChild(i).getLabel().getData();
                // a PU child carrying splitting punctuation closes the segment
                if (flag0.equals("PU") && (data0.equals(",") || data0.equals(";") || data0.equals("、") || data0.equals("。") || data0.equals("!") || data0.equals("?"))) {
                    hasPu = true;
                    if (children.size() != 0)
                        newInst = new Tree<Node>(new Node(tag, "", 0), children);
                    else
                        newInst = new Tree<Node>(new Node(tag, "", 0));
                    newTreeList.add(newInst);
                    // start collecting the next segment
                    children = new ArrayList<Tree<Node>>();
                }
            }
            if (!hasPu)
                newTreeList.add(inst);
        }
        return newTreeList;
    }
}
public class RangeVariableResolver {
    /**
     * Divides AND conditions and assigns the conjuncts to the conditions
     * list.  Returns the remaining (non-decomposed) expression tree, or
     * Expression.EXPR_TRUE when everything was moved into the list.
     * Row equality (a, b) = (c, d) is expanded into one equality per column.
     *
     * @param e          the condition to decompose; may be null
     * @param conditions receives the decomposed conjuncts
     * @return the simplified residual expression
     */
    static Expression decomposeCondition(Expression e, HsqlArrayList conditions) {
        if (e == null) {
            return Expression.EXPR_TRUE;
        }
        Expression arg1 = e.getLeftNode();
        Expression arg2 = e.getRightNode();
        int type = e.getType();
        if (type == OpTypes.AND) {
            // recurse into both sides; either side may collapse to TRUE
            arg1 = decomposeCondition(arg1, conditions);
            arg2 = decomposeCondition(arg2, conditions);
            if (arg1 == Expression.EXPR_TRUE) {
                return arg2;
            }
            if (arg2 == Expression.EXPR_TRUE) {
                return arg1;
            }
            e.setLeftNode(arg1);
            e.setRightNode(arg2);
            return e;
        } else if (type == OpTypes.EQUAL) {
            if (arg1.getType() == OpTypes.ROW && arg2.getType() == OpTypes.ROW) {
                // expand (a, b) = (c, d) into a = c, b = d
                for (int i = 0; i < arg1.nodes.length; i++) {
                    Expression part = new ExpressionLogical(arg1.nodes[i], arg2.nodes[i]);
                    part.resolveTypes(null, null);
                    conditions.add(part);
                }
                return Expression.EXPR_TRUE;
            }
        }
        // any other condition is moved into the list wholesale
        if (e != Expression.EXPR_TRUE) {
            conditions.add(e);
        }
        return Expression.EXPR_TRUE;
    }
}
public class SelenideLogger { /** * Remove listener ( from the current thread ) . * @ param name unique name of listener added by method { @ link # addListener ( String , LogEventListener ) } * @ param < T > class of listener to be returned * @ return the listener being removed */ @ SuppressWarnings ( "unchecked" ) public static < T extends LogEventListener > T removeListener ( String name ) { } }
Map < String , LogEventListener > listeners = SelenideLogger . listeners . get ( ) ; return listeners == null ? null : ( T ) listeners . remove ( name ) ;
public class Javalin {

    /**
     * Adds a POST request handler with the given roles for the specified path.
     * Requires an access manager to be set on the instance.
     *
     * @param path           the path the handler is mapped to
     * @param handler        the handler to invoke for matching POST requests
     * @param permittedRoles the roles permitted to access this handler
     * @return this instance, for chaining
     * @see AccessManager
     * @see <a href="https://javalin.io/documentation#handlers">Handlers in docs</a>
     */
    public Javalin post(@NotNull String path, @NotNull Handler handler, @NotNull Set<Role> permittedRoles) {
        // Delegates to the generic registration method with the POST verb.
        return addHandler(HandlerType.POST, path, handler, permittedRoles);
    }
}
public class TimeSourceProvider { /** * Get a TimeSource * the default TimeSource instance ( default : { @ link NTPTimeSource } * @ return TimeSource */ public static TimeSource getInstance ( ) { } }
String className = System . getProperty ( DL4JSystemProperties . TIMESOURCE_CLASSNAME_PROPERTY , DEFAULT_TIMESOURCE_CLASS_NAME ) ; return getInstance ( className ) ;
public class SmartsStereoMatch { /** * Verify the tetrahedral stereochemistry ( clockwise / anticlockwise ) of atom * { @ code u } is preserved in the target when the { @ code mapping } is used . * @ param u tetrahedral index in the target * @ param mapping mapping of vertices * @ return the tetrahedral configuration is preserved */ private boolean checkTetrahedral ( int u , int [ ] mapping ) { } }
int v = mapping [ u ] ; if ( targetTypes [ v ] != null && targetTypes [ v ] != Type . Tetrahedral ) return false ; ITetrahedralChirality queryElement = ( ITetrahedralChirality ) queryElements [ u ] ; ITetrahedralChirality targetElement = ( ITetrahedralChirality ) targetElements [ v ] ; IAtom queryAtom = query . getAtom ( u ) ; IAtom targetAtom = target . getAtom ( v ) ; int [ ] us = neighbors ( queryElement , queryMap ) ; us = map ( u , v , us , mapping ) ; int p = permutationParity ( us ) ; // check if unspecified was allowed if ( targetTypes [ v ] == null ) { if ( queryAtom instanceof SMARTSAtom ) return ( ( SMARTSAtom ) queryAtom ) . chiralityMatches ( targetAtom , 0 , p ) ; else return ( ( QueryAtom ) queryAtom ) . getExpression ( ) . matches ( targetAtom , 0 ) ; } // target was non - tetrahedral if ( targetTypes [ v ] != Type . Tetrahedral ) return false ; int [ ] vs = neighbors ( targetElement , targetMap ) ; int q = permutationParity ( vs ) * parity ( targetElement . getStereo ( ) ) ; if ( queryAtom instanceof SMARTSAtom ) return ( ( SMARTSAtom ) queryAtom ) . chiralityMatches ( targetAtom , q , p ) ; else { q *= p ; if ( q < 0 ) return ( ( QueryAtom ) queryAtom ) . getExpression ( ) . matches ( targetAtom , IStereoElement . LEFT ) ; else if ( q > 0 ) return ( ( QueryAtom ) queryAtom ) . getExpression ( ) . matches ( targetAtom , IStereoElement . RIGHT ) ; else return ( ( QueryAtom ) queryAtom ) . getExpression ( ) . matches ( targetAtom , 0 ) ; }
public class LogManager {

    /**
     * Reinitialize the logging properties and reread the logging configuration
     * from the given stream, which should be in java.util.Properties format.
     * Any log level definitions in the new configuration file will be applied
     * using Logger.setLevel(), if the target Logger exists.
     *
     * @param ins stream to read properties from
     * @throws SecurityException if a security manager exists and the caller
     *                           does not have LoggingPermission("control")
     * @throws IOException       if there are problems reading from the stream
     */
    public void readConfiguration(InputStream ins) throws IOException {
        checkPermission();
        // Discard the previous configuration before loading the new one.
        reset();
        // Load the properties
        Properties.loadLineReader(new Properties.LineReader(ins), props::put);
        if (ReflectionUtil.isJreReflectionStripped()) {
            // NOTE(review): J2ObjC-specific path — presumably compensates for
            // stripped reflection metadata; confirm against ReflectionUtil.
            reflectionStrippedProcessing();
        }
        // Instantiate new configuration objects named by the "config" property.
        String names[] = parseClassNames("config");
        for (int i = 0; i < names.length; i++) {
            String word = names[i];
            try {
                getClassInstance(word).newInstance();
            } catch (Exception ex) {
                // Best effort: a broken config class must not abort the rest.
                System.err.println("Can't load config class \"" + word + "\"");
                System.err.println("" + ex);
                // ex.printStackTrace();
            }
        }
        // Set levels on any pre-existing loggers, based on the new properties.
        setLevelsOnExistingLoggers();
        /* J2ObjC removed.
        // Notify any interested parties that our properties have changed.
        changes.firePropertyChange(null, null, null);
        */
        // Note that we need to reinitialize global handles when
        // they are first referenced.
        synchronized (this) {
            initializedGlobalHandlers = false;
        }
    }
}
public class PluginSkeleton { /** * Returns true if the plugin has the same name and logger repository as the * testPlugin passed in . * @ param testPlugin The plugin to test equivalency against . * @ return Returns true if testPlugin is considered to be equivalent . */ public boolean isEquivalent ( final Plugin testPlugin ) { } }
return ( repository == testPlugin . getLoggerRepository ( ) ) && ( ( this . name == null && testPlugin . getName ( ) == null ) || ( this . name != null && name . equals ( testPlugin . getName ( ) ) ) ) && this . getClass ( ) . equals ( testPlugin . getClass ( ) ) ;
public class SFTrustManager {

    /**
     * Performs OCSP certificate revocation checks for the given chain.
     *
     * @param chain    chain of certificates attached
     * @param peerHost hostname of the server being validated
     * @throws CertificateException if any certificate validation fails
     */
    void validateRevocationStatus(X509Certificate[] chain, String peerHost) throws CertificateException {
        final List<Certificate> bcChain = convertToBouncyCastleCertificate(chain);
        final List<SFPair<Certificate, Certificate>> pairIssuerSubjectList = getPairIssuerSubject(bcChain);
        // The OCSP cache server itself is exempt from revocation checking.
        if (peerHost.startsWith("ocspssd")) {
            return;
        }
        if (ocspCacheServer.new_endpoint_enabled) {
            ocspCacheServer.resetOCSPResponseCacheServer(peerHost);
        }
        // All cache reads/writes happen under the shared cache lock.
        synchronized (OCSP_RESPONSE_CACHE_LOCK) {
            boolean isCached = isCached(pairIssuerSubjectList);
            if (this.useOcspResponseCacheServer && !isCached) {
                if (!ocspCacheServer.new_endpoint_enabled) {
                    LOGGER.debug("Downloading OCSP response cache from the server. URL: {}",
                            SF_OCSP_RESPONSE_CACHE_SERVER_URL);
                } else {
                    LOGGER.debug("Downloading OCSP response cache from the server. URL: {}",
                            ocspCacheServer.SF_OCSP_RESPONSE_CACHE_SERVER);
                }
                readOcspResponseCacheServer();
                // if the cache is downloaded from the server, it should be written
                // to the file cache at all times.
                WAS_CACHE_UPDATED = true;
            }
            executeRevocationStatusChecks(pairIssuerSubjectList, peerHost);
            // Persist the updated cache to disk, then clear the dirty flag.
            if (WAS_CACHE_UPDATED) {
                JsonNode input = encodeCacheToJSON();
                fileCacheManager.writeCacheFile(input);
                WAS_CACHE_UPDATED = false;
            }
        }
    }
}
public class StatsCollector { /** * Parses the configuration to determine if any extra tags should be included * with every stat emitted . * @ param config The config object to parse * @ throws IllegalArgumentException if the config is null . Other exceptions * may be thrown if the config values are unparseable . */ public static final void setGlobalTags ( final Config config ) { } }
if ( config == null ) { throw new IllegalArgumentException ( "Configuration cannot be null." ) ; } if ( config . getBoolean ( "tsd.core.stats_with_port" ) ) { global_tags = new HashMap < String , String > ( 1 ) ; global_tags . put ( "port" , config . getString ( "tsd.network.port" ) ) ; }
public class Program { /** * / < returns > < / returns > */ public TupleTwo < TreeNode , TreeNode > anyNode ( boolean bias , RandEngine randEngine ) { } }
List < TupleTwo < TreeNode , TreeNode > > nodes = flattenNodes ( ) ; if ( bias ) { if ( randEngine . uniform ( ) <= 0.1 ) // As specified by Koza , 90 % select function node , 10 % select terminal node { List < TupleTwo < TreeNode , TreeNode > > terminal_nodes = new ArrayList < > ( ) ; for ( TupleTwo < TreeNode , TreeNode > tuple : nodes ) { TreeNode node = tuple . _1 ( ) ; if ( node . isTerminal ( ) ) { terminal_nodes . add ( tuple ) ; } } if ( terminal_nodes . size ( ) > 0 ) { return terminal_nodes . get ( randEngine . nextInt ( terminal_nodes . size ( ) ) ) ; } else { return nodes . get ( randEngine . nextInt ( nodes . size ( ) ) ) ; } } else { List < TupleTwo < TreeNode , TreeNode > > function_nodes = new ArrayList < > ( ) ; for ( TupleTwo < TreeNode , TreeNode > tuple : nodes ) { TreeNode node = tuple . _1 ( ) ; if ( ! node . isTerminal ( ) ) { function_nodes . add ( tuple ) ; } } if ( function_nodes . size ( ) > 0 ) { return function_nodes . get ( randEngine . nextInt ( function_nodes . size ( ) ) ) ; } else { return nodes . get ( randEngine . nextInt ( nodes . size ( ) ) ) ; } } } else { return nodes . get ( randEngine . nextInt ( nodes . size ( ) ) ) ; }
public class TileSourceFactory { /** * removes any tile sources whose name matches the regular expression * @ param aRegex regular expression * @ return number of sources removed */ public static int removeTileSources ( final String aRegex ) { } }
int n = 0 ; for ( int i = mTileSources . size ( ) - 1 ; i >= 0 ; -- i ) { if ( mTileSources . get ( i ) . name ( ) . matches ( aRegex ) ) { mTileSources . remove ( i ) ; ++ n ; } } return n ;
public class AbstractAsyncNodeMonitorDescriptor { /** * Perform monitoring with detailed reporting . */ protected final @ Nonnull Result < T > monitorDetailed ( ) throws InterruptedException { } }
Map < Computer , Future < T > > futures = new HashMap < > ( ) ; Set < Computer > skipped = new HashSet < > ( ) ; for ( Computer c : Jenkins . getInstance ( ) . getComputers ( ) ) { try { VirtualChannel ch = c . getChannel ( ) ; futures . put ( c , null ) ; // sentinel value if ( ch != null ) { Callable < T , ? > cc = createCallable ( c ) ; if ( cc != null ) futures . put ( c , ch . callAsync ( cc ) ) ; } } catch ( RuntimeException | IOException e ) { LOGGER . log ( WARNING , "Failed to monitor " + c . getDisplayName ( ) + " for " + getDisplayName ( ) , e ) ; } } final long now = System . currentTimeMillis ( ) ; final long end = now + getMonitoringTimeOut ( ) ; final Map < Computer , T > data = new HashMap < > ( ) ; for ( Entry < Computer , Future < T > > e : futures . entrySet ( ) ) { Computer c = e . getKey ( ) ; Future < T > f = futures . get ( c ) ; data . put ( c , null ) ; // sentinel value if ( f != null ) { try { data . put ( c , f . get ( Math . max ( 0 , end - System . currentTimeMillis ( ) ) , MILLISECONDS ) ) ; } catch ( RuntimeException | TimeoutException | ExecutionException x ) { LOGGER . log ( WARNING , "Failed to monitor " + c . getDisplayName ( ) + " for " + getDisplayName ( ) , x ) ; } } else { skipped . add ( c ) ; } } return new Result < > ( data , skipped ) ;
public class UpdateBuildRequestMarshaller {

    /**
     * Marshalls the given request object into the protocol representation.
     *
     * @param updateBuildRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the field bindings
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UpdateBuildRequest updateBuildRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateBuildRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request field under its protocol binding.
            protocolMarshaller.marshall(updateBuildRequest.getBuildId(), BUILDID_BINDING);
            protocolMarshaller.marshall(updateBuildRequest.getName(), NAME_BINDING);
            protocolMarshaller.marshall(updateBuildRequest.getVersion(), VERSION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, keeping the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AbstractLogger { /** * Logs the provided data . * @ param level * The log level * @ param message * The message parts ( may be null ) * @ param throwable * The error ( may be null ) */ protected void log ( LogLevel level , Object [ ] message , Throwable throwable ) { } }
if ( level . getValue ( ) >= this . logLevel . getValue ( ) ) { // log the message this . logImpl ( level , message , throwable ) ; }
public class BrokerSession { /** * Adds an event handler for background polling events . * @ param hostEventHandler An event handler . */ public void addHostEventHandler ( IHostEventHandler hostEventHandler ) { } }
synchronized ( hostEventHandlers ) { if ( ! hostEventHandlers . contains ( hostEventHandler ) ) { hostEventHandlers . add ( hostEventHandler ) ; } }
public class PKMigrate {

    /**
     * Migrates the configured tables one by one.
     * <p>
     * Loads primary-key metadata for all configured tables, attaches the
     * foreign-key references pointing at each table, then migrates each table.
     *
     * @throws SQLException if any of the underlying queries or the migration fails
     */
    private void migrate() throws SQLException {
        // Load the primary-key metadata for the configured tables.
        QueryVO pk = helper.selectAll("pkList", ObjectBuilder.<String, Object>map().put("tables", tables).get());
        List<TablePkFkVO> tablePkFkVOs = pk.toBeans(TablePkFkVO.class);
        logger.info("TablePkFkVO size = {}", tablePkFkVOs.size());
        // For each table, attach the foreign-key constraints that reference it.
        for (TablePkFkVO table : tablePkFkVOs) {
            QueryVO fk = helper.selectAll("fkList", ObjectBuilder.<String, Object>map().put("table", table.getName()).get());
            table.setReferencedBy(fk.toBeans(RefConsVO.class));
        }
        logger.info("TablePkFkVO size = {}", tablePkFkVOs.size());
        // Migrate every table now that its PK/FK metadata is complete.
        for (TablePkFkVO tablePkFkVO : tablePkFkVOs) {
            migrateTable(tablePkFkVO);
        }
    }
}
public class AbstractViewQuery { /** * Set selected item of an AdapterView . * @ param position The position of the item to be selected . * @ return self */ public T setSelection ( int position ) { } }
if ( view instanceof AdapterView ) { AdapterView < ? > alv = ( AdapterView < ? > ) view ; alv . setSelection ( position ) ; } return self ( ) ;
public class Sound { /** * Loop this sound effect at a given volume and pitch * @ param pitch The pitch to play the sound effect at * @ param volume The volumen to play the sound effect at */ public void loop ( float pitch , float volume ) { } }
sound . playAsSoundEffect ( pitch , volume * SoundStore . get ( ) . getSoundVolume ( ) , true ) ;
public class JettyHandler { /** * Add a handler explicitly instead of through injection . This is for handlers created on the fly . * @ param handler */ final void addHandler ( final HttpHandler handler ) { } }
if ( handler != null ) { if ( ! eventHandlers . containsKey ( handler . getUriSpecification ( ) . toLowerCase ( ) ) ) { eventHandlers . put ( handler . getUriSpecification ( ) . toLowerCase ( ) , handler ) ; } else { LOG . log ( Level . WARNING , "JettyHandler handle is already registered: {0} " , handler . getUriSpecification ( ) ) ; } }
public class XmlReport { /** * < p > parse . < / p > * @ param file a { @ link java . io . File } object . * @ return a { @ link com . greenpepper . report . XmlReport } object . * @ throws java . lang . Exception if any . */ public static XmlReport parse ( File file ) throws Exception { } }
Reader in = BOMUtil . newReader ( file ) ; try { return parse ( in ) ; } finally { IOUtil . closeQuietly ( in ) ; }
public class OpenSSLFactory { /** * Sets the caCertificatePath . */ public void setCACertificatePath ( Path caCertificatePath ) { } }
try { _caCertificatePath = caCertificatePath . toRealPath ( ) . toString ( ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; }
public class StandardDdlParser {

    /**
     * Parses a DDL CREATE COLLATION statement into an {@link AstNode}, based on
     * SQL 92 specifications.
     *
     * @param tokens     the {@link DdlTokenStream} representing the tokenized DDL content; may not be null
     * @param parentNode the parent {@link AstNode} node; may not be null
     * @return the parsed statement node {@link AstNode}
     * @throws ParsingException if the token stream does not match the expected grammar
     */
    protected AstNode parseCreateCollationStatement(DdlTokenStream tokens, AstNode parentNode) throws ParsingException {
        assert tokens != null;
        assert parentNode != null;
        markStartOfStatement(tokens);
        tokens.consume(STMT_CREATE_COLLATION);
        String name = parseName(tokens);
        AstNode node = nodeFactory().node(name, parentNode, TYPE_CREATE_COLLATION_STATEMENT);
        // character set attribute: FOR <character set name>
        tokens.consume("FOR");
        String charSetName = parseName(tokens);
        node.setProperty(COLLATION_CHARACTER_SET_NAME, charSetName);
        // collation source: FROM <source>
        // TODO author=Horia Chiorean date=1/4/12 description=Only parsing a string atm (should probably be some nested nodes -
        // see StandardDdl.cnd
        tokens.consume("FROM");
        String collationSource = null;
        if (tokens.canConsume("EXTERNAL") || tokens.canConsume("DESC")) {
            // EXTERNAL/DESC source: capture the parenthesized token run verbatim.
            collationSource = consumeParenBoundedTokens(tokens, false);
        } else if (tokens.canConsume("TRANSLATION")) {
            // TRANSLATION <name> [THEN COLLATION <name>]
            StringBuilder translationCollation = new StringBuilder("TRANSLATION ").append(tokens.consume());
            if (tokens.canConsume("THEN", "COLLATION")) {
                translationCollation.append(" THEN COLLATION ");
                translationCollation.append(parseName(tokens));
            }
            collationSource = translationCollation.toString();
        } else {
            // Plain collation name.
            collationSource = parseName(tokens);
        }
        node.setProperty(COLLATION_SOURCE, collationSource);
        // pad attribute: PAD SPACE | NO PAD (optional)
        if (tokens.canConsume("PAD", "SPACE")) {
            node.setProperty(PAD_ATTRIBUTE, PAD_ATTRIBUTE_PAD);
        } else if (tokens.canConsume("NO", "PAD")) {
            node.setProperty(PAD_ATTRIBUTE, PAD_ATTRIBUTE_NO_PAD);
        }
        parseUntilTerminator(tokens);
        markEndOfStatement(tokens, node);
        return node;
    }
}
public class SetLabelsForNumbersBot { /** * Logs information about entities changed so far . * @ param entityId * the id of the modified item * @ param numberLabel * the label written * @ param languages * the list of languages for which the label was set */ protected void logEntityModification ( EntityIdValue entityId , String numberLabel , ArrayList < String > languages ) { } }
modifiedEntities ++ ; System . out . println ( entityId . getId ( ) + ": adding label " + numberLabel + " for languages " + languages . toString ( ) + " (" + modifiedEntities + " entities modified so far)" ) ; this . logfile . println ( entityId . getId ( ) + "," + numberLabel + ",\"" + languages . toString ( ) + "\"" ) ; if ( modifiedEntities % 10 == 0 ) { this . logfile . flush ( ) ; }
public class BlobSnippets {

    /**
     * Example of creating an ACL entry granting READER role to all
     * authenticated users on this blob.
     * [TARGET createAcl(Acl)]
     *
     * @return the created ACL entry
     */
    public Acl createAcl() {
        // [START createAcl]
        Acl acl = blob.createAcl(Acl.of(User.ofAllAuthenticatedUsers(), Acl.Role.READER));
        // [END createAcl]
        return acl;
    }
}