signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ActiveMqQueue { /** * Destroy method . */ public void destroy ( ) { } }
try { super . destroy ( ) ; } finally { closeQuietly ( connection ) ; closeQuietly ( messageProducer ) ; closeQuietly ( producerSession ) ; closeQuietly ( messageConsumer ) ; closeQuietly ( consumerSession ) ; if ( connectionFactory != null && myOwnConnectionFactory ) { connectionFactory = null ; } }
public class Iso8601Format { /** * / * [ deutsch ] * < p > Liefert einen { @ code ChronoPrinter } mit dem angegebenen Dezimalstil zur Ausgabe einer Uhrzeit * im < i > basic < / i > - Format & quot ; HHmm [ ss [ , SSSSS ] ] & quot ; . < / p > * < p > Im Interesse einer maximalen Performance wird empfohlen , das Ergebnis dieser Methode in einer * statischen Konstanten zu speichern . < / p > * @ param decimalStyle iso - compatible decimal style * @ param precision controls the precision of output format with constant length * @ return ChronoPrinter as new instance * @ since 4.18 */ public static ChronoPrinter < PlainTime > ofBasicTime ( IsoDecimalStyle decimalStyle , ClockUnit precision ) { } }
ChronoFormatter . Builder < PlainTime > builder = ChronoFormatter . setUp ( PlainTime . class , Locale . ROOT ) ; addWallTime ( builder , false , decimalStyle , precision ) ; return builder . build ( ) . with ( Leniency . STRICT ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcRepresentationItem ( ) { } }
if ( ifcRepresentationItemEClass == null ) { ifcRepresentationItemEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 568 ) ; } return ifcRepresentationItemEClass ;
public class UserRepository {
    /**
     * 'Deletes' the user's account such that they can no longer access it,
     * without removing the record from the database. The username is rewritten
     * as "N=ORIGINALNAME" (truncated to 24 characters), where N is the first
     * free counter value 0..99 that avoids a duplicate-row conflict. The
     * password field is set to the empty string so that no one can log in
     * (since nothing hashes to the empty string). The email address is kept
     * intact except that the '@' is turned into a '#', so mass mailings will
     * not reach the address, yet the original email can still be recovered in
     * case the deletion was accidental and has to be verified through email.
     *
     * @param user the user whose account is to be soft-deleted
     * @throws PersistenceException if the user could not be renamed after 100
     *         attempts or the underlying update fails
     */
    public void deleteUser(final User user) throws PersistenceException {
        // Already soft-deleted: nothing to do.
        if (user.isDeleted()) {
            return;
        }
        executeUpdate(new Operation<Object>() {
            public Object invoke(Connection conn, DatabaseLiaison liaison)
                throws PersistenceException, SQLException {
                // create our modified fields mask
                FieldMask mask = _utable.getFieldMask();
                mask.setModified("username");
                mask.setModified("password");
                mask.setModified("email");
                // set the password to unusable
                user.password = "";
                // 'disable' their email address
                String newEmail = user.email.replace('@', '#');
                user.email = newEmail;
                String oldName = user.username;
                // try counters 0..99 until the renamed user no longer collides
                for (int ii = 0; ii < 100; ii++) {
                    try {
                        user.username = StringUtil.truncate(ii + "=" + oldName, 24);
                        _utable.update(conn, user, mask);
                        return null; // nothing to return
                    } catch (SQLException se) {
                        // only duplicate-name collisions are retried; anything
                        // else is a real failure
                        if (!liaison.isDuplicateRowException(se)) {
                            throw se;
                        }
                    }
                }
                // ok we failed to rename the user, lets bust an error
                throw new PersistenceException("Failed to 'delete' the user");
            }
        });
    }
}
public class IntervalCollection {
    /**
     * Combines all intervals into disjoint blocks which neither overlap nor
     * abut.
     *
     * <p>All intervals which overlap or abut each other are merged into a
     * single block. If the interval boundaries are to be preserved instead,
     * then {@link #withSplits()} is probably the more sensible method.</p>
     *
     * <p><img src="doc-files/withBlocks.jpg" alt="withBlocks"></p>
     *
     * @return new interval collection containing disjunct merged blocks
     *         while this instance remains unaffected
     * @since 2.0
     */
    public IntervalCollection<T> withBlocks() {
        // zero or one interval: nothing can be merged
        if (this.intervals.size() < 2) {
            return this;
        }
        Boundary<T> s;
        Boundary<T> e;
        boolean calendrical = this.isCalendrical();
        // calendrical intervals use closed end boundaries, others half-open
        IntervalEdge edge = (calendrical ? IntervalEdge.CLOSED : IntervalEdge.OPEN);
        // the gaps between merged blocks define the block boundaries
        List<ChronoInterval<T>> gaps = this.withGaps().intervals;
        List<ChronoInterval<T>> blocks = new ArrayList<>();
        T start = this.getMinimum();
        for (int i = 0, n = gaps.size(); i < n; i++) {
            // each block ends where the next gap starts
            T end = gaps.get(i).getStart().getTemporal();
            if (calendrical) {
                end = this.getTimeLine().stepBackwards(end);
            }
            s = this.createStartBoundary(start);
            e = Boundary.of(edge, end);
            blocks.add(this.newInterval(s, e));
            // next block starts after the gap
            Boundary<T> b = gaps.get(i).getEnd();
            start = b.getTemporal();
            if (b.isClosed()) {
                start = this.getTimeLine().stepForward(start);
            }
        }
        // final block runs from the last gap end to the collection maximum
        T max = this.getMaximum();
        s = this.createStartBoundary(start);
        if ((max != null) && !calendrical) {
            max = this.getTimeLine().stepForward(max);
        }
        if (max == null) {
            e = Boundary.infiniteFuture();
        } else {
            e = Boundary.of(edge, max);
        }
        blocks.add(this.newInterval(s, e));
        return this.create(blocks);
    }
}
public class WindowsHackReader { /** * Reads into a character buffer using the correct encoding . * @ param cbuf character buffer receiving the data . * @ param off starting offset into the buffer . * @ param len number of characters to read . * @ return the number of characters read or - 1 on end of file . */ public int read ( char [ ] cbuf , int off , int len ) throws IOException { } }
int i = 0 ; for ( i = 0 ; i < len ; i ++ ) { int ch = is . read ( ) ; if ( ch < 0 ) return i == 0 ? - 1 : i ; switch ( ch ) { case - 1 : return i == 0 ? - 1 : i ; case 130 : // unicode 8218 cbuf [ off + i ] = ',' ; break ; case 131 : // unicode 402 cbuf [ off + i ] = 'f' ; break ; case 132 : // unicode 8222 cbuf [ off + i ] = '"' ; break ; case 133 : // unicode 8230 " . . . " cbuf [ off + i ] = ( char ) 8230 ; break ; case 134 : // unicode 8224 ( dagger ) cbuf [ off + i ] = '+' ; break ; case 135 : // unicode 8225 ( double dagger ) cbuf [ off + i ] = '+' ; break ; case 136 : // unicode 710 cbuf [ off + i ] = '^' ; break ; case 137 : // unicode 8240 ( per - mille 0/00) cbuf [ off + i ] = ( char ) 8240 ; break ; case 138 : // unicode 352 cbuf [ off + i ] = 'S' ; break ; case 139 : // unicode 8249 cbuf [ off + i ] = '<' ; break ; case 140 : // unicode 338 ( OE ) cbuf [ off + i ] = 'O' ; break ; case 145 : // unicode 8216 case 146 : // unicode 8217 cbuf [ off + i ] = '\'' ; break ; case 147 : // unicode 8220 case 148 : // unicode 8221 cbuf [ off + i ] = ( char ) '"' ; break ; case 149 : // unicode 8226 ( bullet ) cbuf [ off + i ] = ( char ) '*' ; break ; case 150 : // unicode 8211 case 151 : // unicode 8212 cbuf [ off + i ] = ( char ) '-' ; break ; case 152 : // unicode 732 cbuf [ off + i ] = ( char ) '~' ; break ; case 153 : // unicode 8482 ( trademark ) cbuf [ off + i ] = ( char ) 8482 ; break ; case 154 : // unicode 353 cbuf [ off + i ] = 's' ; break ; case 155 : // unicode 8250 cbuf [ off + i ] = '>' ; break ; case 156 : // unicode 339 ( oe ) cbuf [ off + i ] = 'o' ; break ; case 376 : // unicode 376 ( Y with umlaut ) cbuf [ off + i ] = 'Y' ; break ; default : cbuf [ off + i ] = ( char ) ch ; } } return i ;
public class EmailConverter { /** * Delegates to { @ link # emlToMimeMessage ( String , Session ) } using a dummy { @ link Session } instance and passes the result to { @ link * # mimeMessageToEmail ( MimeMessage ) } ; */ public static EmailPopulatingBuilder emlToEmailBuilder ( @ Nonnull final String eml ) { } }
final MimeMessage mimeMessage = emlToMimeMessage ( checkNonEmptyArgument ( eml , "eml" ) , createDummySession ( ) ) ; return mimeMessageToEmailBuilder ( mimeMessage ) ;
public class WorkflowTemplateServiceClient { /** * Creates new workflow template . * < p > Sample code : * < pre > < code > * try ( WorkflowTemplateServiceClient workflowTemplateServiceClient = WorkflowTemplateServiceClient . create ( ) ) { * RegionName parent = RegionName . of ( " [ PROJECT ] " , " [ REGION ] " ) ; * WorkflowTemplate template = WorkflowTemplate . newBuilder ( ) . build ( ) ; * WorkflowTemplate response = workflowTemplateServiceClient . createWorkflowTemplate ( parent , template ) ; * < / code > < / pre > * @ param parent Required . The " resource name " of the region , as described in * https : / / cloud . google . com / apis / design / resource _ names of the form * ` projects / { project _ id } / regions / { region } ` * @ param template Required . The Dataproc workflow template to create . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final WorkflowTemplate createWorkflowTemplate ( RegionName parent , WorkflowTemplate template ) { } }
CreateWorkflowTemplateRequest request = CreateWorkflowTemplateRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setTemplate ( template ) . build ( ) ; return createWorkflowTemplate ( request ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcStructuralPlanarAction ( ) { } }
if ( ifcStructuralPlanarActionEClass == null ) { ifcStructuralPlanarActionEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 654 ) ; } return ifcStructuralPlanarActionEClass ;
public class ValueTransformer { /** * / * ( non - Javadoc ) * @ see com . oath . cyclops . types . Zippable # zip ( java . lang . Iterable , java . util . function . BiFunction ) */ public < T2 , R > ValueTransformer < W , R > zip ( Iterable < ? extends T2 > iterable , BiFunction < ? super T , ? super T2 , ? extends R > fn ) { } }
return this . unitAnyM ( this . transformerStream ( ) . map ( v -> v . zip ( iterable , fn ) ) ) ;
public class CollectionUtils {
    /**
     * Creates an insertion-ordered set pre-populated with the given values.
     * The {@code itemType} argument only drives generic type inference and is
     * never inspected at runtime.
     *
     * @param <T> the generic type
     * @param itemType the item type
     * @param objects the objects
     * @return a mutable, insertion-ordered set of the given objects
     */
    @SuppressWarnings("unchecked")
    public static <T> Set<T> asSet(Class<T> itemType, T... objects) {
        Set<T> set = new LinkedHashSet<T>();
        for (T element : objects) {
            set.add(element);
        }
        return set;
    }
}
public class TaskManagerServices {
    /**
     * Shuts the {@link TaskExecutor} services down.
     *
     * Every service is shut down even when an earlier one fails: the first
     * exception is kept and later ones are attached to it as suppressed
     * exceptions via ExceptionUtils.firstOrSuppressed, then rethrown wrapped
     * in a FlinkException at the end.
     */
    public void shutDown() throws FlinkException {
        Exception exception = null;
        try {
            taskManagerStateStore.shutdown();
        } catch (Exception e) {
            exception = e;
        }
        try {
            memoryManager.shutdown();
        } catch (Exception e) {
            exception = ExceptionUtils.firstOrSuppressed(e, exception);
        }
        try {
            ioManager.shutdown();
        } catch (Exception e) {
            exception = ExceptionUtils.firstOrSuppressed(e, exception);
        }
        try {
            networkEnvironment.shutdown();
        } catch (Exception e) {
            exception = ExceptionUtils.firstOrSuppressed(e, exception);
        }
        try {
            kvStateService.shutdown();
        } catch (Exception e) {
            exception = ExceptionUtils.firstOrSuppressed(e, exception);
        }
        try {
            taskSlotTable.stop();
        } catch (Exception e) {
            exception = ExceptionUtils.firstOrSuppressed(e, exception);
        }
        try {
            jobLeaderService.stop();
        } catch (Exception e) {
            exception = ExceptionUtils.firstOrSuppressed(e, exception);
        }
        taskEventDispatcher.clearAll();
        // rethrow the first failure (with the others suppressed) if any occurred
        if (exception != null) {
            throw new FlinkException("Could not properly shut down the TaskManager services.", exception);
        }
    }
}
public class MultiPartParser {
    /**
     * Consumes bytes while the parser is in one of the delimiter states.
     * A LF token ends the delimiter line and starts a new body part; a "--"
     * immediately after the delimiter marks the closing delimiter and moves
     * the parser to the EPILOGUE state; any other byte is treated as padding.
     */
    private void parseDelimiter(ByteBuffer buffer) {
        while (__delimiterStates.contains(_state) && hasNextByte(buffer)) {
            HttpTokens.Token t = next(buffer);
            if (t == null)
                return;
            // end of the delimiter line: a new part begins
            if (t.getType() == HttpTokens.Type.LF) {
                setState(State.BODY_PART);
                if (LOG.isDebugEnabled())
                    LOG.debug("startPart {}", this);
                _handler.startPart();
                return;
            }
            switch (_state) {
                case DELIMITER:
                    // a '-' may begin the "--" of a closing delimiter
                    if (t.getChar() == '-')
                        setState(State.DELIMITER_CLOSE);
                    else
                        setState(State.DELIMITER_PADDING);
                    continue;
                case DELIMITER_CLOSE:
                    // second '-' confirms the closing delimiter
                    if (t.getChar() == '-') {
                        setState(State.EPILOGUE);
                        return;
                    }
                    setState(State.DELIMITER_PADDING);
                    continue;
                case DELIMITER_PADDING:
                default:
                    // ignore padding bytes until LF
            }
        }
    }
}
public class DirectoryScanner {
    /**
     * Main program loop for DirectoryScanner.
     * Runs "reconcile()" periodically under the masterThread.
     *
     * A cycle is skipped when shutdown has been requested or a distributed
     * upgrade is in progress. Exceptions are logged and swallowed so the
     * executor can run the next cycle; Errors are rethrown to terminate the
     * periodic scanner permanently.
     */
    @Override
    public void run() {
        try {
            InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_NOT_STARTED);
            if (!shouldRun) {
                // shutdown has been activated
                LOG.warn("this cycle terminating immediately because 'shouldRun' has been deactivated");
                return;
            }
            Integer[] namespaceIds = datanode.getAllNamespaces();
            for (Integer nsid : namespaceIds) {
                UpgradeManagerDatanode um = datanode.getUpgradeManager(nsid);
                if (um != null && !um.isUpgradeCompleted()) {
                    // If distributed upgrades underway, exit and wait for next cycle.
                    LOG.warn("this cycle terminating immediately because Distributed Upgrade is in process");
                    return;
                }
            }
            // We're are okay to run - do it
            delta.resetDelta();
            delta.startRecordingDelta();
            checkDifferenceAndReconcile();
        } catch (Exception e) {
            // Log and continue - allows Executor to run again next cycle
            LOG.error("Exception during DirectoryScanner execution - will continue next cycle", e);
        } catch (Error er) {
            // Non-recoverable error - re-throw after logging the problem
            LOG.error("System Error during DirectoryScanner execution - permanently terminating periodic scanner", er);
            throw er;
        } finally {
            // always stop delta recording, even on early return or failure
            delta.stopRecordingDelta();
            InjectionHandler.processEvent(InjectionEvent.DIRECTORY_SCANNER_FINISHED);
        }
    }
}
public class AbstractParser { /** * Return a json object from the provided array . Return an empty object if * there is any problems fetching the named entity data . * @ param jsonArray array of data * @ param index of the object to fetch * @ return json object from the provided array */ protected JSONObject getJSONObject ( final JSONArray jsonArray , final int index ) { } }
JSONObject object = new JSONObject ( ) ; try { object = ( JSONObject ) jsonArray . get ( index ) ; } catch ( JSONException e ) { e . printStackTrace ( ) ; } return object ;
public class DetectCircleGrid { /** * Number of CCW rotations to put selected corner into the canonical location . Only works * when there are 4 possible solutions * @ param g The grid * @ return number of rotations */ static int closestCorner4 ( Grid g ) { } }
double bestDistance = g . get ( 0 , 0 ) . center . normSq ( ) ; int bestIdx = 0 ; double d = g . get ( 0 , g . columns - 1 ) . center . normSq ( ) ; if ( d < bestDistance ) { bestDistance = d ; bestIdx = 3 ; } d = g . get ( g . rows - 1 , g . columns - 1 ) . center . normSq ( ) ; if ( d < bestDistance ) { bestDistance = d ; bestIdx = 2 ; } d = g . get ( g . rows - 1 , 0 ) . center . normSq ( ) ; if ( d < bestDistance ) { bestIdx = 1 ; } return bestIdx ;
public class XMLStreamEvents { /** * Shortcut to get the value of the given attribute name . */ public UnprotectedStringBuffer getAttributeValueWithNamespaceURI ( CharSequence uri , CharSequence name ) { } }
for ( Attribute attr : event . attributes ) if ( attr . localName . equals ( name ) && getNamespaceURI ( attr . namespacePrefix ) . equals ( uri ) ) return attr . value ; return null ;
public class ComparableTuple { /** * from interface Comparable */ public int compareTo ( ComparableTuple < L , R > other ) { } }
int rv = ObjectUtil . compareTo ( left , other . left ) ; return ( rv != 0 ) ? rv : ObjectUtil . compareTo ( right , other . right ) ;
public class MessageDigest { /** * Update the digest using the specified ByteBuffer . The digest is * updated using the { @ code input . remaining ( ) } bytes starting * at { @ code input . position ( ) } . * Upon return , the buffer ' s position will be equal to its limit ; * its limit will not have changed . * @ param input the ByteBuffer * @ since 1.5 */ public final void update ( ByteBuffer input ) { } }
if ( input == null ) { throw new NullPointerException ( ) ; } engineUpdate ( input ) ; state = IN_PROGRESS ;
public class MDLV2000Writer {
    /**
     * Formats a String to fit into the connection table: trims surrounding
     * whitespace, truncates to at most {@code le} characters, and right-pads
     * with spaces to exactly {@code le} characters.
     *
     * @param s The String to be formatted
     * @param le The length of the String
     * @return The String to be written in the connection table
     */
    protected static String formatMDLString(String s, int le) {
        s = s.trim();
        if (s.length() > le) {
            return s.substring(0, le);
        }
        // Pad with a StringBuilder instead of repeated String concatenation,
        // which allocated a new String per appended space.
        StringBuilder padded = new StringBuilder(le);
        padded.append(s);
        while (padded.length() < le) {
            padded.append(' ');
        }
        return padded.toString();
    }
}
public class P3DatabaseReader { /** * Read task relationships . */ private void readRelationships ( ) { } }
for ( MapRow row : m_tables . get ( "REL" ) ) { Task predecessor = m_activityMap . get ( row . getString ( "PREDECESSOR_ACTIVITY_ID" ) ) ; Task successor = m_activityMap . get ( row . getString ( "SUCCESSOR_ACTIVITY_ID" ) ) ; if ( predecessor != null && successor != null ) { Duration lag = row . getDuration ( "LAG_VALUE" ) ; RelationType type = row . getRelationType ( "LAG_TYPE" ) ; successor . addPredecessor ( predecessor , type , lag ) ; } }
public class StreamEx { /** * Returns a sequential ordered { @ code StreamEx } containing the results of * applying the given mapper function to the all possible pairs of elements * taken from the provided array . * The indices of two array elements supplied to the mapper function are * always ordered : first element index is strictly less than the second * element index . The pairs are lexicographically ordered . For example , for * the array of three elements the stream of three elements is created : * { @ code mapper . apply ( array [ 0 ] , array [ 1 ] ) } , * { @ code mapper . apply ( array [ 0 ] , array [ 2 ] ) } and * { @ code mapper . apply ( array [ 1 ] , array [ 2 ] ) } . The number of elements in the * resulting stream is { @ code array . length * ( array . length + 1L ) / 2 } . * @ param < U > type of the array elements * @ param < T > type of the stream elements * @ param array an array to take the elements from * @ param mapper a non - interfering , stateless function to apply to each pair * of array elements . * @ return a new { @ code StreamEx } * @ see EntryStream # ofPairs ( Object [ ] ) * @ since 0.3.6 */ public static < U , T > StreamEx < T > ofPairs ( U [ ] array , BiFunction < ? super U , ? super U , ? extends T > mapper ) { } }
return ofPairs ( Arrays . asList ( array ) , mapper ) ;
public class ClassInfo { /** * Return public metheds according to given name */ public final List < MethodInfo > getMethods ( String name ) { } }
List < MethodInfo > namedMethod = methods . get ( name ) ; if ( null == namedMethod ) return Collections . emptyList ( ) ; else return namedMethod ;
public class StrSubstitutor { /** * Sets the variable prefix to use . * The variable prefix is the character or characters that identify the * start of a variable . This method allows a string prefix to be easily set . * @ param prefix the prefix for variables , not null * @ return this , to enable chaining * @ throws IllegalArgumentException if the prefix is null */ public StrSubstitutor setVariablePrefix ( final String prefix ) { } }
if ( prefix == null ) { throw new IllegalArgumentException ( "Variable prefix must not be null!" ) ; } return setVariablePrefixMatcher ( StrMatcher . stringMatcher ( prefix ) ) ;
public class JaxWsUtils {
    /**
     * Gets the targetNamespace from the implementation bean.
     * If the targetNamespace attribute can be read from the annotation, it is
     * returned; otherwise a package-derived default (or UNKNOWN_NAMESPACE) is
     * used. Both WebService and WebServiceProvider share this logic.
     *
     * @param classInfo the class to inspect
     * @return the resolved target namespace, or "" when no annotation is found
     */
    public static String getImplementedTargetNamespace(ClassInfo classInfo) {
        // package-derived fallback, or UNKNOWN_NAMESPACE when empty
        String defaultValue = getNamespace(classInfo, null);
        if (StringUtils.isEmpty(defaultValue)) {
            defaultValue = JaxWsConstants.UNKNOWN_NAMESPACE;
        }
        AnnotationInfo annotationInfo = getAnnotationInfoFromClass(classInfo, JaxWsConstants.TARGETNAMESPACE_ATTRIBUTE);
        if (annotationInfo == null) {
            // NOTE(review): returns "" instead of the computed defaultValue when
            // no annotation is present, while the javadoc suggests the package
            // name should be the default — confirm this asymmetry is intentional.
            return "";
        }
        AnnotationValue attrValue = annotationInfo.getValue(JaxWsConstants.TARGETNAMESPACE_ATTRIBUTE);
        String attrFromAnnotation = attrValue == null ? null : attrValue.getStringValue().trim();
        // prefer the annotation value; fall back to the default when blank
        return StringUtils.isEmpty(attrFromAnnotation) ? defaultValue : attrFromAnnotation;
    }
}
public class JcrTools { /** * Register new mixin type if does not exists on workspace * @ param session the JCR session * @ param mixin the mixin name to register * @ throws RepositoryException */ public static void registerMixinType ( Session session , String mixin ) throws RepositoryException { } }
NodeTypeManager nodeTypeManager = session . getWorkspace ( ) . getNodeTypeManager ( ) ; if ( ! nodeTypeManager . hasNodeType ( mixin ) ) { NodeTypeTemplate nodeTypeTemplate = nodeTypeManager . createNodeTypeTemplate ( ) ; nodeTypeTemplate . setMixin ( true ) ; nodeTypeTemplate . setName ( mixin ) ; NodeTypeDefinition [ ] nodeTypes = new NodeTypeDefinition [ ] { nodeTypeTemplate } ; nodeTypeManager . registerNodeTypes ( nodeTypes , true ) ; }
public class AmazonEC2Client { /** * Associates a CIDR block with your VPC . You can associate a secondary IPv4 CIDR block , or you can associate an * Amazon - provided IPv6 CIDR block . The IPv6 CIDR block size is fixed at / 56. * For more information about associating CIDR blocks with your VPC and applicable restrictions , see < a * href = " https : / / docs . aws . amazon . com / AmazonVPC / latest / UserGuide / VPC _ Subnets . html # VPC _ Sizing " > VPC and Subnet * Sizing < / a > in the < i > Amazon Virtual Private Cloud User Guide < / i > . * @ param associateVpcCidrBlockRequest * @ return Result of the AssociateVpcCidrBlock operation returned by the service . * @ sample AmazonEC2 . AssociateVpcCidrBlock * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / AssociateVpcCidrBlock " target = " _ top " > AWS API * Documentation < / a > */ @ Override public AssociateVpcCidrBlockResult associateVpcCidrBlock ( AssociateVpcCidrBlockRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAssociateVpcCidrBlock ( request ) ;
public class Cursor { /** * Closes underlying result set and , optionally , the whole connection . * @ param closeConnection Close underlying connection also . Be aware of this if * connection is used somewhere else . * @ throws SQLException */ public void close ( boolean closeConnection ) throws SQLException { } }
if ( closeConnection ) { resultSet . getStatement ( ) . getConnection ( ) . close ( ) ; } else { resultSet . close ( ) ; } finished = true ;
public class NoopTaskProcessorFactory { /** * ( non - Javadoc ) * @ see * org . duracloud . mill . workman . TaskProcessorFactoryBase # isSupported ( org . duracloud * . mill . domain . Task ) */ @ Override public boolean isSupported ( Task task ) { } }
return task . getType ( ) . equals ( Task . Type . NOOP ) ;
public class DistanceMoment {
    /**
     * Evaluate the 12 descriptors used to characterize the 3D shape of a
     * molecule.
     *
     * The descriptors are the first three statistical moments (mean, variance,
     * skewness) of four atomic distance distributions: distance to the
     * geometric centroid (ctd), to the atom closest to the centroid (cst), to
     * the atom farthest from the centroid (fct), and to the atom farthest
     * from fct (ftf).
     *
     * @param atomContainer The molecule to consider, should have 3D coordinates
     * @return A 12 element array containing the descriptors.
     * @throws CDKException if there are no 3D coordinates
     */
    public static float[] generateMoments(IAtomContainer atomContainer) throws CDKException {
        // lets check if we have 3D coordinates
        // NOTE(review): despite the comment above, no explicit 3D-coordinate
        // check happens here; presumably getPoint3d() fails or returns null
        // without 3D coordinates — confirm where the CDKException originates.
        Iterator<IAtom> atoms;
        int natom = atomContainer.getAtomCount();
        Point3d ctd = getGeometricCenter(atomContainer); // centroid
        Point3d cst = new Point3d(); // atom closest to the centroid
        Point3d fct = new Point3d(); // atom farthest from the centroid
        Point3d ftf = new Point3d(); // atom farthest from fct
        double[] distCtd = new double[natom];
        double[] distCst = new double[natom];
        double[] distFct = new double[natom];
        double[] distFtf = new double[natom];
        atoms = atomContainer.atoms().iterator();
        int counter = 0;
        double min = Double.MAX_VALUE;
        // distances are non-negative, so MIN_VALUE (smallest positive double)
        // works as the "unset maximum" sentinel here
        double max = Double.MIN_VALUE;
        // eval dist to centroid
        while (atoms.hasNext()) {
            IAtom atom = atoms.next();
            Point3d p = atom.getPoint3d();
            double d = p.distance(ctd);
            distCtd[counter++] = d;
            if (d < min) {
                cst.x = p.x;
                cst.y = p.y;
                cst.z = p.z;
                min = d;
            }
            if (d > max) {
                fct.x = p.x;
                fct.y = p.y;
                fct.z = p.z;
                max = d;
            }
        }
        // eval dist to cst
        atoms = atomContainer.atoms().iterator();
        counter = 0;
        while (atoms.hasNext()) {
            IAtom atom = atoms.next();
            Point3d p = atom.getPoint3d();
            double d = p.distance(cst);
            distCst[counter++] = d;
        }
        // eval dist to fct (also locates ftf, the atom farthest from fct)
        atoms = atomContainer.atoms().iterator();
        counter = 0;
        max = Double.MIN_VALUE;
        while (atoms.hasNext()) {
            IAtom atom = atoms.next();
            Point3d p = atom.getPoint3d();
            double d = p.distance(fct);
            distFct[counter++] = d;
            if (d > max) {
                ftf.x = p.x;
                ftf.y = p.y;
                ftf.z = p.z;
                max = d;
            }
        }
        // eval dist to ftf
        atoms = atomContainer.atoms().iterator();
        counter = 0;
        while (atoms.hasNext()) {
            IAtom atom = atoms.next();
            Point3d p = atom.getPoint3d();
            double d = p.distance(ftf);
            distFtf[counter++] = d;
        }
        // first three moments (mean, variance, skewness) per distance set
        float[] moments = new float[12];
        float mean = mu1(distCtd);
        float sigma2 = mu2(distCtd, mean);
        float skewness = mu3(distCtd, mean, Math.sqrt(sigma2));
        moments[0] = mean;
        moments[1] = sigma2;
        moments[2] = skewness;
        mean = mu1(distCst);
        sigma2 = mu2(distCst, mean);
        skewness = mu3(distCst, mean, Math.sqrt(sigma2));
        moments[3] = mean;
        moments[4] = sigma2;
        moments[5] = skewness;
        mean = mu1(distFct);
        sigma2 = mu2(distFct, mean);
        skewness = mu3(distFct, mean, Math.sqrt(sigma2));
        moments[6] = mean;
        moments[7] = sigma2;
        moments[8] = skewness;
        mean = mu1(distFtf);
        sigma2 = mu2(distFtf, mean);
        skewness = mu3(distFtf, mean, Math.sqrt(sigma2));
        moments[9] = mean;
        moments[10] = sigma2;
        moments[11] = skewness;
        return moments;
    }
}
public class Get { /** * Retrieves a specific row from the element . If the element isn ' t present * or a table , or doesn ' t have that many rows a null value will be returned . * @ param rowNum - the row number of the table to obtain - note , row numbering * starts at 0 * @ return List : a list of the table cells in the row as WebElements */ @ SuppressWarnings ( "squid:S1168" ) public Element tableRow ( int rowNum ) { } }
Element rows = tableRows ( ) ; if ( rows == null ) { return null ; } if ( numOfTableRows ( ) < rowNum ) { return null ; } return rows . get ( rowNum ) ;
public class Convert { /** * Register void . * @ param clazz the clazz * @ param converter the converter */ public static void register ( Class < ? > clazz , Function < Object , ? > converter ) { } }
if ( clazz == null || converter == null ) { log . warn ( "Trying to register either a null class ({0}) or a null converter ({1}). Ignoring!" , clazz , converter ) ; return ; } converters . put ( clazz , converter ) ;
public class CmsVfsService {
    /**
     * Creates a bean representing a historical resource version.<p>
     *
     * @param cms the current CMS context
     * @param historyRes the historical resource
     * @param offline true if this resource was read from the offline project
     * @param maxVersion the largest version number found
     * @return the bean representing the historical resource
     * @throws CmsException if something goes wrong
     */
    private CmsHistoryResourceBean createHistoryResourceBean(
        CmsObject cms,
        CmsResource historyRes,
        boolean offline,
        int maxVersion) throws CmsException {

        CmsHistoryResourceBean result = new CmsHistoryResourceBean();
        Locale locale = OpenCms.getWorkplaceManager().getWorkplaceLocale(cms);
        result.setStructureId(historyRes.getStructureId());
        result.setRootPath(historyRes.getRootPath());
        result.setDateLastModified(formatDate(historyRes.getDateLastModified(), locale));
        CmsUUID userId = historyRes.getUserLastModified();
        // fall back to the raw user id when the user can no longer be read
        String userName = userId.toString();
        try {
            CmsUser user = cms.readUser(userId);
            userName = user.getName();
        } catch (CmsException e) {
            LOG.warn(e.getLocalizedMessage(), e);
        }
        result.setUserLastModified(userName);
        result.setSize(historyRes.getLength());
        if (historyRes instanceof I_CmsHistoryResource) {
            // true history entry: enrich with publish info and properties
            int publishTag = ((I_CmsHistoryResource)historyRes).getPublishTag();
            CmsHistoryProject project = cms.readHistoryProject(publishTag);
            long publishDate = project.getPublishingDate();
            result.setDatePublished(formatDate(publishDate, locale));
            int version = ((I_CmsHistoryResource)historyRes).getVersion();
            // NOTE(review): the bean version uses historyRes.getVersion() while
            // the online flag compares maxVersion against the local 'version'
            // read via the I_CmsHistoryResource cast — confirm both resolve to
            // the same value.
            result.setVersion(
                new CmsHistoryVersion(
                    Integer.valueOf(historyRes.getVersion()),
                    maxVersion == version ? OfflineOnline.online : null));
            List<CmsProperty> historyProperties = cms.readHistoryPropertyObjects((I_CmsHistoryResource)historyRes);
            Map<String, CmsProperty> historyPropertyMap = CmsProperty.toObjectMap(historyProperties);
            CmsProperty titleProp = CmsProperty.wrapIfNull(historyPropertyMap.get(CmsPropertyDefinition.PROPERTY_TITLE));
            CmsProperty descProp = CmsProperty.wrapIfNull(historyPropertyMap.get(CmsPropertyDefinition.PROPERTY_DESCRIPTION));
            result.setTitle(titleProp.getValue());
            result.setDescription(descProp.getValue());
        } else {
            // plain resource: only record which project it was read from
            if (offline) {
                result.setVersion(new CmsHistoryVersion(null, OfflineOnline.offline));
            } else {
                result.setVersion(new CmsHistoryVersion(null, OfflineOnline.online));
            }
        }
        return result;
    }
}
public class Graphics { /** * Sets the background to a RGB or HSB and alpha value . * @ param color * The RGB or HSB value of the background . */ public void setBackgroud ( Color color ) { } }
gl . glClearColor ( ( float ) color . getRed ( ) , ( float ) color . getGreen ( ) , ( float ) color . getBlue ( ) , ( float ) ( color . getAlpha ( ) * getAlpha ( ) ) ) ;
public class ValueNode { public static ValueNode toValueNode ( Object o ) { } }
if ( o == null ) return NULL_NODE ; if ( o instanceof ValueNode ) return ( ValueNode ) o ; if ( o instanceof Class ) return createClassNode ( ( Class ) o ) ; else if ( isPath ( o ) ) return new PathNode ( o . toString ( ) , false , false ) ; else if ( isJson ( o ) ) return createJsonNode ( o . toString ( ) ) ; else if ( o instanceof String ) return createStringNode ( o . toString ( ) , true ) ; else if ( o instanceof Character ) return createStringNode ( o . toString ( ) , false ) ; else if ( o instanceof Number ) return createNumberNode ( o . toString ( ) ) ; else if ( o instanceof Boolean ) return createBooleanNode ( o . toString ( ) ) ; else if ( o instanceof Pattern ) return createPatternNode ( ( Pattern ) o ) ; else throw new JsonPathException ( "Could not determine value type" ) ;
public class BoundedBuffer {
    /**
     * @awisniew - ADDED
     * (non-Javadoc)
     * @see java.util.concurrent.BlockingQueue#remove(java.lang.Object)
     *
     * Removes a single instance of the given element, searching the expedited
     * ring buffer first and then the main ring buffer. Each buffer is scanned
     * either in one pass (no wrap) or two passes (take index above put index,
     * i.e. the live region wraps around the end of the array). On a match the
     * remaining elements are shifted to close the gap and the used-slot
     * counter is decremented; removals from the main buffer also notify a
     * waiting put thread.
     */
    @Override
    public boolean remove(Object o) {
        if (o == null) {
            return false;
        }
        if (size() == 0) {
            return false;
        }
        synchronized (this) {
            // First check the expedited buffer
            synchronized (lock) {
                // Check if we wrap around the end of the array before iterating
                if (expeditedPutIndex > expeditedTakeIndex) {
                    for (int i = expeditedTakeIndex; i <= expeditedPutIndex; i++) {
                        if (o.equals(expeditedBuffer[i])) {
                            // Remove element and shift all remaining elements
                            for (int j = i; j < expeditedPutIndex; j++) {
                                expeditedBuffer[j] = expeditedBuffer[j + 1];
                            }
                            // Null the putIndex
                            expeditedBuffer[expeditedPutIndex] = null;
                            expeditedPutIndex--;
                            // Decrement used slots counter
                            numberOfUsedExpeditedSlots.getAndDecrement();
                            // TODO if expedited is added for put or offer with timeout add notification here
                            return true;
                        }
                    }
                } else {
                    // We wrap around the array. Loop through in two passes (upper and lower)
                    for (int i = expeditedTakeIndex; i < expeditedBuffer.length; i++) {
                        if (o.equals(expeditedBuffer[i])) {
                            // Remove element and shift all remaining elements up
                            for (int j = i; j > expeditedTakeIndex; j--) {
                                expeditedBuffer[j] = expeditedBuffer[j - 1];
                            }
                            // Null the putIndex
                            expeditedBuffer[expeditedTakeIndex] = null;
                            if (expeditedTakeIndex == expeditedBuffer.length - 1) {
                                expeditedTakeIndex = 0;
                            } else {
                                expeditedTakeIndex++;
                            }
                            // Decrement used slots counter
                            numberOfUsedExpeditedSlots.getAndDecrement();
                            // TODO if expedited is added for put or offer with timeout add notification here
                            return true;
                        }
                    }
                    for (int i = 0; i < expeditedPutIndex; i++) {
                        if (o.equals(expeditedBuffer[i])) {
                            // Remove element and shift all remaining elements down
                            for (int j = i; j < expeditedPutIndex; j++) {
                                expeditedBuffer[j] = expeditedBuffer[j + 1];
                            }
                            // Null the putIndex
                            expeditedBuffer[expeditedPutIndex] = null;
                            expeditedPutIndex--;
                            // Decrement used slots counter
                            numberOfUsedExpeditedSlots.getAndDecrement();
                            // TODO if expedited is added for put or offer with timeout add notification here
                            return true;
                        }
                    }
                }
            }
            // Next check the main buffer
            // Check if we wrap around the end of the array before iterating
            if (putIndex > takeIndex) {
                for (int i = takeIndex; i <= putIndex; i++) {
                    if (o.equals(buffer[i])) {
                        // Remove element and shift all remaining elements
                        for (int j = i; j < putIndex; j++) {
                            buffer[j] = buffer[j + 1];
                        }
                        // Null the putIndex
                        buffer[putIndex] = null;
                        putIndex--;
                        // Decrement used slots counter
                        numberOfUsedSlots.getAndDecrement();
                        // Notify a waiting put thread that space has cleared
                        notifyPut_();
                        return true;
                    }
                }
            } else {
                // We wrap around the array. Loop through in two passes (upper and lower)
                for (int i = takeIndex; i < buffer.length; i++) {
                    if (o.equals(buffer[i])) {
                        // Remove element and shift all remaining elements up
                        for (int j = i; j > takeIndex; j--) {
                            buffer[j] = buffer[j - 1];
                        }
                        // Null the putIndex
                        buffer[takeIndex] = null;
                        if (takeIndex == buffer.length - 1) {
                            takeIndex = 0;
                        } else {
                            takeIndex++;
                        }
                        // Decrement used slots counter
                        numberOfUsedSlots.getAndDecrement();
                        // Notify a waiting put thread that space has cleared
                        notifyPut_();
                        return true;
                    }
                }
                for (int i = 0; i < putIndex; i++) {
                    if (o.equals(buffer[i])) {
                        // Remove element and shift all remaining elements down
                        for (int j = i; j < putIndex; j++) {
                            buffer[j] = buffer[j + 1];
                        }
                        // Null the putIndex
                        buffer[putIndex] = null;
                        putIndex--;
                        // Decrement used slots counter
                        numberOfUsedSlots.getAndDecrement();
                        // Notify a waiting put thread that space has cleared
                        notifyPut_();
                        return true;
                    }
                }
            }
        }
        return false;
    }
}
public class MediaBatchOperations { /** * Parses the batch result . * @ param response * the response * @ param mediaBatchOperations * the media batch operations * @ throws IOException * Signals that an I / O exception has occurred . * @ throws ServiceException * the service exception */ public void parseBatchResult ( ClientResponse response ) throws IOException , ServiceException { } }
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream ( ) ; InputStream inputStream = response . getEntityInputStream ( ) ; ReaderWriter . writeTo ( inputStream , byteArrayOutputStream ) ; response . setEntityInputStream ( new ByteArrayInputStream ( byteArrayOutputStream . toByteArray ( ) ) ) ; JobInfo jobInfo ; List < DataSource > parts = parseParts ( response . getEntityInputStream ( ) , response . getHeaders ( ) . getFirst ( "Content-Type" ) ) ; if ( parts . size ( ) == 0 || parts . size ( ) > entityBatchOperations . size ( ) ) { throw new UniformInterfaceException ( String . format ( "Batch response from server does not contain the correct amount " + "of parts (expecting %d, received %d instead)" , parts . size ( ) , entityBatchOperations . size ( ) ) , response ) ; } for ( int i = 0 ; i < parts . size ( ) ; i ++ ) { DataSource ds = parts . get ( i ) ; EntityBatchOperation entityBatchOperation = entityBatchOperations . get ( i ) ; StatusLine status = StatusLine . create ( ds ) ; InternetHeaders headers = parseHeaders ( ds ) ; InputStream content = parseEntity ( ds ) ; if ( status . getStatus ( ) >= HTTP_ERROR ) { InBoundHeaders inBoundHeaders = new InBoundHeaders ( ) ; @ SuppressWarnings ( "unchecked" ) Enumeration < Header > e = headers . getAllHeaders ( ) ; while ( e . hasMoreElements ( ) ) { Header header = e . nextElement ( ) ; inBoundHeaders . putSingle ( header . getName ( ) , header . getValue ( ) ) ; } ClientResponse clientResponse = new ClientResponse ( status . getStatus ( ) , inBoundHeaders , content , null ) ; UniformInterfaceException uniformInterfaceException = new UniformInterfaceException ( clientResponse ) ; throw uniformInterfaceException ; } else if ( entityBatchOperation instanceof Job . CreateBatchOperation ) { try { jobInfo = oDataAtomUnmarshaller . unmarshalEntry ( content , JobInfo . class ) ; Job . CreateBatchOperation jobCreateBatchOperation = ( Job . 
CreateBatchOperation ) entityBatchOperation ; jobCreateBatchOperation . setJobInfo ( jobInfo ) ; } catch ( JAXBException e ) { throw new ServiceException ( e ) ; } } else if ( entityBatchOperation instanceof Task . CreateBatchOperation ) { try { oDataAtomUnmarshaller . unmarshalEntry ( content , TaskInfo . class ) ; } catch ( JAXBException e ) { throw new ServiceException ( e ) ; } } }
public class Scope { /** * Returns a copy of the child list , with each child cast to an * { @ link AstNode } . * @ throws ClassCastException if any non - { @ code AstNode } objects are * in the child list , e . g . if this method is called after the code * generator begins the tree transformation . */ public List < AstNode > getStatements ( ) { } }
List < AstNode > stmts = new ArrayList < AstNode > ( ) ; Node n = getFirstChild ( ) ; while ( n != null ) { stmts . add ( ( AstNode ) n ) ; n = n . getNext ( ) ; } return stmts ;
public class AnalyticsContext { /** * Fill this instance with application info from the provided { @ link Context } . No need to expose a * getter for this for bundled integrations ( they ' ll automatically fill what they need * themselves ) . */ void putScreen ( Context context ) { } }
Map < String , Object > screen = createMap ( ) ; WindowManager manager = getSystemService ( context , Context . WINDOW_SERVICE ) ; Display display = manager . getDefaultDisplay ( ) ; DisplayMetrics displayMetrics = new DisplayMetrics ( ) ; display . getMetrics ( displayMetrics ) ; screen . put ( SCREEN_DENSITY_KEY , displayMetrics . density ) ; screen . put ( SCREEN_HEIGHT_KEY , displayMetrics . heightPixels ) ; screen . put ( SCREEN_WIDTH_KEY , displayMetrics . widthPixels ) ; put ( SCREEN_KEY , screen ) ;
public class BaseKvDao { /** * { @ inheritDoc } */ @ Override public byte [ ] get ( String spaceId , String key ) throws IOException { } }
String cacheKey = calcCacheKey ( spaceId , key ) ; byte [ ] data = getFromCache ( getCacheName ( ) , cacheKey , byte [ ] . class ) ; if ( data == null ) { data = kvStorage . get ( spaceId , key ) ; putToCache ( getCacheName ( ) , cacheKey , data ) ; } return data ;
public class druidGLexer { /** * $ ANTLR start " DELIMITER " */ public final void mDELIMITER ( ) throws RecognitionException { } }
try { int _type = DELIMITER ; int _channel = DEFAULT_TOKEN_CHANNEL ; // druidG . g : 590:17 : ( ( ' DELIMITER ' | ' delimiter ' ) ) // druidG . g : 590:18 : ( ' DELIMITER ' | ' delimiter ' ) { // druidG . g : 590:18 : ( ' DELIMITER ' | ' delimiter ' ) int alt7 = 2 ; int LA7_0 = input . LA ( 1 ) ; if ( ( LA7_0 == 'D' ) ) { alt7 = 1 ; } else if ( ( LA7_0 == 'd' ) ) { alt7 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 7 , 0 , input ) ; throw nvae ; } switch ( alt7 ) { case 1 : // druidG . g : 590:19 : ' DELIMITER ' { match ( "DELIMITER" ) ; } break ; case 2 : // druidG . g : 590:31 : ' delimiter ' { match ( "delimiter" ) ; } break ; } } state . type = _type ; state . channel = _channel ; } finally { // do for sure before leaving }
public class ModifySnapshotAttributeRequest { /** * The group to modify for the snapshot . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setGroupNames ( java . util . Collection ) } or { @ link # withGroupNames ( java . util . Collection ) } if you want to * override the existing values . * @ param groupNames * The group to modify for the snapshot . * @ return Returns a reference to this object so that method calls can be chained together . */ public ModifySnapshotAttributeRequest withGroupNames ( String ... groupNames ) { } }
if ( this . groupNames == null ) { setGroupNames ( new com . amazonaws . internal . SdkInternalList < String > ( groupNames . length ) ) ; } for ( String ele : groupNames ) { this . groupNames . add ( ele ) ; } return this ;
public class ClassReader { /** * Parses the header of a type annotation to extract its target _ type and * target _ path ( the result is stored in the given context ) , and returns the * start offset of the rest of the type _ annotation structure ( i . e . the * offset to the type _ index field , which is followed by * num _ element _ value _ pairs and then the name , value pairs ) . * @ param context * information about the class being parsed . This is where the * extracted target _ type and target _ path must be stored . * @ param u * the start offset of a type _ annotation structure . * @ return the start offset of the rest of the type _ annotation structure . */ private int readAnnotationTarget ( final Context context , int u ) { } }
int target = readInt ( u ) ; switch ( target >>> 24 ) { case 0x00 : // CLASS _ TYPE _ PARAMETER case 0x01 : // METHOD _ TYPE _ PARAMETER case 0x16 : // METHOD _ FORMAL _ PARAMETER target &= 0xFFFF0000 ; u += 2 ; break ; case 0x13 : // FIELD case 0x14 : // METHOD _ RETURN case 0x15 : // METHOD _ RECEIVER target &= 0xFF000000 ; u += 1 ; break ; case 0x40 : // LOCAL _ VARIABLE case 0x41 : { // RESOURCE _ VARIABLE target &= 0xFF000000 ; int n = readUnsignedShort ( u + 1 ) ; context . start = new Label [ n ] ; context . end = new Label [ n ] ; context . index = new int [ n ] ; u += 3 ; for ( int i = 0 ; i < n ; ++ i ) { int start = readUnsignedShort ( u ) ; int length = readUnsignedShort ( u + 2 ) ; context . start [ i ] = createLabel ( start , context . labels ) ; context . end [ i ] = createLabel ( start + length , context . labels ) ; context . index [ i ] = readUnsignedShort ( u + 4 ) ; u += 6 ; } break ; } case 0x47 : // CAST case 0x48 : // CONSTRUCTOR _ INVOCATION _ TYPE _ ARGUMENT case 0x49 : // METHOD _ INVOCATION _ TYPE _ ARGUMENT case 0x4A : // CONSTRUCTOR _ REFERENCE _ TYPE _ ARGUMENT case 0x4B : // METHOD _ REFERENCE _ TYPE _ ARGUMENT target &= 0xFF0000FF ; u += 4 ; break ; // case 0x10 : / / CLASS _ EXTENDS // case 0x11 : / / CLASS _ TYPE _ PARAMETER _ BOUND // case 0x12 : / / METHOD _ TYPE _ PARAMETER _ BOUND // case 0x17 : / / THROWS // case 0x42 : / / EXCEPTION _ PARAMETER // case 0x43 : / / INSTANCEOF // case 0x44 : / / NEW // case 0x45 : / / CONSTRUCTOR _ REFERENCE // case 0x46 : / / METHOD _ REFERENCE default : target &= ( target >>> 24 ) < 0x43 ? 0xFFFFFF00 : 0xFF000000 ; u += 3 ; break ; } int pathLength = readByte ( u ) ; context . typeRef = target ; context . typePath = pathLength == 0 ? null : new TypePath ( b , u ) ; return u + 1 + 2 * pathLength ;
public class BitUtils { /** * Method to get The next bytes with the specified size * @ param pSize * the size in bit to read * @ param pShift * boolean to indicate if the data read will be shift to the * left . < br > * < ul > * < li > if true : ( Ex 10110000b if we start read 2 bit at index 2 * the returned data will be 1100000b ) < / li > * < li > if false : ( Ex 10110000b if we start read 2 bit at index 2 * the returned data will be 00110000b ) < / li > * < / ul > * @ return a byte array */ public byte [ ] getNextByte ( final int pSize , final boolean pShift ) { } }
byte [ ] tab = new byte [ ( int ) Math . ceil ( pSize / BYTE_SIZE_F ) ] ; if ( currentBitIndex % BYTE_SIZE != 0 ) { int index = 0 ; int max = currentBitIndex + pSize ; while ( currentBitIndex < max ) { int mod = currentBitIndex % BYTE_SIZE ; int modTab = index % BYTE_SIZE ; int length = Math . min ( max - currentBitIndex , Math . min ( BYTE_SIZE - mod , BYTE_SIZE - modTab ) ) ; byte val = ( byte ) ( byteTab [ currentBitIndex / BYTE_SIZE ] & getMask ( mod , length ) ) ; if ( pShift || pSize % BYTE_SIZE == 0 ) { if ( mod != 0 ) { val = ( byte ) ( val << Math . min ( mod , BYTE_SIZE - length ) ) ; } else { val = ( byte ) ( ( val & DEFAULT_VALUE ) >> modTab ) ; } } tab [ index / BYTE_SIZE ] |= val ; currentBitIndex += length ; index += length ; } if ( ! pShift && pSize % BYTE_SIZE != 0 ) { tab [ tab . length - 1 ] = ( byte ) ( tab [ tab . length - 1 ] & getMask ( ( max - pSize - 1 ) % BYTE_SIZE , BYTE_SIZE ) ) ; } } else { System . arraycopy ( byteTab , currentBitIndex / BYTE_SIZE , tab , 0 , tab . length ) ; int val = pSize % BYTE_SIZE ; if ( val == 0 ) { val = BYTE_SIZE ; } tab [ tab . length - 1 ] = ( byte ) ( tab [ tab . length - 1 ] & getMask ( currentBitIndex % BYTE_SIZE , val ) ) ; currentBitIndex += pSize ; } return tab ;
public class LFltSupplierBuilder { /** * Builds the functional interface implementation and if previously provided calls the consumer . */ @ Nonnull public final LFltSupplier build ( ) { } }
final LFltSupplier eventuallyFinal = this . eventually ; LFltSupplier retval ; final Case < LBoolSupplier , LFltSupplier > [ ] casesArray = cases . toArray ( new Case [ cases . size ( ) ] ) ; retval = LFltSupplier . fltSup ( ( ) -> { try { for ( Case < LBoolSupplier , LFltSupplier > aCase : casesArray ) { if ( aCase . casePredicate ( ) . getAsBool ( ) ) { return aCase . caseFunction ( ) . getAsFlt ( ) ; } } return eventuallyFinal . getAsFlt ( ) ; } catch ( Error e ) { // NOSONAR throw e ; } catch ( Throwable e ) { // NOSONAR throw Handler . handleOrPropagate ( e , handling ) ; } } ) ; if ( consumer != null ) { consumer . accept ( retval ) ; } return retval ;
public class SqlTableContext { /** * / * ( non - Javadoc ) * @ see org . parosproxy . paros . db . paros . TableContext # deleteAllDataForContext ( int ) */ @ Override public synchronized void deleteAllDataForContext ( int contextId ) throws DatabaseException { } }
SqlPreparedStatementWrapper psDeleteAllDataForContext = null ; try { psDeleteAllDataForContext = DbSQL . getSingleton ( ) . getPreparedStatement ( "context.ps.deletealldataforcontext" ) ; psDeleteAllDataForContext . getPs ( ) . setInt ( 1 , contextId ) ; psDeleteAllDataForContext . getPs ( ) . executeUpdate ( ) ; } catch ( SQLException e ) { throw new DatabaseException ( e ) ; } finally { DbSQL . getSingleton ( ) . releasePreparedStatement ( psDeleteAllDataForContext ) ; }
public class DurationUtility { /** * Retrieve an Duration instance . Use shared objects to * represent common values for memory efficiency . * @ param dur duration formatted as a string * @ param format number format * @ param locale target locale * @ return Duration instance * @ throws MPXJException */ public static Duration getInstance ( String dur , NumberFormat format , Locale locale ) throws MPXJException { } }
try { int lastIndex = dur . length ( ) - 1 ; int index = lastIndex ; double duration ; TimeUnit units ; while ( ( index > 0 ) && ( Character . isDigit ( dur . charAt ( index ) ) == false ) ) { -- index ; } // If we have no units suffix , assume days to allow for MPX3 if ( index == lastIndex ) { duration = format . parse ( dur ) . doubleValue ( ) ; units = TimeUnit . DAYS ; } else { ++ index ; duration = format . parse ( dur . substring ( 0 , index ) ) . doubleValue ( ) ; while ( ( index < lastIndex ) && ( Character . isWhitespace ( dur . charAt ( index ) ) ) ) { ++ index ; } units = TimeUnitUtility . getInstance ( dur . substring ( index ) , locale ) ; } return ( Duration . getInstance ( duration , units ) ) ; } catch ( ParseException ex ) { throw new MPXJException ( "Failed to parse duration" , ex ) ; }
public class GetIntegrationResponsesRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetIntegrationResponsesRequest getIntegrationResponsesRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getIntegrationResponsesRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getIntegrationResponsesRequest . getApiId ( ) , APIID_BINDING ) ; protocolMarshaller . marshall ( getIntegrationResponsesRequest . getIntegrationId ( ) , INTEGRATIONID_BINDING ) ; protocolMarshaller . marshall ( getIntegrationResponsesRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( getIntegrationResponsesRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ChunkedInputStream { /** * Read the next chunk . * @ throws IOException in case of an I / O error */ private void nextChunk ( ) throws IOException { } }
if ( state == CHUNK_INVALID ) { throw new MalformedChunkCodingException ( "Corrupt data stream" ) ; } try { chunkSize = getChunkSize ( ) ; if ( chunkSize < 0L ) { throw new MalformedChunkCodingException ( "Negative chunk size" ) ; } state = CHUNK_DATA ; pos = 0L ; if ( chunkSize == 0L ) { eof = true ; parseTrailerHeaders ( ) ; } } catch ( MalformedChunkCodingException ex ) { state = CHUNK_INVALID ; throw ex ; }
public class CopyConditions { /** * Set matching ETag condition , copy object which matches * the following ETag . * @ throws InvalidArgumentException * When etag is null */ public void setMatchETag ( String etag ) throws InvalidArgumentException { } }
if ( etag == null ) { throw new InvalidArgumentException ( "ETag cannot be empty" ) ; } copyConditions . put ( "x-amz-copy-source-if-match" , etag ) ;
public class DefaultConfiguration { /** * { @ inheritDoc } */ @ Override protected void configure ( ) { } }
// Registers the framework's built-in serializer/deserializer pairs. Note the
// concrete collection and map types all share the generic Collection/Map
// serializer but each gets a type-specific deserializer.

// Primitive mappers
primitiveType(boolean.class).serializer(BooleanJsonSerializer.class).deserializer(BooleanJsonDeserializer.class);
primitiveType(char.class).serializer(CharacterJsonSerializer.class).deserializer(CharacterJsonDeserializer.class);
primitiveType(byte.class).serializer(ByteJsonSerializer.class).deserializer(ByteJsonDeserializer.class);
primitiveType(double.class).serializer(DoubleJsonSerializer.class).deserializer(DoubleJsonDeserializer.class);
primitiveType(float.class).serializer(FloatJsonSerializer.class).deserializer(FloatJsonDeserializer.class);
primitiveType(int.class).serializer(IntegerJsonSerializer.class).deserializer(IntegerJsonDeserializer.class);
primitiveType(long.class).serializer(LongJsonSerializer.class).deserializer(LongJsonDeserializer.class);
primitiveType(short.class).serializer(ShortJsonSerializer.class).deserializer(ShortJsonDeserializer.class);

// Common mappers
type(String.class).serializer(StringJsonSerializer.class).deserializer(StringJsonDeserializer.class);
type(Boolean.class).serializer(BooleanJsonSerializer.class).deserializer(BooleanJsonDeserializer.class);
type(Character.class).serializer(CharacterJsonSerializer.class).deserializer(CharacterJsonDeserializer.class);
type(UUID.class).serializer(UUIDJsonSerializer.class).deserializer(UUIDJsonDeserializer.class);
type(Void.class).serializer(VoidJsonSerializer.class).deserializer(VoidJsonDeserializer.class);
type(JavaScriptObject.class).serializer(JavaScriptObjectJsonSerializer.class).deserializer(JavaScriptObjectJsonDeserializer.class);
type(Enum.class).serializer(EnumJsonSerializer.class).deserializer(EnumJsonDeserializer.class);

// Number mappers
type(BigDecimal.class).serializer(BigDecimalJsonSerializer.class).deserializer(BigDecimalJsonDeserializer.class);
type(BigInteger.class).serializer(BigIntegerJsonSerializer.class).deserializer(BigIntegerJsonDeserializer.class);
type(Byte.class).serializer(ByteJsonSerializer.class).deserializer(ByteJsonDeserializer.class);
type(Double.class).serializer(DoubleJsonSerializer.class).deserializer(DoubleJsonDeserializer.class);
type(Float.class).serializer(FloatJsonSerializer.class).deserializer(FloatJsonDeserializer.class);
type(Integer.class).serializer(IntegerJsonSerializer.class).deserializer(IntegerJsonDeserializer.class);
type(Long.class).serializer(LongJsonSerializer.class).deserializer(LongJsonDeserializer.class);
type(Short.class).serializer(ShortJsonSerializer.class).deserializer(ShortJsonDeserializer.class);
type(Number.class).serializer(NumberJsonSerializer.class).deserializer(NumberJsonDeserializer.class);

// Date mappers
type(Date.class).serializer(DateJsonSerializer.class).deserializer(DateJsonDeserializer.class);
type(java.sql.Date.class).serializer(SqlDateJsonSerializer.class).deserializer(SqlDateJsonDeserializer.class);
type(Time.class).serializer(SqlTimeJsonSerializer.class).deserializer(SqlTimeJsonDeserializer.class);
type(Timestamp.class).serializer(SqlTimestampJsonSerializer.class).deserializer(SqlTimestampJsonDeserializer.class);

// Iterable mappers
type(Iterable.class).serializer(IterableJsonSerializer.class).deserializer(IterableJsonDeserializer.class);
type(Collection.class).serializer(CollectionJsonSerializer.class).deserializer(CollectionJsonDeserializer.class);
type(AbstractCollection.class).serializer(CollectionJsonSerializer.class).deserializer(AbstractCollectionJsonDeserializer.class);
type(AbstractList.class).serializer(CollectionJsonSerializer.class).deserializer(AbstractListJsonDeserializer.class);
type(AbstractQueue.class).serializer(CollectionJsonSerializer.class).deserializer(AbstractQueueJsonDeserializer.class);
type(AbstractSequentialList.class).serializer(CollectionJsonSerializer.class).deserializer(AbstractSequentialListJsonDeserializer.class);
type(AbstractSet.class).serializer(CollectionJsonSerializer.class).deserializer(AbstractSetJsonDeserializer.class);
type(ArrayList.class).serializer(CollectionJsonSerializer.class).deserializer(ArrayListJsonDeserializer.class);
type(EnumSet.class).serializer(CollectionJsonSerializer.class).deserializer(EnumSetJsonDeserializer.class);
type(HashSet.class).serializer(CollectionJsonSerializer.class).deserializer(HashSetJsonDeserializer.class);
type(LinkedHashSet.class).serializer(CollectionJsonSerializer.class).deserializer(LinkedHashSetJsonDeserializer.class);
type(LinkedList.class).serializer(CollectionJsonSerializer.class).deserializer(LinkedListJsonDeserializer.class);
type(List.class).serializer(CollectionJsonSerializer.class).deserializer(ListJsonDeserializer.class);
type(PriorityQueue.class).serializer(CollectionJsonSerializer.class).deserializer(PriorityQueueJsonDeserializer.class);
type(Queue.class).serializer(CollectionJsonSerializer.class).deserializer(QueueJsonDeserializer.class);
type(Set.class).serializer(CollectionJsonSerializer.class).deserializer(SetJsonDeserializer.class);
type(SortedSet.class).serializer(CollectionJsonSerializer.class).deserializer(SortedSetJsonDeserializer.class);
type(Stack.class).serializer(CollectionJsonSerializer.class).deserializer(StackJsonDeserializer.class);
type(TreeSet.class).serializer(CollectionJsonSerializer.class).deserializer(TreeSetJsonDeserializer.class);
type(Vector.class).serializer(CollectionJsonSerializer.class).deserializer(VectorJsonDeserializer.class);

// Map mappers
type(Map.class).serializer(MapJsonSerializer.class).deserializer(MapJsonDeserializer.class);
type(AbstractMap.class).serializer(MapJsonSerializer.class).deserializer(AbstractMapJsonDeserializer.class);
type(EnumMap.class).serializer(MapJsonSerializer.class).deserializer(EnumMapJsonDeserializer.class);
type(HashMap.class).serializer(MapJsonSerializer.class).deserializer(HashMapJsonDeserializer.class);
type(IdentityHashMap.class).serializer(MapJsonSerializer.class).deserializer(IdentityHashMapJsonDeserializer.class);
type(LinkedHashMap.class).serializer(MapJsonSerializer.class).deserializer(LinkedHashMapJsonDeserializer.class);
type(SortedMap.class).serializer(MapJsonSerializer.class).deserializer(SortedMapJsonDeserializer.class);
type(TreeMap.class).serializer(MapJsonSerializer.class).deserializer(TreeMapJsonDeserializer.class);

// Primitive array mappers
type(boolean[].class).serializer(PrimitiveBooleanArrayJsonSerializer.class).deserializer(PrimitiveBooleanArrayJsonDeserializer.class);
type(byte[].class).serializer(PrimitiveByteArrayJsonSerializer.class).deserializer(PrimitiveByteArrayJsonDeserializer.class);
type(char[].class).serializer(PrimitiveCharacterArrayJsonSerializer.class).deserializer(PrimitiveCharacterArrayJsonDeserializer.class);
type(double[].class).serializer(PrimitiveDoubleArrayJsonSerializer.class).deserializer(PrimitiveDoubleArrayJsonDeserializer.class);
type(float[].class).serializer(PrimitiveFloatArrayJsonSerializer.class).deserializer(PrimitiveFloatArrayJsonDeserializer.class);
type(int[].class).serializer(PrimitiveIntegerArrayJsonSerializer.class).deserializer(PrimitiveIntegerArrayJsonDeserializer.class);
type(long[].class).serializer(PrimitiveLongArrayJsonSerializer.class).deserializer(PrimitiveLongArrayJsonDeserializer.class);
type(short[].class).serializer(PrimitiveShortArrayJsonSerializer.class).deserializer(PrimitiveShortArrayJsonDeserializer.class);

// Primitive 2D Array mappers
type(boolean[][].class).serializer(PrimitiveBooleanArray2dJsonSerializer.class).deserializer(PrimitiveBooleanArray2dJsonDeserializer.class);
type(byte[][].class).serializer(PrimitiveByteArray2dJsonSerializer.class).deserializer(PrimitiveByteArray2dJsonDeserializer.class);
type(char[][].class).serializer(PrimitiveCharacterArray2dJsonSerializer.class).deserializer(PrimitiveCharacterArray2dJsonDeserializer.class);
type(double[][].class).serializer(PrimitiveDoubleArray2dJsonSerializer.class).deserializer(PrimitiveDoubleArray2dJsonDeserializer.class);
type(float[][].class).serializer(PrimitiveFloatArray2dJsonSerializer.class).deserializer(PrimitiveFloatArray2dJsonDeserializer.class);
type(int[][].class).serializer(PrimitiveIntegerArray2dJsonSerializer.class).deserializer(PrimitiveIntegerArray2dJsonDeserializer.class);
type(long[][].class).serializer(PrimitiveLongArray2dJsonSerializer.class).deserializer(PrimitiveLongArray2dJsonDeserializer.class);
type(short[][].class).serializer(PrimitiveShortArray2dJsonSerializer.class).deserializer(PrimitiveShortArray2dJsonDeserializer.class);

// Map's key mappers
key(Object.class).serializer(ObjectKeySerializer.class).deserializer(StringKeyDeserializer.class);
key(Serializable.class).serializer(ObjectKeySerializer.class).deserializer(StringKeyDeserializer.class);
key(BigDecimal.class).serializer(NumberKeySerializer.class).deserializer(BigDecimalKeyDeserializer.class);
key(BigInteger.class).serializer(NumberKeySerializer.class).deserializer(BigIntegerKeyDeserializer.class);
key(Boolean.class).serializer(BooleanKeySerializer.class).deserializer(BooleanKeyDeserializer.class);
key(Byte.class).serializer(NumberKeySerializer.class).deserializer(ByteKeyDeserializer.class);
key(Character.class).serializer(ToStringKeySerializer.class).deserializer(CharacterKeyDeserializer.class);
key(Date.class).serializer(DateKeySerializer.class).deserializer(DateKeyDeserializer.class);
key(Double.class).serializer(NumberKeySerializer.class).deserializer(DoubleKeyDeserializer.class);
key(Enum.class).serializer(EnumKeySerializer.class).deserializer(EnumKeyDeserializer.class);
key(Float.class).serializer(NumberKeySerializer.class).deserializer(FloatKeyDeserializer.class);
key(Integer.class).serializer(NumberKeySerializer.class).deserializer(IntegerKeyDeserializer.class);
key(Long.class).serializer(NumberKeySerializer.class).deserializer(LongKeyDeserializer.class);
key(Short.class).serializer(NumberKeySerializer.class).deserializer(ShortKeyDeserializer.class);
key(java.sql.Date.class).serializer(DateKeySerializer.class).deserializer(SqlDateKeyDeserializer.class);
key(Time.class).serializer(DateKeySerializer.class).deserializer(SqlTimeKeyDeserializer.class);
key(Timestamp.class).serializer(DateKeySerializer.class).deserializer(SqlTimestampKeyDeserializer.class);
key(String.class).serializer(ToStringKeySerializer.class).deserializer(StringKeyDeserializer.class);
key(UUID.class).serializer(UUIDKeySerializer.class).deserializer(UUIDKeyDeserializer.class);
public class SQLExpressions { /** * CORR returns the coefficient of correlation of a set of number pairs . * @ param expr1 first arg * @ param expr2 second arg * @ return corr ( expr1 , expr2) */ public static WindowOver < Double > covarPop ( Expression < ? extends Number > expr1 , Expression < ? extends Number > expr2 ) { } }
return new WindowOver < Double > ( Double . class , SQLOps . COVARPOP , expr1 , expr2 ) ;
public class ManagedProcessBuilder { /** * Adds a single argument to the command , composed of two parts and a given separator . * The two parts are independently escaped ( see above ) , and then concatenated using the separator . */ protected ManagedProcessBuilder addArgument ( String argPart1 , String separator , String argPart2 ) { } }
// @ see MariaDB4j Issue # 30 why ' quoting ' ( https : / / github . com / vorburger / MariaDB4j / issues / 30) StringBuilder sb = new StringBuilder ( ) ; sb . append ( StringUtils . quoteArgument ( argPart1 ) ) ; sb . append ( separator ) ; sb . append ( StringUtils . quoteArgument ( argPart2 ) ) ; // @ see https : / / issues . apache . org / jira / browse / EXEC - 93 why we have to use ' false ' here // TODO Remove the false when commons - exec has a release including EXEC - 93 fixed . addArgument ( sb . toString ( ) , false ) ; return this ;
public class MithraManager { /** * This method will load the cache of the object already initialized . A Collection is used * to keep track of the objects to load . * @ param portals list of portals to load caches for * @ param threads number of parallel threads to load * @ throws MithraBusinessException if something goes wrong during the load */ public void loadMithraCache ( List < MithraObjectPortal > portals , int threads ) throws MithraBusinessException { } }
this . configManager . loadMithraCache ( portals , threads ) ;
public class PasswordHash { /** * Validates a password using a hash . * @ param password * the password to check * @ param correctHash * the hash of the valid password * @ return true if the password is correct , false if not * @ throws NoSuchAlgorithmException if jdk does not support the algorithm * @ throws InvalidKeySpecException if the password or salt are invalid */ public static boolean validatePassword ( String password , String correctHash ) throws NoSuchAlgorithmException , InvalidKeySpecException { } }
return validatePassword ( password . toCharArray ( ) , correctHash ) ;
public class HtmlEscape { /** * Perform an HTML5 level 2 ( result is ASCII ) < strong > escape < / strong > operation on a < tt > String < / tt > input . * < em > Level 2 < / em > means this method will escape : * < ul > * < li > The five markup - significant characters : < tt > & lt ; < / tt > , < tt > & gt ; < / tt > , < tt > & amp ; < / tt > , * < tt > & quot ; < / tt > and < tt > & # 39 ; < / tt > < / li > * < li > All non ASCII characters . < / li > * < / ul > * This escape will be performed by replacing those chars by the corresponding HTML5 Named Character References * ( e . g . < tt > ' & amp ; acute ; ' < / tt > ) when such NCR exists for the replaced character , and replacing by a decimal * character reference ( e . g . < tt > ' & amp ; # 8345 ; ' < / tt > ) when there there is no NCR for the replaced character . * This method calls { @ link # escapeHtml ( String , HtmlEscapeType , HtmlEscapeLevel ) } with the following * preconfigured values : * < ul > * < li > < tt > type < / tt > : * { @ link org . unbescape . html . HtmlEscapeType # HTML5 _ NAMED _ REFERENCES _ DEFAULT _ TO _ DECIMAL } < / li > * < li > < tt > level < / tt > : * { @ link org . unbescape . html . HtmlEscapeLevel # LEVEL _ 2 _ ALL _ NON _ ASCII _ PLUS _ MARKUP _ SIGNIFICANT } < / li > * < / ul > * This method is < strong > thread - safe < / strong > . * @ param text the < tt > String < / tt > to be escaped . * @ return The escaped result < tt > String < / tt > . As a memory - performance improvement , will return the exact * same object as the < tt > text < / tt > input argument if no escaping modifications were required ( and * no additional < tt > String < / tt > objects will be created during processing ) . Will * return < tt > null < / tt > if input is < tt > null < / tt > . */ public static String escapeHtml5 ( final String text ) { } }
return escapeHtml ( text , HtmlEscapeType . HTML5_NAMED_REFERENCES_DEFAULT_TO_DECIMAL , HtmlEscapeLevel . LEVEL_2_ALL_NON_ASCII_PLUS_MARKUP_SIGNIFICANT ) ;
public class TypeFactory { /** * org . glassfish . jersey . server . spi . internal . ValueFactoryProvider */ @ Override public Factory < ? > getValueFactory ( Parameter parameter ) { } }
if ( type . equals ( parameter . getRawType ( ) ) && parameter . isAnnotationPresent ( Auth . class ) ) { return this ; } return null ;
public class InternalCompilerTools { /** * Compiles the names files . */ private void executeInt ( String [ ] path , LineMap lineMap ) throws JavaCompileException , IOException { } }
MemoryStream tempStream = new MemoryStream ( ) ; WriteStreamOld error = new WriteStreamOld ( tempStream ) ; try { // String parent = javaPath . getParent ( ) . getNativePath ( ) ; ArrayList < String > argList = new ArrayList < String > ( ) ; argList . add ( "-d" ) ; argList . add ( _compiler . getClassDirName ( ) ) ; if ( _compiler . getEncoding ( ) != null ) { String encoding = Encoding . getJavaName ( _compiler . getEncoding ( ) ) ; if ( encoding != null && ! encoding . equals ( "ISO8859_1" ) ) { argList . add ( "-encoding" ) ; argList . add ( _compiler . getEncoding ( ) ) ; } } argList . add ( "-classpath" ) ; argList . add ( _compiler . getClassPath ( ) ) ; ArrayList < String > args = _compiler . getArgs ( ) ; if ( args != null ) argList . addAll ( args ) ; for ( int i = 0 ; i < path . length ; i ++ ) { PathImpl javaPath = _compiler . getSourceDir ( ) . lookup ( path [ i ] ) ; argList . add ( javaPath . getNativePath ( ) ) ; } if ( log . isLoggable ( Level . FINER ) ) { CharBuffer msg = new CharBuffer ( ) ; msg . append ( "javac(int)" ) ; for ( int i = 0 ; i < argList . size ( ) ; i ++ ) { if ( argList . get ( i ) . equals ( "-classpath" ) && ! log . isLoggable ( Level . FINEST ) ) { i ++ ; continue ; } msg . append ( " " ) ; msg . append ( argList . get ( i ) ) ; } log . finer ( msg . toString ( ) ) ; } String [ ] argArray = argList . toArray ( new String [ argList . size ( ) ] ) ; int status = - 1 ; Thread thread = Thread . currentThread ( ) ; ClassLoader oldLoader = thread . getContextClassLoader ( ) ; // env = _ internalLoader ; try { thread . setContextClassLoader ( _envClassLoader ) ; if ( _javac == null ) throw new ConfigException ( L . l ( "javac compiler is not available in {0}. Check that you are using the JDK, not the JRE." , System . getProperty ( "java.runtime.name" ) + " " + System . getProperty ( "java.runtime.version" ) ) ) ; status = _javac . run ( null , error , error , argArray ) ; error . close ( ) ; tempStream . 
close ( ) ; } finally { thread . setContextClassLoader ( oldLoader ) ; } ReadStreamOld read = tempStream . openReadAndSaveBuffer ( ) ; JavacErrorParser parser = new JavacErrorParser ( this , path [ 0 ] , _compiler . getEncoding ( ) ) ; String errors = parser . parseErrors ( ( InputStream ) read , lineMap ) ; read . close ( ) ; if ( errors != null ) errors = errors . trim ( ) ; if ( log . isLoggable ( Level . FINE ) ) { read = tempStream . openReadAndSaveBuffer ( ) ; CharBuffer cb = new CharBuffer ( ) ; int ch ; while ( ( ch = read . read ( ) ) >= 0 ) { cb . append ( ( char ) ch ) ; } read . close ( ) ; log . fine ( cb . toString ( ) ) ; } else if ( status == 0 && errors != null && ! errors . equals ( "" ) ) { final String msg = errors ; new com . caucho . v5 . loader . ClassLoaderContext ( _compiler . getClassLoader ( ) ) { public void run ( ) { log . warning ( msg ) ; } } ; } if ( status != 0 ) throw new JavaCompileException ( errors ) ; } finally { tempStream . destroy ( ) ; }
public class DisplaySingle { /** * Enables / disables the glowing of the glow indicator * @ param GLOWING */ public void setGlowing ( final boolean GLOWING ) { } }
glowing = GLOWING ; init ( getInnerBounds ( ) . width , getInnerBounds ( ) . height ) ; repaint ( ) ;
public class SAML2AuthnResponseValidator { /** * Validate Bearer subject confirmation data * - notBefore * - NotOnOrAfter * - recipient * @ param data the data * @ param context the context * @ return true if all Bearer subject checks are passing */ protected final boolean isValidBearerSubjectConfirmationData ( final SubjectConfirmationData data , final SAML2MessageContext context ) { } }
if ( data == null ) { logger . debug ( "SubjectConfirmationData cannot be null for Bearer confirmation" ) ; return false ; } // TODO Validate inResponseTo if ( data . getNotBefore ( ) != null ) { logger . debug ( "SubjectConfirmationData notBefore must be null for Bearer confirmation" ) ; return false ; } if ( data . getNotOnOrAfter ( ) == null ) { logger . debug ( "SubjectConfirmationData notOnOrAfter cannot be null for Bearer confirmation" ) ; return false ; } if ( data . getNotOnOrAfter ( ) . plusSeconds ( acceptedSkew ) . isBeforeNow ( ) ) { logger . debug ( "SubjectConfirmationData notOnOrAfter is too old" ) ; return false ; } try { if ( data . getRecipient ( ) == null ) { logger . debug ( "SubjectConfirmationData recipient cannot be null for Bearer confirmation" ) ; return false ; } else { final Endpoint endpoint = context . getSAMLEndpointContext ( ) . getEndpoint ( ) ; if ( endpoint == null ) { logger . warn ( "No endpoint was found in the SAML endpoint context" ) ; return false ; } final URI recipientUri = new URI ( data . getRecipient ( ) ) ; final URI appEndpointUri = new URI ( endpoint . getLocation ( ) ) ; if ( ! SAML2Utils . urisEqualAfterPortNormalization ( recipientUri , appEndpointUri ) ) { logger . debug ( "SubjectConfirmationData recipient {} does not match SP assertion consumer URL, found. " + "SP ACS URL from context: {}" , recipientUri , appEndpointUri ) ; return false ; } } } catch ( URISyntaxException use ) { logger . error ( "Unable to check SubjectConfirmationData recipient, a URI has invalid syntax." , use ) ; return false ; } return true ;
public class TOTPValidator { /** * Returns { @ code true } if the specified TOTP { @ code value } matches the * value of the TOTP generated at validation , otherwise { @ code false } . The * current system time ( current time in milliseconds since the UNIX epoch ) * is used as the validation reference time . * @ param key * the encoded shared secret key * @ param timeStep * the time step size in milliseconds * @ param digits * the number of digits a TOTP should contain * @ param hmacShaAlgorithm * { @ link HmacShaAlgorithm } * @ param value * the TOTP value to validate * @ return { @ code true } if the specified TOTP { @ code code } value matches the * code value of the TOTP generated at validation , otherwise * { @ code false } . */ public boolean isValid ( byte [ ] key , long timeStep , int digits , HmacShaAlgorithm hmacShaAlgorithm , String value ) { } }
return isValid ( key , timeStep , digits , hmacShaAlgorithm , value , System . currentTimeMillis ( ) ) ;
public class TcpServer { /** * Setups a callback called when { @ link io . netty . channel . ServerChannel } is * bound . * @ param doOnBound a consumer observing server started event * @ return a new { @ link TcpServer } */ public final TcpServer doOnBound ( Consumer < ? super DisposableServer > doOnBound ) { } }
Objects . requireNonNull ( doOnBound , "doOnBound" ) ; return new TcpServerDoOn ( this , null , doOnBound , null ) ;
public class SpringIOUtils { /** * Copy the contents of the given String to the given output Writer . * Closes the write when done . * @ param in the String to copy from * @ param out the Writer to copy to * @ throws IOException in case of I / O errors */ public static void copy ( String in , Writer out ) throws IOException { } }
assert in != null : "No input String specified" ; assert out != null : "No output Writer specified" ; try { out . write ( in ) ; } finally { try { out . close ( ) ; } catch ( IOException ex ) { } }
public class DiffToolLogMessages { /** * Logs the shutdown of the logger . * @ param logger * reference to the logger * @ param endTime * time since start */ public static void logShutdown ( final Logger logger , final long endTime ) { } }
logger . logMessage ( Level . INFO , "DiffTool initiates SHUTDOWN\t" + Time . toClock ( endTime ) ) ;
public class GenbankWriterHelper { /** * Write a collection of NucleotideSequences to a file * @ param outputStream * @ param dnaSequences * @ param seqType * @ throws Exception */ public static void writeNucleotideSequence ( OutputStream outputStream , Collection < DNASequence > dnaSequences , String seqType ) throws Exception { } }
GenericGenbankHeaderFormat < DNASequence , NucleotideCompound > genericGenbankHeaderFormat = new GenericGenbankHeaderFormat < DNASequence , NucleotideCompound > ( seqType ) ; // genericGenbankHeaderFormat . setLineSeparator ( lineSep ) ; GenbankWriter < DNASequence , NucleotideCompound > genbankWriter = new GenbankWriter < DNASequence , NucleotideCompound > ( outputStream , dnaSequences , genericGenbankHeaderFormat ) ; // genbankWriter . setLineSeparator ( lineSep ) ; genbankWriter . process ( ) ;
public class TransformerIdentityImpl { /** * Get a parameter that was explicitly set with setParameter * or setParameters . * < p > This method does not return a default parameter value , which * cannot be determined until the node context is evaluated during * the transformation process . * @ param name Name of the parameter . * @ return A parameter that has been set with setParameter . */ public Object getParameter ( String name ) { } }
if ( null == m_params ) return null ; return m_params . get ( name ) ;
public class NCBIQBlastService { /** * A simple method to check the availability of the QBlast service . Sends { @ code Info } command to QBlast * @ return QBlast info output concatenated to String * @ throws Exception if unable to connect to the NCBI QBlast service */ public String getRemoteBlastInfo ( ) throws Exception { } }
OutputStreamWriter writer = null ; BufferedReader reader = null ; try { URLConnection serviceConnection = setQBlastServiceProperties ( serviceUrl . openConnection ( ) ) ; writer = new OutputStreamWriter ( serviceConnection . getOutputStream ( ) ) ; writer . write ( "CMD=Info" ) ; writer . flush ( ) ; reader = new BufferedReader ( new InputStreamReader ( serviceConnection . getInputStream ( ) ) ) ; StringBuilder sb = new StringBuilder ( ) ; String line ; while ( ( line = reader . readLine ( ) ) != null ) { sb . append ( line ) ; sb . append ( System . getProperty ( "line.separator" ) ) ; } return sb . toString ( ) ; } catch ( IOException e ) { throw new Exception ( "Impossible to get info from QBlast service at this time. Cause: " + e . getMessage ( ) , e ) ; } finally { IOUtils . close ( reader ) ; IOUtils . close ( writer ) ; }
public class Pattern { /** * A pattern which matches < code > pattern < / code > as many times as possible * but at least < code > min < / code > times and at most < code > max < / code > times . * @ param pattern * @ param min * @ param max * @ return */ public static Pattern repeat ( Pattern pattern , int min , int max ) { } }
if ( pattern == null ) { throw new IllegalArgumentException ( "Pattern can not be null" ) ; } return new RepeatPattern ( pattern , min , max ) ;
public class VisualizeFiducial { /** * Renders a translucent chessboard pattern * @ param g2 Graphics object it ' s drawn in * @ param fiducialToPixel Coverts a coordinate from fiducial into pixel * @ param numRows Number of rows in the calibration grid * @ param numCols Number of columns in the calibration grid * @ param squareWidth Width of each square */ public static void drawChessboard ( Graphics2D g2 , WorldToCameraToPixel fiducialToPixel , int numRows , int numCols , double squareWidth ) { } }
Point3D_F64 fidPt = new Point3D_F64 ( ) ; Point2D_F64 pixel0 = new Point2D_F64 ( ) ; Point2D_F64 pixel1 = new Point2D_F64 ( ) ; Point2D_F64 pixel2 = new Point2D_F64 ( ) ; Point2D_F64 pixel3 = new Point2D_F64 ( ) ; Line2D . Double l = new Line2D . Double ( ) ; int polyX [ ] = new int [ 4 ] ; int polyY [ ] = new int [ 4 ] ; int alpha = 100 ; Color red = new Color ( 255 , 0 , 0 , alpha ) ; Color black = new Color ( 0 , 0 , 0 , alpha ) ; for ( int row = 0 ; row < numRows ; row ++ ) { double y0 = - numRows * squareWidth / 2 + row * squareWidth ; for ( int col = row % 2 ; col < numCols ; col += 2 ) { double x0 = - numCols * squareWidth / 2 + col * squareWidth ; fidPt . set ( x0 , y0 , 0 ) ; fiducialToPixel . transform ( fidPt , pixel0 ) ; fidPt . set ( x0 + squareWidth , y0 , 0 ) ; fiducialToPixel . transform ( fidPt , pixel1 ) ; fidPt . set ( x0 + squareWidth , y0 + squareWidth , 0 ) ; fiducialToPixel . transform ( fidPt , pixel2 ) ; fidPt . set ( x0 , y0 + squareWidth , 0 ) ; fiducialToPixel . transform ( fidPt , pixel3 ) ; polyX [ 0 ] = ( int ) ( pixel0 . x + 0.5 ) ; polyX [ 1 ] = ( int ) ( pixel1 . x + 0.5 ) ; polyX [ 2 ] = ( int ) ( pixel2 . x + 0.5 ) ; polyX [ 3 ] = ( int ) ( pixel3 . x + 0.5 ) ; polyY [ 0 ] = ( int ) ( pixel0 . y + 0.5 ) ; polyY [ 1 ] = ( int ) ( pixel1 . y + 0.5 ) ; polyY [ 2 ] = ( int ) ( pixel2 . y + 0.5 ) ; polyY [ 3 ] = ( int ) ( pixel3 . y + 0.5 ) ; g2 . setColor ( black ) ; g2 . fillPolygon ( polyX , polyY , 4 ) ; g2 . setColor ( red ) ; drawLine ( g2 , l , pixel0 . x , pixel0 . y , pixel1 . x , pixel1 . y ) ; drawLine ( g2 , l , pixel1 . x , pixel1 . y , pixel2 . x , pixel2 . y ) ; drawLine ( g2 , l , pixel2 . x , pixel2 . y , pixel3 . x , pixel3 . y ) ; drawLine ( g2 , l , pixel3 . x , pixel3 . y , pixel0 . x , pixel0 . y ) ; } }
public class ClientPrepareResult { /** * Separate query in a String list and set flag isQueryMultipleRewritable . The resulting string * list is separed by ? that are not in comments . isQueryMultipleRewritable flag is set if query * can be rewrite in one query ( all case but if using " - - comment " ) . example for query : " INSERT * INTO tableName ( id , name ) VALUES ( ? , ? ) " result list will be : { " INSERT INTO tableName ( id , name ) * VALUES ( " , " , " , " ) " } * @ param queryString query * @ param noBackslashEscapes escape mode * @ return ClientPrepareResult */ public static ClientPrepareResult parameterParts ( String queryString , boolean noBackslashEscapes ) { } }
try { boolean reWritablePrepare = false ; boolean multipleQueriesPrepare = true ; List < byte [ ] > partList = new ArrayList < > ( ) ; LexState state = LexState . Normal ; char lastChar = '\0' ; boolean endingSemicolon = false ; boolean singleQuotes = false ; int lastParameterPosition = 0 ; char [ ] query = queryString . toCharArray ( ) ; int queryLength = query . length ; for ( int i = 0 ; i < queryLength ; i ++ ) { char car = query [ i ] ; if ( state == LexState . Escape && ! ( ( car == '\'' && singleQuotes ) || ( car == '"' && ! singleQuotes ) ) ) { state = LexState . String ; lastChar = car ; continue ; } switch ( car ) { case '*' : if ( state == LexState . Normal && lastChar == '/' ) { state = LexState . SlashStarComment ; } break ; case '/' : if ( state == LexState . SlashStarComment && lastChar == '*' ) { state = LexState . Normal ; } else if ( state == LexState . Normal && lastChar == '/' ) { state = LexState . EOLComment ; } break ; case '#' : if ( state == LexState . Normal ) { state = LexState . EOLComment ; } break ; case '-' : if ( state == LexState . Normal && lastChar == '-' ) { state = LexState . EOLComment ; multipleQueriesPrepare = false ; } break ; case '\n' : if ( state == LexState . EOLComment ) { multipleQueriesPrepare = true ; state = LexState . Normal ; } break ; case '"' : if ( state == LexState . Normal ) { state = LexState . String ; singleQuotes = false ; } else if ( state == LexState . String && ! singleQuotes ) { state = LexState . Normal ; } else if ( state == LexState . Escape && ! singleQuotes ) { state = LexState . String ; } break ; case '\'' : if ( state == LexState . Normal ) { state = LexState . String ; singleQuotes = true ; } else if ( state == LexState . String && singleQuotes ) { state = LexState . Normal ; } else if ( state == LexState . Escape && singleQuotes ) { state = LexState . String ; } break ; case '\\' : if ( noBackslashEscapes ) { break ; } if ( state == LexState . String ) { state = LexState . 
Escape ; } break ; case ';' : if ( state == LexState . Normal ) { endingSemicolon = true ; multipleQueriesPrepare = false ; } break ; case '?' : if ( state == LexState . Normal ) { partList . add ( queryString . substring ( lastParameterPosition , i ) . getBytes ( "UTF-8" ) ) ; lastParameterPosition = i + 1 ; } break ; case '`' : if ( state == LexState . Backtick ) { state = LexState . Normal ; } else if ( state == LexState . Normal ) { state = LexState . Backtick ; } break ; default : // multiple queries if ( state == LexState . Normal && endingSemicolon && ( ( byte ) car >= 40 ) ) { endingSemicolon = false ; multipleQueriesPrepare = true ; } break ; } lastChar = car ; } if ( lastParameterPosition == 0 ) { partList . add ( queryString . getBytes ( "UTF-8" ) ) ; } else { partList . add ( queryString . substring ( lastParameterPosition , queryLength ) . getBytes ( "UTF-8" ) ) ; } return new ClientPrepareResult ( queryString , partList , reWritablePrepare , multipleQueriesPrepare , false ) ; } catch ( UnsupportedEncodingException u ) { // cannot happen return null ; }
public class ListToMapConverter { /** * Converts the passed list of inners to unmodifiable map of impls . * @ param innerList list of the inners . * @ return map of the impls */ public Map < String , ImplT > convertToUnmodifiableMap ( List < InnerT > innerList ) { } }
Map < String , ImplT > result = new HashMap < > ( ) ; for ( InnerT inner : innerList ) { result . put ( name ( inner ) , impl ( inner ) ) ; } return Collections . unmodifiableMap ( result ) ;
public class SubsystemSuspensionLevels { /** * Retrieves the SubsystemSuspensionLevels object for the given subsystem . * @ param em The EntityManager to use . * @ param subSystem The subsystem for which to retrieve suspension level . * @ return The SubsystemSuspensionLevels object for the given subsystem . Throws a runtime exception if no such record exists . This is because * suspension levels for non - existent subsystems must never be queried . */ public static SubsystemSuspensionLevels findBySubsystem ( EntityManager em , SubSystem subSystem ) { } }
SystemAssert . requireArgument ( em != null , "Entity manager can not be null." ) ; SystemAssert . requireArgument ( subSystem != null , "Subsystem cannot be null." ) ; TypedQuery < SubsystemSuspensionLevels > query = em . createNamedQuery ( "SubsystemSuspensionLevels.findBySubsystem" , SubsystemSuspensionLevels . class ) ; try { query . setParameter ( "subSystem" , subSystem ) ; return query . getSingleResult ( ) ; } catch ( NoResultException ex ) { Map < Integer , Long > levels = new HashMap < > ( ) ; levels . put ( 1 , 60 * 60 * 1000L ) ; levels . put ( 2 , 10 * 60 * 60 * 1000L ) ; levels . put ( 3 , 24 * 60 * 60 * 1000L ) ; levels . put ( 4 , 3 * 24 * 60 * 60 * 1000L ) ; levels . put ( 5 , 10 * 24 * 60 * 60 * 1000L ) ; SubsystemSuspensionLevels suspensionLevels = new SubsystemSuspensionLevels ( null , subSystem , levels ) ; return em . merge ( suspensionLevels ) ; }
public class InternalXbaseWithAnnotationsParser { /** * InternalXbaseWithAnnotations . g : 4233:1 : ruleXBlockExpression returns [ EObject current = null ] : ( ( ) otherlv _ 1 = ' { ' ( ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) ( otherlv _ 3 = ' ; ' ) ? ) * otherlv _ 4 = ' } ' ) ; */ public final EObject ruleXBlockExpression ( ) throws RecognitionException { } }
EObject current = null ; Token otherlv_1 = null ; Token otherlv_3 = null ; Token otherlv_4 = null ; EObject lv_expressions_2_0 = null ; enterRule ( ) ; try { // InternalXbaseWithAnnotations . g : 4239:2 : ( ( ( ) otherlv _ 1 = ' { ' ( ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) ( otherlv _ 3 = ' ; ' ) ? ) * otherlv _ 4 = ' } ' ) ) // InternalXbaseWithAnnotations . g : 4240:2 : ( ( ) otherlv _ 1 = ' { ' ( ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) ( otherlv _ 3 = ' ; ' ) ? ) * otherlv _ 4 = ' } ' ) { // InternalXbaseWithAnnotations . g : 4240:2 : ( ( ) otherlv _ 1 = ' { ' ( ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) ( otherlv _ 3 = ' ; ' ) ? ) * otherlv _ 4 = ' } ' ) // InternalXbaseWithAnnotations . g : 4241:3 : ( ) otherlv _ 1 = ' { ' ( ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) ( otherlv _ 3 = ' ; ' ) ? ) * otherlv _ 4 = ' } ' { // InternalXbaseWithAnnotations . g : 4241:3 : ( ) // InternalXbaseWithAnnotations . g : 4242:4: { if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getXBlockExpressionAccess ( ) . getXBlockExpressionAction_0 ( ) , current ) ; } } otherlv_1 = ( Token ) match ( input , 55 , FOLLOW_62 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getXBlockExpressionAccess ( ) . getLeftCurlyBracketKeyword_1 ( ) ) ; } // InternalXbaseWithAnnotations . g : 4252:3 : ( ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) ( otherlv _ 3 = ' ; ' ) ? ) * loop73 : do { int alt73 = 2 ; int LA73_0 = input . 
LA ( 1 ) ; if ( ( ( LA73_0 >= RULE_STRING && LA73_0 <= RULE_ID ) || LA73_0 == 14 || ( LA73_0 >= 18 && LA73_0 <= 19 ) || LA73_0 == 26 || ( LA73_0 >= 42 && LA73_0 <= 43 ) || LA73_0 == 48 || LA73_0 == 55 || LA73_0 == 59 || LA73_0 == 61 || ( LA73_0 >= 65 && LA73_0 <= 82 ) || LA73_0 == 84 ) ) { alt73 = 1 ; } switch ( alt73 ) { case 1 : // InternalXbaseWithAnnotations . g : 4253:4 : ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) ( otherlv _ 3 = ' ; ' ) ? { // InternalXbaseWithAnnotations . g : 4253:4 : ( ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) ) // InternalXbaseWithAnnotations . g : 4254:5 : ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) { // InternalXbaseWithAnnotations . g : 4254:5 : ( lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration ) // InternalXbaseWithAnnotations . g : 4255:6 : lv _ expressions _ 2_0 = ruleXExpressionOrVarDeclaration { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getXBlockExpressionAccess ( ) . getExpressionsXExpressionOrVarDeclarationParserRuleCall_2_0_0 ( ) ) ; } pushFollow ( FOLLOW_63 ) ; lv_expressions_2_0 = ruleXExpressionOrVarDeclaration ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getXBlockExpressionRule ( ) ) ; } add ( current , "expressions" , lv_expressions_2_0 , "org.eclipse.xtext.xbase.Xbase.XExpressionOrVarDeclaration" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXbaseWithAnnotations . g : 4272:4 : ( otherlv _ 3 = ' ; ' ) ? int alt72 = 2 ; int LA72_0 = input . LA ( 1 ) ; if ( ( LA72_0 == 58 ) ) { alt72 = 1 ; } switch ( alt72 ) { case 1 : // InternalXbaseWithAnnotations . g : 4273:5 : otherlv _ 3 = ' ; ' { otherlv_3 = ( Token ) match ( input , 58 , FOLLOW_62 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_3 , grammarAccess . getXBlockExpressionAccess ( ) . 
getSemicolonKeyword_2_1 ( ) ) ; } } break ; } } break ; default : break loop73 ; } } while ( true ) ; otherlv_4 = ( Token ) match ( input , 56 , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_4 , grammarAccess . getXBlockExpressionAccess ( ) . getRightCurlyBracketKeyword_3 ( ) ) ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class RandomLong { /** * Updates ( drifts ) a long value within specified range defined * @ param value a long value to drift . * @ param range ( optional ) a range . Default : 10 % of the value * @ return updated random long value . */ public static long updateLong ( long value , long range ) { } }
range = range == 0 ? ( long ) ( 0.1 * value ) : range ; long minValue = value - range ; long maxValue = value + range ; return nextLong ( minValue , maxValue ) ;
public class SnapshotsInner { /** * Updates ( patches ) a snapshot . * @ param resourceGroupName The name of the resource group . * @ param snapshotName The name of the snapshot that is being created . The name can ' t be changed after the snapshot is created . Supported characters for the name are a - z , A - Z , 0-9 and _ . The max name length is 80 characters . * @ param snapshot Snapshot object supplied in the body of the Patch snapshot operation . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < SnapshotInner > updateAsync ( String resourceGroupName , String snapshotName , SnapshotUpdate snapshot , final ServiceCallback < SnapshotInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( updateWithServiceResponseAsync ( resourceGroupName , snapshotName , snapshot ) , serviceCallback ) ;
public class VMInfo { /** * Creates a dead VMInfo , representing a jvm in a given state which cannot * be attached or other monitoring issues occurred . */ public static VMInfo createDeadVM ( String pid , VMInfoState state ) { } }
VMInfo vmInfo = new VMInfo ( ) ; vmInfo . state = state ; vmInfo . pid = pid ; return vmInfo ;
public class ShadowClassLoader { /** * Return a { @ link Set } of members in the Jar identified by { @ code absolutePathToJar } . * @ param absolutePathToJar Cache key * @ return a Set with the Jar member - names */ private Set < String > getJarMemberSet ( String absolutePathToJar ) { } }
/* * Note : * Our implementation returns a HashSet . initialCapacity and loadFactor are carefully tweaked for speed and RAM optimization purposes . * Benchmark : * The HashSet implementation is about 10 % slower to build ( only happens once ) than the ArrayList . * The HashSet with shiftBits = 1 was about 33 times ( ! ) faster than the ArrayList for retrievals . */ try { int shiftBits = 1 ; // ( fast , but big ) 0 < = shiftBits < = 5 , say ( slower & compact ) JarFile jar = new JarFile ( absolutePathToJar ) ; /* * Find the first power of 2 > = JarSize ( as calculated in HashSet constructor ) */ int jarSizePower2 = Integer . highestOneBit ( jar . size ( ) ) ; if ( jarSizePower2 != jar . size ( ) ) jarSizePower2 <<= 1 ; if ( jarSizePower2 == 0 ) jarSizePower2 = 1 ; Set < String > jarMembers = new HashSet < String > ( jarSizePower2 >> shiftBits , 1 << shiftBits ) ; try { Enumeration < JarEntry > entries = jar . entries ( ) ; while ( entries . hasMoreElements ( ) ) { JarEntry jarEntry = entries . nextElement ( ) ; if ( jarEntry . isDirectory ( ) ) continue ; jarMembers . add ( jarEntry . getName ( ) ) ; } } catch ( Exception ignore ) { // ignored ; if the jar can ' t be read , treating it as if the jar contains no classes is just what we want . } finally { jar . close ( ) ; } return jarMembers ; } catch ( Exception newJarFileException ) { return Collections . emptySet ( ) ; }
public class ArrayUtil { /** * convert a primitive array ( value type ) to Object Array ( reference type ) . * @ param primArr value type Array * @ return reference type Array */ public static Byte [ ] toReferenceType ( byte [ ] primArr ) { } }
Byte [ ] refArr = new Byte [ primArr . length ] ; for ( int i = 0 ; i < primArr . length ; i ++ ) refArr [ i ] = new Byte ( primArr [ i ] ) ; return refArr ;
public class KeyVaultClientCustomImpl { /** * List the versions of a certificate . * @ param vaultBaseUrl * The vault name , e . g . https : / / myvault . vault . azure . net * @ param certificateName * The name of the certificate * @ param maxresults * Maximum number of results to return in a page . If not specified * the service will return up to 25 results . * @ return the PagedList & lt ; CertificateItem & gt ; if successful . */ public PagedList < CertificateItem > listCertificateVersions ( final String vaultBaseUrl , final String certificateName , final Integer maxresults ) { } }
return getCertificateVersions ( vaultBaseUrl , certificateName , maxresults ) ;
public class Properties { /** * Sets a dynamic property value on an object . */ public static void setObjectDynamicProperty ( Object object , String propertyName , Object value ) { } }
propertyValues . setObjectDynamicProperty ( object , propertyName , value ) ;
public class OpenEntityManagerAspect { /** * Obtain the transactional EntityManager for this accessor ' s EntityManagerFactory , if any . * @ return the transactional EntityManager , or < code > null < / code > if none * @ throws IllegalStateException if this accessor is not configured with an EntityManagerFactory * @ see * EntityManagerFactoryUtils # getTransactionalEntityManager ( javax . persistence . EntityManagerFactory ) * @ see * EntityManagerFactoryUtils # getTransactionalEntityManager ( javax . persistence . EntityManagerFactory , * java . util . Map ) */ protected EntityManager getTransactionalEntityManager ( EntityManagerFactory emf ) throws IllegalStateException { } }
Assert . state ( emf != null , "No EntityManagerFactory specified" ) ; return EntityManagerFactoryUtils . getTransactionalEntityManager ( emf ) ;
public class SqlUtils { /** * 字符串取值增加单引号 、 日期格式化 * 该方法只在内部使用 , 它偷偷将map内容给改变了 , 可能会给其他引用map的程序造成麻烦 , 请使用SqlUtils . formatMap ( map ) 替代 */ private static void forSpecialValue ( Map < String , Object > conditionMap ) { } }
conditionMap . keySet ( ) . forEach ( key -> { Object value = conditionMap . get ( key ) ; if ( value instanceof String ) { conditionMap . put ( key , "'" . concat ( value . toString ( ) ) . concat ( "'" ) ) ; } if ( value instanceof Date ) { String dateString = DateConverterFactory . getDateConverter ( ) . toStandardString ( ( Date ) value ) ; conditionMap . put ( key , "'" . concat ( dateString ) . concat ( "'" ) ) ; } if ( value instanceof Calendar ) { String dateString = DateConverterFactory . getDateConverter ( ) . toStandardString ( ( Calendar ) value ) ; conditionMap . put ( key , "'" . concat ( dateString ) . concat ( "'" ) ) ; } if ( value instanceof Number ) { conditionMap . put ( key , value . toString ( ) ) ; } } ) ;
public class Base { /** * Same as { @ link DB # withDb ( String , String , String , String , Supplier ) } , but with db name { @ link DB # DEFAULT _ NAME } . */ public static < T > T withDb ( String driver , String url , String user , String password , Supplier < T > supplier ) { } }
return new DB ( DB . DEFAULT_NAME ) . withDb ( driver , url , user , password , supplier ) ;
public class MessageMLParser { /** * Create a MessageML element based on the DOM element ' s name and attributes . */ public Element createElement ( org . w3c . dom . Element element , Element parent ) throws InvalidInputException { } }
String tag = element . getNodeName ( ) ; if ( Header . isHeaderElement ( tag ) ) { return new Header ( parent , tag ) ; } switch ( tag ) { case Chime . MESSAGEML_TAG : validateFormat ( tag ) ; return new Chime ( parent , FormatEnum . MESSAGEML ) ; case Chime . PRESENTATIONML_TAG : return new Chime ( parent , FormatEnum . PRESENTATIONML ) ; case Paragraph . MESSAGEML_TAG : return new Paragraph ( parent ) ; case LineBreak . MESSAGEML_TAG : return new LineBreak ( parent ) ; case HorizontalRule . MESSAGEML_TAG : return new HorizontalRule ( parent ) ; case Span . MESSAGEML_TAG : switch ( element . getAttribute ( CLASS_ATTR ) ) { case Entity . PRESENTATIONML_CLASS : return createEntity ( element , parent ) ; default : return new Span ( parent ) ; } case Div . MESSAGEML_TAG : switch ( element . getAttribute ( CLASS_ATTR ) ) { case Entity . PRESENTATIONML_CLASS : return createEntity ( element , parent ) ; case Card . PRESENTATIONML_CLASS : element . removeAttribute ( CLASS_ATTR ) ; return new Card ( parent , FormatEnum . PRESENTATIONML ) ; case CardBody . PRESENTATIONML_CLASS : element . removeAttribute ( CLASS_ATTR ) ; return new CardBody ( parent , FormatEnum . PRESENTATIONML ) ; case CardHeader . PRESENTATIONML_CLASS : element . removeAttribute ( CLASS_ATTR ) ; return new CardHeader ( parent , FormatEnum . PRESENTATIONML ) ; default : return new Div ( parent ) ; } case Bold . MESSAGEML_TAG : return new Bold ( parent ) ; case Italic . MESSAGEML_TAG : return new Italic ( parent ) ; case Preformatted . MESSAGEML_TAG : return new Preformatted ( parent ) ; case HashTag . MESSAGEML_TAG : validateFormat ( tag ) ; return new HashTag ( parent , ++ index ) ; case CashTag . MESSAGEML_TAG : validateFormat ( tag ) ; return new CashTag ( parent , ++ index ) ; case Mention . MESSAGEML_TAG : validateFormat ( tag ) ; return new Mention ( parent , ++ index , dataProvider ) ; case Link . MESSAGEML_TAG : return new Link ( parent , dataProvider ) ; case Image . 
MESSAGEML_TAG : return new Image ( parent ) ; case BulletList . MESSAGEML_TAG : return new BulletList ( parent ) ; case OrderedList . MESSAGEML_TAG : return new OrderedList ( parent ) ; case ListItem . MESSAGEML_TAG : return new ListItem ( parent ) ; case Table . MESSAGEML_TAG : return new Table ( parent ) ; case TableHeader . MESSAGEML_TAG : return new TableHeader ( parent ) ; case TableBody . MESSAGEML_TAG : return new TableBody ( parent ) ; case TableFooter . MESSAGEML_TAG : return new TableFooter ( parent ) ; case TableRow . MESSAGEML_TAG : return new TableRow ( parent ) ; case TableHeaderCell . MESSAGEML_TAG : return new TableHeaderCell ( parent ) ; case TableCell . MESSAGEML_TAG : return new TableCell ( parent ) ; case Card . MESSAGEML_TAG : validateFormat ( tag ) ; return new Card ( parent , FormatEnum . MESSAGEML ) ; case Code . MESSAGEML_TAG : return new Code ( parent ) ; case CardHeader . MESSAGEML_TAG : validateFormat ( tag ) ; return new CardHeader ( parent , FormatEnum . MESSAGEML ) ; case CardBody . MESSAGEML_TAG : validateFormat ( tag ) ; return new CardBody ( parent , FormatEnum . MESSAGEML ) ; case Emoji . MESSAGEML_TAG : return new Emoji ( parent , ++ index ) ; case Form . MESSAGEML_TAG : return new Form ( parent ) ; case Button . MESSAGEML_TAG : return new Button ( parent ) ; default : throw new InvalidInputException ( "Invalid MessageML content at element \"" + tag + "\"" ) ; }
public class IconEnricher { /** * Iterates through all nodes of the given KeePass file and replace the * nodes with enriched icon data nodes . * @ param keePassFile * the KeePass file which should be iterated * @ return an enriched KeePass file */ public KeePassFile enrichNodesWithIconData ( KeePassFile keePassFile ) { } }
CustomIcons iconLibrary = keePassFile . getMeta ( ) . getCustomIcons ( ) ; GroupZipper zipper = new GroupZipper ( keePassFile ) ; Iterator < Group > iter = zipper . iterator ( ) ; while ( iter . hasNext ( ) ) { Group group = iter . next ( ) ; byte [ ] iconData = getIconData ( group . getCustomIconUuid ( ) , group . getIconId ( ) , iconLibrary ) ; Group groupWithIcon = new GroupBuilder ( group ) . iconData ( iconData ) . build ( ) ; zipper . replace ( groupWithIcon ) ; enrichEntriesWithIcons ( iconLibrary , group ) ; } return zipper . close ( ) ;
public class DRL5Lexer {
    /**
     * $ANTLR start "DOUBLE_AMPER"
     *
     * Generated ANTLR lexer rule: matches the two-character token "&amp;&amp;"
     * and emits it as DOUBLE_AMPER on the default channel.
     * NOTE: generated code — do not hand-edit the logic.
     */
    public final void mDOUBLE_AMPER() throws RecognitionException {
        try {
            int _type = DOUBLE_AMPER;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:271:5: ( '&&' )
            // src/main/resources/org/drools/compiler/lang/DRL5Lexer.g:271:7: '&&'
            {
                match("&&");
                // In backtracking mode a failed match returns without emitting a token.
                if (state.failed) return;
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class Inflector { /** * Forge a name from a class and a method . If an annotation is provided , then the method * content is used . * @ param cl Class to get the name * @ param m Method to get the name * @ param methodAnnotation The method annotation to override the normal forged name * @ return The name forge and humanize */ public static String forgeName ( Class cl , Method m , ProbeTest methodAnnotation ) { } }
return forgeName ( cl , m . getName ( ) , methodAnnotation ) ;
public class Category { /** * Return the level in effect for this category / logger . * The result is computed by simulation . * @ return */ public Level getEffectiveLevel ( ) { } }
if ( slf4jLogger . isTraceEnabled ( ) ) { return Level . TRACE ; } if ( slf4jLogger . isDebugEnabled ( ) ) { return Level . DEBUG ; } if ( slf4jLogger . isInfoEnabled ( ) ) { return Level . INFO ; } if ( slf4jLogger . isWarnEnabled ( ) ) { return Level . WARN ; } return Level . ERROR ;
public class TagSetMappingsToNAF {
    /**
     * Maps a CoNLL 2009 German POS tag (Stuttgart-Tuebingen tagset) to the
     * single-letter NAF POS tag. Unrecognized tags map to "O" (other).
     * The check order is significant and is preserved as-is: e.g. the exact
     * "NN" test must run before the "NE" prefix test.
     *
     * @param postag the source POS tag
     * @return the NAF POS tag letter
     */
    private static String mapGermanCoNLL09TagSetToNAF(final String postag) {
        final String nafTag;
        if (postag.startsWith("ADV")) {
            nafTag = "A";  // adverb
        } else if (postag.startsWith("KO")) {
            nafTag = "C";  // conjunction
        } else if (postag.equalsIgnoreCase("ART")) {
            nafTag = "D";  // determiner and predeterminer
        } else if (postag.startsWith("ADJ")) {
            nafTag = "G";  // adjective
        } else if (postag.equalsIgnoreCase("NN")) {
            nafTag = "N";  // common noun
        } else if (postag.startsWith("NE")) {
            nafTag = "R";  // proper noun
        } else if (postag.startsWith("AP")) {
            nafTag = "P";  // preposition
        } else if (postag.startsWith("PD") || postag.startsWith("PI")
                || postag.startsWith("PP") || postag.startsWith("PR")
                || postag.startsWith("PW") || postag.startsWith("PA")) {
            nafTag = "Q";  // pronoun
        } else if (postag.startsWith("V")) {
            nafTag = "V";  // verb
        } else {
            nafTag = "O";  // other
        }
        return nafTag;
    }
}
public class DefaultStorageStrategy { /** * Gets the currently stored token . * @ param envKey Environment key for token . * @ return Currently stored token instance or null . */ @ Override public OAuthToken get ( String envKey ) { } }
if ( ! oAuthToken . containsKey ( envKey ) ) return null ; return oAuthToken . get ( envKey ) ;
public class CachingJaxbLoaderImpl {
    /**
     * Returns the cached unmarshalled object, re-reading and re-unmarshalling
     * the backing XML only when the cache is stale. The optional callback is
     * invoked on a freshly unmarshalled object before it is cached.
     *
     * (non-Javadoc)
     * @see org.jasig.services.persondir.support.xml.CachingJaxbLoader#getUnmarshalledObject(org.jasig.services.persondir.support.xml.CachingJaxbLoader.UnmarshallingCallback)
     */
    @Override
    public T getUnmarshalledObject(final UnmarshallingCallback<T> callback) {
        // Only bother checking for a change if the object already exists
        Long lastModified = null;
        if (this.unmarshalledObject != null) {
            lastModified = this.getLastModified();
            // Return immediately if nothing has changed
            if (this.isCacheValid(lastModified)) {
                return this.unmarshalledObject;
            }
        }

        final InputStream xmlInputStream = this.getXmlInputStream();
        final T unmarshalledObject;
        try {
            final JAXBContext jaxbContext = this.getJAXBContext();
            final Unmarshaller unmarshaller = this.getUnmarshaller(jaxbContext);
            unmarshalledObject = this.unmarshal(xmlInputStream, unmarshaller);
        } finally {
            // FIX: the stream was previously leaked; JAXB does not close the
            // InputStream handed to Unmarshaller.unmarshal, so close it here.
            try {
                xmlInputStream.close();
            } catch (final java.io.IOException ioe) {
                // best-effort close: the payload has already been read (or
                // unmarshalling failed and that exception is propagating)
            }
        }

        if (callback != null) {
            callback.postProcessUnmarshalling(unmarshalledObject);
        }

        this.unmarshalledObject = unmarshalledObject;
        // Record the freshness timestamp so isCacheValid can short-circuit next time.
        if (lastModified != null) {
            this.lastModifiedTime = lastModified;
        } else {
            this.lastModifiedTime = System.currentTimeMillis();
        }

        return this.unmarshalledObject;
    }
}
public class EnterpriseContainerBase {
    /**
     * Adds each of the supplied files as a module, delegating one by one to
     * the single-module variant.
     *
     * {@inheritDoc}
     * @see org.jboss.shrinkwrap.api.container.EnterpriseContainer#addAsModules(java.io.File[])
     */
    @Override
    public T addAsModules(final File... resources) throws IllegalArgumentException {
        // Precondition: the varargs array itself must be supplied.
        Validate.notNull(resources, "resources must be specified");

        // Delegate each file to the single-module method.
        for (int i = 0; i < resources.length; i++) {
            this.addAsModule(resources[i]);
        }

        return this.covarientReturn();
    }
}
public class BigtableTableAdminGrpcClient { /** * { @ inheritDoc } */ @ Override public ListenableFuture < ListTablesResponse > listTablesAsync ( ListTablesRequest request ) { } }
return createUnaryListener ( request , listTablesRpc , request . getParent ( ) ) . getAsyncResult ( ) ;
public class LinkedTransferQueue {
    /**
     * Returns the first unmatched data node, or null if none.
     * Callers must recheck if the returned node's item field is null
     * or self-linked before using.
     */
    final Node firstDataNode() {
        // Lock-free traversal from head. Statement order is load-bearing:
        // each item is read once before being tested, and the traversal
        // restarts whenever it steps onto a node that has been taken off
        // the list (next == UNLINKED sentinel), since its links are stale.
        restartFromHead: for (;;) {
            for (Node p = head; p != null;) {
                Object item = p.item;
                if (p.isData) {
                    // Data node: live only while it still holds a real item.
                    // FORGOTTEN appears to be a sentinel for an already-consumed
                    // item — TODO confirm against the Node declaration.
                    if (item != null && item != FORGOTTEN)
                        return p;
                } else if (item == null)
                    // Unmatched request node reached: presumably no data node
                    // can follow in this queue state, so stop scanning.
                    break;
                if (UNLINKED == (p = p.next))
                    continue restartFromHead;
            }
            return null;
        }
    }
}
public class DeviceManager {
    /**
     * Check if an attribute or a command is polled.
     *
     * @param polledObject the name of the polled object (attribute or command)
     * @return true if polled
     * @throws DevFailed if the name matches neither an attribute nor a command
     */
    public boolean isPolled(final String polledObject) throws DevFailed {
        try {
            // Try the name as an attribute first.
            return AttributeGetterSetter.getAttribute(polledObject, device.getAttributeList()).isPolled();
        } catch (final DevFailed e) {
            // Not an attribute: fall back to interpreting the name as a command.
            // If the command lookup fails as well, its DevFailed propagates.
            return device.getCommand(polledObject).isPolled();
        }
    }
}
public class RRFedNonFedBudgetV1_1Generator { /** * This method gets Travel cost information including DomesticTravelCost , ForeignTravelCost and TotalTravelCost in the * BudgetYearDataType based on BudgetPeriodInfo for the RRFedNonFedBudget . * @ param periodInfo ( BudgetPeriodInfo ) budget period entry . * @ return Travel cost details corresponding to the BudgetPeriodInfo object . */ private Travel getTravel ( BudgetPeriodDto periodInfo ) { } }
Travel travel = Travel . Factory . newInstance ( ) ; if ( periodInfo != null ) { TotalDataType total = TotalDataType . Factory . newInstance ( ) ; if ( periodInfo . getDomesticTravelCost ( ) != null ) { total . setFederal ( periodInfo . getDomesticTravelCost ( ) . bigDecimalValue ( ) ) ; } if ( periodInfo . getDomesticTravelCostSharing ( ) != null ) { total . setNonFederal ( periodInfo . getDomesticTravelCostSharing ( ) . bigDecimalValue ( ) ) ; if ( periodInfo . getDomesticTravelCost ( ) != null ) { total . setTotalFedNonFed ( periodInfo . getDomesticTravelCost ( ) . add ( periodInfo . getDomesticTravelCostSharing ( ) ) . bigDecimalValue ( ) ) ; } else { total . setTotalFedNonFed ( periodInfo . getDomesticTravelCostSharing ( ) . bigDecimalValue ( ) ) ; } } travel . setDomesticTravelCost ( total ) ; TotalDataType totalForeign = TotalDataType . Factory . newInstance ( ) ; if ( periodInfo . getForeignTravelCost ( ) != null ) { totalForeign . setFederal ( periodInfo . getForeignTravelCost ( ) . bigDecimalValue ( ) ) ; } if ( periodInfo . getForeignTravelCostSharing ( ) != null ) { totalForeign . setNonFederal ( periodInfo . getForeignTravelCostSharing ( ) . bigDecimalValue ( ) ) ; if ( periodInfo . getForeignTravelCost ( ) != null ) { totalForeign . setTotalFedNonFed ( periodInfo . getForeignTravelCost ( ) . add ( periodInfo . getForeignTravelCostSharing ( ) ) . bigDecimalValue ( ) ) ; } else { totalForeign . setTotalFedNonFed ( periodInfo . getForeignTravelCostSharing ( ) . bigDecimalValue ( ) ) ; } } travel . setForeignTravelCost ( totalForeign ) ; SummaryDataType summary = SummaryDataType . Factory . newInstance ( ) ; if ( periodInfo . getTotalTravelCost ( ) != null ) { summary . setFederalSummary ( periodInfo . getTotalTravelCost ( ) . bigDecimalValue ( ) ) ; } if ( periodInfo . getTotalTravelCostSharing ( ) != null ) { summary . setNonFederalSummary ( periodInfo . getTotalTravelCostSharing ( ) . bigDecimalValue ( ) ) ; if ( periodInfo . 
getTotalTravelCost ( ) != null ) { summary . setTotalFedNonFedSummary ( periodInfo . getTotalTravelCost ( ) . add ( periodInfo . getTotalTravelCostSharing ( ) ) . bigDecimalValue ( ) ) ; } else { summary . setTotalFedNonFedSummary ( periodInfo . getTotalTravelCostSharing ( ) . bigDecimalValue ( ) ) ; } } travel . setTotalTravelCost ( summary ) ; } return travel ;
public class PortletRequestContextImpl { /** * ( non - Javadoc ) * @ see org . apache . pluto . container . PortletRequestContext # getAttribute ( java . lang . String , javax . servlet . ServletRequest ) */ @ Override public Object getAttribute ( String name , ServletRequest request ) { } }
if ( this . isServletContainerManagedAttribute ( name ) ) { return request . getAttribute ( name ) ; } return null ;
public class MapApi { /** * Get list value by path . * @ param < T > list value type * @ param clazz type of value * @ param map subject * @ param path nodes to walk in map * @ return value */ public static < T > List < T > getNullableList ( final Map map , final Class < T > clazz , final Object ... path ) { } }
return ( List < T > ) getNullable ( map , List . class , path ) ;
public class MementoUtils {
    /**
     * Adds a {@code Vary: accept-datetime} header and a {@code Link} header for
     * a timegate response. See
     * {@link #generateMementoLinkHeaders(CaptureSearchResults, WaybackRequest, boolean, boolean)}
     * for details of the {@code Link} header.
     *
     * @param response servlet response to decorate
     * @param results capture search results backing the link header
     * @param wbr current wayback request
     * @param includeOriginal passed through to the link-header builder
     */
    public static void addTimegateHeaders(HttpServletResponse response, CaptureSearchResults results, WaybackRequest wbr, boolean includeOriginal) {
        // Timegate responses vary on the Accept-Datetime request header.
        addVaryHeader(response);
        // NOTE(review): the hard-coded false presumably distinguishes a timegate
        // response from a memento response when building the Link header —
        // confirm against addLinkHeader/generateMementoLinkHeaders.
        addLinkHeader(response, results, wbr, false, includeOriginal);
    }
}
public class ConverterForOPML10 {
    /**
     * Creates a real OPML feed with a deep copy/conversion of the values of a
     * SyndFeedImpl. Outline hierarchy is reconstructed from "node."/"parent."
     * categories carried in the URI_TREE taxonomy; outline attributes come from
     * the URI_ATTRIBUTE taxonomy; remaining categories are joined into a single
     * "category" attribute.
     *
     * @param syndFeed SyndFeedImpl to copy/convert values from
     * @return a real feed with copied/converted values of the SyndFeedImpl
     */
    @Override
    public WireFeed createRealFeed(final SyndFeed syndFeed) {
        final List<SyndEntry> entries = Collections.synchronizedList(syndFeed.getEntries());
        // Outlines indexed by their tree-node key so children can find parents.
        final HashMap<String, Outline> entriesByNode = new HashMap<String, Outline>();
        // this will hold entries that we can't parent the first time.
        final ArrayList<OutlineHolder> doAfterPass = new ArrayList<OutlineHolder>();
        // this holds root level outlines;
        final ArrayList<Outline> root = new ArrayList<Outline>();

        for (int i = 0; i < entries.size(); i++) {
            final SyndEntry entry = entries.get(i);
            final Outline o = new Outline();
            final List<SyndCategory> cats = Collections.synchronizedList(entry.getCategories());
            boolean parentFound = false;
            final StringBuffer category = new StringBuffer();

            for (int j = 0; j < cats.size(); j++) {
                final SyndCategory cat = cats.get(j);
                if (cat.getTaxonomyUri() != null && cat.getTaxonomyUri().equals(URI_TREE)) {
                    // Tree taxonomy: category names look like "node.X" / "parent.X".
                    // nodeVal keeps the leading dot (lastIndexOf, not +1); this is
                    // consistent on both the put and the get side, so lookups match.
                    final String nodeVal = cat.getName().substring(cat.getName().lastIndexOf("."), cat.getName().length());
                    if (cat.getName().startsWith("node.")) {
                        entriesByNode.put(nodeVal, o);
                    } else if (cat.getName().startsWith("parent.")) {
                        parentFound = true;
                        final Outline parent = entriesByNode.get(nodeVal);
                        if (parent != null) {
                            parent.getChildren().add(o);
                        } else {
                            // Parent not seen yet: defer to the second pass below.
                            doAfterPass.add(new OutlineHolder(o, nodeVal));
                        }
                    }
                } else if (cat.getTaxonomyUri() != null && cat.getTaxonomyUri().startsWith(URI_ATTRIBUTE)) {
                    // Attribute taxonomy: attribute name is the URI fragment after '#'.
                    final String name = cat.getTaxonomyUri().substring(cat.getTaxonomyUri().indexOf("#") + 1, cat.getTaxonomyUri().length());
                    o.getAttributes().add(new Attribute(name, cat.getName()));
                } else {
                    // Plain category: accumulate into a comma-separated list.
                    if (category.length() > 0) {
                        category.append(", ");
                    }
                    category.append(cat.getName());
                }
            }

            if (!parentFound) {
                root.add(o);
            }
            if (category.length() > 0) {
                o.getAttributes().add(new Attribute("category", category.toString()));
            }

            final List<SyndLink> links = Collections.synchronizedList(entry.getLinks());
            // final String entryLink = entry.getLink();
            for (int j = 0; j < links.size(); j++) {
                final SyndLink link = links.get(j);
                // if (link.getHref().equals(entryLink)) {
                // Feed links ("alternate" rel with an RSS/Atom media type) become
                // type="rss" outlines with an xmlUrl attribute.
                if (link.getType() != null && link.getRel() != null && link.getRel().equals("alternate")
                        && (link.getType().equals("application/rss+xml") || link.getType().equals("application/atom+xml"))) {
                    o.setType("rss");
                    if (o.getXmlUrl() == null) {
                        o.getAttributes().add(new Attribute("xmlUrl", link.getHref()));
                    }
                } else if (link.getType() != null && link.getType().equals("text/html")) {
                    if (o.getHtmlUrl() == null) {
                        o.getAttributes().add(new Attribute("htmlUrl", link.getHref()));
                    }
                } else {
                    o.setType(link.getType());
                }
            }

            // Plain link outlines carry the title in "text"; typed ones in "title".
            if (o.getType() == null || o.getType().equals("link")) {
                o.setText(entry.getTitle());
            } else {
                o.setTitle(entry.getTitle());
            }
            if (o.getText() == null && entry.getDescription() != null) {
                o.setText(entry.getDescription().getValue());
            }
        }

        // Do back and parenting for things we missed.
        for (int i = 0; i < doAfterPass.size(); i++) {
            final OutlineHolder o = doAfterPass.get(i);
            final Outline parent = entriesByNode.get(o.parent);
            if (parent == null) {
                // Parent never materialized: keep the outline at root level.
                root.add(o.outline);
                LOG.warn("Unable to find parent node: {}", o.parent);
            } else {
                parent.getChildren().add(o.outline);
            }
        }

        final Opml opml = new Opml();
        opml.setFeedType(getType());
        opml.setCreated(syndFeed.getPublishedDate());
        opml.setTitle(syndFeed.getTitle());

        // Owner fields come from the author matching the feed's author name
        // (or from every author when the feed-level author is unset).
        final List<SyndPerson> authors = Collections.synchronizedList(syndFeed.getAuthors());
        for (int i = 0; i < authors.size(); i++) {
            final SyndPerson p = authors.get(i);
            if (syndFeed.getAuthor() == null || syndFeed.getAuthor().equals(p.getName())) {
                opml.setOwnerName(p.getName());
                opml.setOwnerEmail(p.getEmail());
                opml.setOwnerId(p.getUri());
            }
        }

        opml.setOutlines(root);
        return opml;
    }
}