signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class WorkBookAccesser {
    /**
     * Merges the cells of the given region in the given sheet.
     *
     * @param sheet    the sheet to operate on
     * @param firstRow first row of the region to merge (inclusive)
     * @param lastRow  last row of the region to merge (inclusive)
     * @param firstCol first column of the region to merge (inclusive)
     * @param lastCol  last column of the region to merge (inclusive)
     */
    private void mergedRegion(Sheet sheet, int firstRow, int lastRow, int firstCol, int lastCol) {
        // POI merges the whole rectangular region in a single call.
        sheet.addMergedRegion(new CellRangeAddress(firstRow, lastRow, firstCol, lastCol));
    }
}
public class ImagingObjectSelection {
    /**
     * Syntactic sugar: creates a new {@code StudyComponent}, appends it to the
     * {@code study} list (created lazily on first use), and returns it.
     *
     * @return the newly created and registered StudyComponent
     */
    public StudyComponent addStudy() {
        StudyComponent t = new StudyComponent();
        if (this.study == null)
            this.study = new ArrayList<StudyComponent>();
        this.study.add(t);
        return t;
    }
}
public class IcosepOctTreeNode {
    /**
     * Clear the tree.
     * <p>Caution: this method also destroys the links between the child nodes
     * inside the tree. If you want to unlink only the first-level child node
     * from this node but leave the rest of the tree unchanged, call
     * <code>setChildAt(i, null)</code> instead.
     */
    @Override
    public void clear() {
        super.clear();
        if (this.nicosep != null) {
            // Detach the icosep child from this node first, then clear it
            // recursively so its whole subtree is unlinked as well.
            final N child = this.nicosep;
            setIcosepChild(null);
            child.clear();
        }
    }
}
public class RejectSkillRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param rejectSkillRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller used to serialize the bound fields
     */
    public void marshall(RejectSkillRequest rejectSkillRequest, ProtocolMarshaller protocolMarshaller) {
        if (rejectSkillRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(rejectSkillRequest.getSkillId(), SKILLID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SitPathUtils {
    /**
     * Builds the URL to open.
     *
     * @param baseUrl the base URL; may be empty, a file path, or an http(s) URL
     * @param path    the path relative to the base URL, or an absolute http(s) URL
     * @return the resulting URL string
     */
    public static String buildUrl(String baseUrl, String path) {
        // An absolute http(s) path wins outright, ignoring the base URL.
        if (path.startsWith("http:") || path.startsWith("https:")) {
            return path;
        }
        if (StringUtils.isEmpty(baseUrl)) {
            // No base URL: resolve against the local default base location.
            return file2url(concatPath(LOCAL_BASE_URL, path));
        } else {
            if (baseUrl.startsWith("http:") || baseUrl.startsWith("https:")) {
                return concatPath(baseUrl, path);
            } else {
                // The base is a file path; convert it to a file URL before joining.
                return concatPath(file2url(baseUrl), path);
            }
        }
    }
}
public class SignatureVerifier {
    /**
     * Transforms the {@link JsonNode} into a map to integrate with the
     * {@link SignatureChecker} utility.
     *
     * @param messageJson JSON of message
     * @return map of field name to the field's text value
     */
    private Map<String, String> toMap(JsonNode messageJson) {
        Map<String, String> fields = new HashMap<String, String>(messageJson.size());
        Iterator<Map.Entry<String, JsonNode>> jsonFields = messageJson.fields();
        while (jsonFields.hasNext()) {
            Map.Entry<String, JsonNode> next = jsonFields.next();
            // asText() flattens every value type (numbers, booleans, ...) to its string form.
            fields.put(next.getKey(), next.getValue().asText());
        }
        return fields;
    }
}
public class JobScheduleOperations {
    /**
     * Checks whether the specified job schedule exists.
     *
     * @param jobScheduleId the ID of the job schedule to check
     * @param additionalBehaviors a collection of {@link BatchClientBehavior} instances that are applied to the Batch service request
     * @return true if the specified job schedule exists; otherwise, false
     * @throws BatchErrorException when an error response is received from the Batch service
     * @throws IOException when there is an error in serialization/deserialization of data sent to/received from the Batch service
     */
    public boolean existsJobSchedule(String jobScheduleId, Iterable<BatchClientBehavior> additionalBehaviors) throws BatchErrorException, IOException {
        JobScheduleExistsOptions options = new JobScheduleExistsOptions();
        // Merge client-level and call-level behaviors onto the request options.
        BehaviorManager bhMgr = new BehaviorManager(this.customBehaviors(), additionalBehaviors);
        bhMgr.applyRequestBehaviors(options);
        return this.parentBatchClient.protocolLayer().jobSchedules().exists(jobScheduleId, options);
    }
}
public class ErrorCorrection {
    /**
     * Creates the ECC200 error correction for an encoded message.
     *
     * @param codewords  the codewords
     * @param symbolInfo information about the symbol to be encoded
     * @return the codewords with interleaved error correction
     * @throws IllegalArgumentException if the codeword count does not match the symbol's data capacity
     */
    public static String encodeECC200(String codewords, SymbolInfo symbolInfo) {
        if (codewords.length() != symbolInfo.getDataCapacity()) {
            throw new IllegalArgumentException("The number of codewords does not match the selected symbol");
        }
        StringBuilder sb = new StringBuilder(symbolInfo.getDataCapacity() + symbolInfo.getErrorCodewords());
        sb.append(codewords);
        int blockCount = symbolInfo.getInterleavedBlockCount();
        if (blockCount == 1) {
            // Single block: the ECC is simply appended after the data codewords.
            String ecc = createECCBlock(codewords, symbolInfo.getErrorCodewords());
            sb.append(ecc);
        } else {
            // Multiple interleaved blocks: pre-size the buffer so ECC chars can be
            // written at their interleaved positions via setCharAt below.
            sb.setLength(sb.capacity());
            int[] dataSizes = new int[blockCount];
            int[] errorSizes = new int[blockCount];
            for (int i = 0; i < blockCount; i++) {
                dataSizes[i] = symbolInfo.getDataLengthForInterleavedBlock(i + 1);
                errorSizes[i] = symbolInfo.getErrorLengthForInterleavedBlock(i + 1);
            }
            for (int block = 0; block < blockCount; block++) {
                // Gather every blockCount-th data codeword belonging to this block.
                StringBuilder temp = new StringBuilder(dataSizes[block]);
                for (int d = block; d < symbolInfo.getDataCapacity(); d += blockCount) {
                    temp.append(codewords.charAt(d));
                }
                String ecc = createECCBlock(temp.toString(), errorSizes[block]);
                int pos = 0;
                // Scatter this block's ECC back into its interleaved slots after the data.
                for (int e = block; e < errorSizes[block] * blockCount; e += blockCount) {
                    sb.setCharAt(symbolInfo.getDataCapacity() + e, ecc.charAt(pos++));
                }
            }
        }
        return sb.toString();
    }
}
public class CPInstanceUtil {
    /**
     * Returns the first cp instance in the ordered set where CPDefinitionId = &#63; and status = &#63;.
     *
     * @param CPDefinitionId the cp definition ID
     * @param status the status
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp instance, or <code>null</code> if a matching cp instance could not be found
     */
    public static CPInstance fetchByC_ST_First(long CPDefinitionId, int status, OrderByComparator<CPInstance> orderByComparator) {
        // Thin static facade over the persistence layer.
        return getPersistence().fetchByC_ST_First(CPDefinitionId, status, orderByComparator);
    }
}
public class Setup { /** * Look for template in the jar resources , otherwise look for it on filepath * @ param filename template name * @ return file * @ throws java . io . IOException on io error */ private File getTemplateFile ( String filename ) throws IOException { } }
File templateFile = null ; final String resource = TEMPLATE_RESOURCES_PATH + "/" + filename ; InputStream is = Setup . class . getClassLoader ( ) . getResourceAsStream ( resource ) ; if ( null == is ) { throw new RuntimeException ( "Unable to load required template: " + resource ) ; } templateFile = File . createTempFile ( "temp" , filename ) ; templateFile . deleteOnExit ( ) ; try { return copyToNativeLineEndings ( is , templateFile ) ; } finally { is . close ( ) ; }
public class AmazonLightsailClient {
    /**
     * Returns a Boolean value indicating whether your Lightsail VPC is peered.
     *
     * @param request the IsVpcPeered request
     * @return result of the IsVpcPeered operation returned by the service
     * @throws ServiceException a general service exception
     * @throws InvalidInputException when user input does not conform to the validation rules of an input field.
     *         Domain-related APIs are only available in the N. Virginia (us-east-1) Region; set your AWS Region
     *         configuration to us-east-1 to create, view, or edit these resources.
     * @throws NotFoundException when Lightsail cannot find a resource
     * @throws OperationFailureException when an operation fails to execute
     * @throws AccessDeniedException when the user cannot be authenticated or uses invalid credentials to access a resource
     * @throws AccountSetupInProgressException when an account is still in the setup-in-progress state
     * @throws UnauthenticatedException when the user has not been authenticated
     * @sample AmazonLightsail.IsVpcPeered
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lightsail-2016-11-28/IsVpcPeered" target="_top">AWS API Documentation</a>
     */
    @Override
    public IsVpcPeeredResult isVpcPeered(IsVpcPeeredRequest request) {
        // Apply client-side request handlers before delegating to the execute path.
        request = beforeClientExecution(request);
        return executeIsVpcPeered(request);
    }
}
public class FileSnap { /** * find the most recent snapshot in the database . * @ return the file containing the most recent snapshot */ @ Override public File findMostRecentSnapshot ( ) throws IOException { } }
List < File > files = findNValidSnapshots ( 1 ) ; if ( files . size ( ) == 0 ) { return null ; } return files . get ( 0 ) ;
public class DocBookUtilities {
    /**
     * Basic method to clean a date string to fix any partial day names. It currently
     * cleans "Thur", "Thurs" and "Tues".
     *
     * @param dateString the date string to clean; may be null
     * @return the cleaned date string, or null if the input was null
     */
    private static String cleanDate(final String dateString) {
        if (dateString == null) {
            return dateString;
        }
        String retValue = dateString;
        // Normalize non-standard day abbreviations to the standard three-letter forms.
        retValue = THURSDAY_DATE_RE.matcher(retValue).replaceAll("Thu");
        retValue = TUESDAY_DATE_RE.matcher(retValue).replaceAll("Tue");
        return retValue;
    }
}
public class CommercePriceListUtil {
    /**
     * Returns the commerce price list where parentCommercePriceListId = &#63; or throws a
     * {@link NoSuchPriceListException} if it could not be found.
     *
     * @param parentCommercePriceListId the parent commerce price list ID
     * @return the matching commerce price list
     * @throws NoSuchPriceListException if a matching commerce price list could not be found
     */
    public static CommercePriceList findByParentCommercePriceListId(long parentCommercePriceListId) throws com.liferay.commerce.price.list.exception.NoSuchPriceListException {
        // Thin static facade over the persistence layer.
        return getPersistence().findByParentCommercePriceListId(parentCommercePriceListId);
    }
}
public class ListCodeRepositoriesResult {
    /**
     * Sets the list of summaries of the Git repositories. Each summary specifies the
     * repository's name, Amazon Resource Name (ARN), creation time, last modified time,
     * and configuration information (including the URL location of the repository and the
     * ARN of the AWS Secrets Manager secret that contains the credentials used to access it).
     *
     * @param codeRepositorySummaryList the summaries to set; null clears the list
     */
    public void setCodeRepositorySummaryList(java.util.Collection<CodeRepositorySummary> codeRepositorySummaryList) {
        if (codeRepositorySummaryList == null) {
            this.codeRepositorySummaryList = null;
            return;
        }
        // Defensive copy so later mutations of the caller's collection do not leak in.
        this.codeRepositorySummaryList = new java.util.ArrayList<CodeRepositorySummary>(codeRepositorySummaryList);
    }
}
public class CmsSiteManagerImpl {
    /**
     * Adds a site.<p>
     *
     * @param cms the CMS object
     * @param site the site to add
     * @throws CmsException if something goes wrong
     */
    public void addSite(CmsObject cms, CmsSite site) throws CmsException {
        // check permissions
        if (OpenCms.getRunLevel() > OpenCms.RUNLEVEL_1_CORE_OBJECT) {
            // simple unit tests will have runlevel 1 and no CmsObject
            OpenCms.getRoleManager().checkRole(cms, CmsRole.DATABASE_MANAGER);
        }
        // un-freeze so the configuration can be modified
        m_frozen = false;
        // set aliases and parameters, they will be used in the addSite method;
        // this is necessary because of a digester workaround
        m_siteParams = site.getParameters();
        m_aliases = site.getAliases();
        String secureUrl = null;
        if (site.hasSecureServer()) {
            secureUrl = site.getSecureUrl();
        }
        // add the site (the overloaded variant consumes m_siteParams/m_aliases set above)
        addSite(
            site.getUrl(),
            site.getSiteRoot(),
            site.getTitle(),
            Float.toString(site.getPosition()),
            site.getErrorPage(),
            Boolean.toString(site.isWebserver()),
            site.getSSLMode().getXMLValue(),
            secureUrl,
            Boolean.toString(site.isExclusiveUrl()),
            Boolean.toString(site.isExclusiveError()),
            Boolean.toString(site.usesPermanentRedirects()));
        // re-initialize, will freeze the state when finished
        initialize(cms);
        OpenCms.writeConfiguration(CmsSitesConfiguration.class);
    }
}
public class EditText { /** * Set the TextView ' s elegant height metrics flag . This setting selects font * variants that have not been compacted to fit Latin - based vertical * metrics , and also increases top and bottom bounds to provide more space . * @ param elegant set the paint ' s elegant metrics flag . * @ attr ref android . R . styleable # TextView _ elegantTextHeight */ @ TargetApi ( Build . VERSION_CODES . LOLLIPOP ) public void setElegantTextHeight ( boolean elegant ) { } }
if ( Build . VERSION . SDK_INT >= Build . VERSION_CODES . LOLLIPOP ) mInputView . setElegantTextHeight ( elegant ) ;
public class ClientFactory {
    /**
     * Accept a {@link IMessageSession} from service bus using the client settings with the
     * specified session id. Session id can be null; if null, the service will return the
     * first available session.
     *
     * @param messagingFactory messaging factory (which represents a connection) on which the session receiver needs to be created
     * @param entityPath path of entity
     * @param sessionId session id; if null, the service returns the first available session, otherwise the specified session
     * @param receiveMode PeekLock or ReceiveAndDelete
     * @return IMessageSession instance
     * @throws InterruptedException if the current thread was interrupted while waiting
     * @throws ServiceBusException if the session cannot be accepted
     */
    public static IMessageSession acceptSessionFromEntityPath(MessagingFactory messagingFactory, String entityPath, String sessionId, ReceiveMode receiveMode) throws InterruptedException, ServiceBusException {
        // Synchronously wait on the async variant so both paths share one implementation.
        return Utils.completeFuture(acceptSessionFromEntityPathAsync(messagingFactory, entityPath, sessionId, receiveMode));
    }
}
public class OBDAModelStatisticsPanel {
    /**
     * Builds the panel's widget tree: a summary row at the top and a tabbed pane of
     * per-data-source statistics in the center.
     * <editor-fold defaultstate="collapsed" desc="Generated Code">// GEN-BEGIN:initComponents
     */
    private void initComponents() {
        pnlSummary = new javax.swing.JPanel();
        lblSummary = new javax.swing.JLabel();
        lblSummaryValue = new javax.swing.JLabel();
        pnlTriplesSummary = new javax.swing.JPanel();
        tabDataSources = new javax.swing.JTabbedPane();
        setFont(new java.awt.Font("Arial", 0, 18));
        setMinimumSize(new java.awt.Dimension(520, 400));
        setPreferredSize(new java.awt.Dimension(520, 400));
        setLayout(new java.awt.BorderLayout());
        // Summary row: "Total triples produced:" caption plus the value label.
        pnlSummary.setMinimumSize(new java.awt.Dimension(156, 23));
        pnlSummary.setPreferredSize(new java.awt.Dimension(156, 23));
        pnlSummary.setLayout(new java.awt.FlowLayout(java.awt.FlowLayout.LEFT));
        lblSummary.setFont(new java.awt.Font("Tahoma", 1, 11));
        lblSummary.setText("Total triples produced:");
        pnlSummary.add(lblSummary);
        lblSummaryValue.setFont(new java.awt.Font("Tahoma", 1, 11));
        pnlSummary.add(lblSummaryValue);
        add(pnlSummary, java.awt.BorderLayout.NORTH);
        // Center area: scrollable tabs placed along the bottom edge.
        pnlTriplesSummary.setLayout(new java.awt.BorderLayout());
        tabDataSources.setTabLayoutPolicy(javax.swing.JTabbedPane.SCROLL_TAB_LAYOUT);
        tabDataSources.setTabPlacement(javax.swing.JTabbedPane.BOTTOM);
        pnlTriplesSummary.add(tabDataSources, java.awt.BorderLayout.CENTER);
        add(pnlTriplesSummary, java.awt.BorderLayout.CENTER);
    }
}
public class Utils {
    /**
     * Common part for appendEscapedIdentifier.
     *
     * @param sbuf  either StringBuffer or StringBuilder as we do not expect any
     *              IOException to be thrown
     * @param value value to append
     * @throws SQLException if the value contains a zero byte, or if appending
     *         unexpectedly raises an IOException
     */
    private static void doAppendEscapedIdentifier(Appendable sbuf, String value) throws SQLException {
        try {
            sbuf.append('"');
            for (int i = 0; i < value.length(); ++i) {
                char ch = value.charAt(i);
                if (ch == '\0') {
                    // NUL bytes cannot be represented in identifiers at all.
                    throw new PSQLException(GT.tr("Zero bytes may not occur in identifiers."), PSQLState.INVALID_PARAMETER_VALUE);
                }
                if (ch == '"') {
                    // Embedded double quotes are escaped by doubling them.
                    sbuf.append(ch);
                }
                sbuf.append(ch);
            }
            sbuf.append('"');
        } catch (IOException e) {
            throw new PSQLException(GT.tr("No IOException expected from StringBuffer or StringBuilder"), PSQLState.UNEXPECTED_ERROR, e);
        }
    }
}
public class HexUtil {
    /**
     * Converts a hex encoded String into a byte vector.
     * <p>Both upper- and lower-case hex digits are accepted; a trailing unpaired
     * digit is ignored, matching the original pairwise decoding.
     *
     * @param str the String to be decoded; may be null
     * @return a byte vector representing the String, or null if {@code str} is null
     */
    public static byte[] hexToBytes(String str) {
        if (str == null)
            return null;
        // Latin-1 maps each char to exactly one byte; the StandardCharsets constant
        // replaces the charset-name string lookup, which could never actually throw
        // UnsupportedEncodingException but forced a dead try/catch.
        byte[] hexChars = str.toUpperCase().getBytes(java.nio.charset.StandardCharsets.ISO_8859_1);
        int size = hexChars.length;
        byte[] bytes = new byte[size / 2];
        int rIndex = 0;
        // Convert pairs of hex digits to bytes.
        for (int i = 0; i + 1 < size; i += 2) {
            int first = hexChars[i];
            first = (first < 58 ? first - 48 : first - 55) * 16; // '0'-'9' vs 'A'-'F'
            int second = hexChars[i + 1];
            second = second < 58 ? second - 48 : second - 55;    // '0'-'9' vs 'A'-'F'
            // The narrowing cast performs the two's-complement wrap for values > 127;
            // the original's "total = 256 + total" adjustment was an incorrect no-op
            // (it should have subtracted 256, but the cast masked the error).
            bytes[rIndex++] = (byte) (first + second);
        }
        return bytes;
    }
}
public class PngProcessor {
    /**
     * Inflate (decompress) the compressed image data.
     *
     * @param bytes a stream containing the compressed image data
     * @return a byte array stream containing the uncompressed data
     * @throws IOException if decompression fails
     */
    public PngByteArrayOutputStream inflate(PngByteArrayOutputStream bytes) throws IOException {
        // NOTE(review): the returned stream is closed by try-with-resources before the
        // caller sees it — assumes close() is a no-op for PngByteArrayOutputStream
        // (as with ByteArrayOutputStream); confirm.
        try (final PngByteArrayOutputStream inflatedOut = new PngByteArrayOutputStream();
                final InflaterInputStream inflater = new InflaterInputStream(new ByteArrayInputStream(bytes.get(), 0, bytes.len()))) {
            int readLength;
            final byte[] block = new byte[8192];
            // Pump the inflater in 8 KiB chunks until the stream is exhausted.
            while ((readLength = inflater.read(block)) != -1) {
                inflatedOut.write(block, 0, readLength);
            }
            return inflatedOut;
        }
    }
}
public class InternationalizedController {
    /**
     * Returns a localized summary of the files in the "conf" directory, formatted
     * according to the request's preferred languages.
     * end::retrieve-message-request[]
     */
    @Route(method = HttpMethod.GET, uri = "internationalization/directory")
    public Result format() {
        File dir = new File("conf");
        // NOTE(review): dir.list() returns null when "conf" is missing or is not a
        // directory, which would NPE here — confirm the directory always exists.
        return ok(i18n.get(request().languages(), "files.summary", dir.list().length, dir.getName())).as(MimeTypes.TEXT);
    }
}
public class FunctionArgumentInjector {
    /**
     * Bootstrap a traversal to look for parameters referenced after a non-local side-effect.
     * NOTE: This assumes no inner functions.
     *
     * @param parameters the set of parameter names
     * @param root the function code block
     * @return the subset of parameters referenced after the first seen non-local side-effect
     */
    private static ImmutableSet<String> findParametersReferencedAfterSideEffect(ImmutableSet<String> parameters, Node root) {
        // TODO(johnlenz): Consider using scope for this.
        // Parameters plus every name declared inside the block count as "local".
        Set<String> locals = new HashSet<>(parameters);
        gatherLocalNames(root, locals);
        ReferencedAfterSideEffect collector = new ReferencedAfterSideEffect(parameters, ImmutableSet.copyOf(locals));
        NodeUtil.visitPostOrder(root, collector, collector);
        return collector.getResults();
    }
}
public class WebLocatorAbstractBuilder {
    /**
     * <p><b>Used for finding element process (to generate xpath address)</b></p>
     *
     * @param labelTag label tag element
     * @param <T> the element which calls this method
     * @return this element, for fluent chaining
     */
    @SuppressWarnings("unchecked")
    public <T extends WebLocatorAbstractBuilder> T setLabelTag(final String labelTag) {
        pathBuilder.setLabelTag(labelTag);
        // Unchecked cast is the builder-pattern convention: the method always returns `this`.
        return (T) this;
    }
}
public class CompositeRecordReader {
    /**
     * Create a value to be used internally for joins.
     *
     * @return a tuple holding one freshly created value per child reader
     */
    protected TupleWritable createInternalValue() {
        // One value slot per child reader, in child order.
        Writable[] vals = new Writable[kids.length];
        for (int i = 0; i < vals.length; ++i) {
            vals[i] = kids[i].createValue();
        }
        return new TupleWritable(vals);
    }
}
public class ChessboardCornerClusterFinder {
    /**
     * Use nearest neighbor search to find closest corners. Split those into two groups,
     * parallel and perpendicular, storing each accepted match as an edge on the target
     * vertex. Also records the target's typical neighbor distance for later ambiguity checks.
     */
    void findVertexNeighbors(Vertex target, List<ChessboardCorner> corners) {
        ChessboardCorner targetCorner = corners.get(target.index);
        // NN search distances are Euclidean squared, so square the configured radius
        // (unless it is the MAX_VALUE sentinel meaning "unbounded").
        double maxDist = Double.MAX_VALUE == maxNeighborDistance ? maxNeighborDistance : maxNeighborDistance * maxNeighborDistance;
        nnSearch.findNearest(corners.get(target.index), maxDist, maxNeighbors, nnResults);
        // Store distances here to later find the distance of the closest neighbors.
        distanceTmp.reset();
        for (int i = 0; i < nnResults.size; i++) {
            NnData<ChessboardCorner> r = nnResults.get(i);
            // Skip the target itself.
            if (r.index == target.index)
                continue;
            distanceTmp.add(r.distance);
            double oriDiff = UtilAngle.distHalf(targetCorner.orientation, r.point.orientation);
            Edge edge = edges.grow();
            boolean parallel;
            if (oriDiff <= orientationTol) {
                // orientations agree: candidate parallel neighbor
                parallel = true;
            } else if (Math.abs(oriDiff - Math.PI / 2.0) <= orientationTol) {
                // orientations differ by ~90 degrees: candidate perpendicular neighbor
                parallel = false;
            } else {
                // Neither parallel nor perpendicular: discard the speculative edge.
                edges.removeTail();
                continue;
            }
            // Use the relative angles of orientation and direction to prune more obviously bad matches.
            double dx = r.point.x - targetCorner.x;
            double dy = r.point.y - targetCorner.y;
            edge.distance = Math.sqrt(r.distance);
            edge.dst = vertexes.get(r.index);
            edge.direction = Math.atan2(dy, dx);
            double direction180 = UtilAngle.boundHalf(edge.direction);
            double directionDiff = UtilAngle.distHalf(direction180, r.point.orientation);
            boolean remove;
            EdgeSet edgeSet;
            if (parallel) {
                // Test to see if direction and orientation are aligned or off by 90 degrees.
                remove = directionDiff > 2 * directionTol && Math.abs(directionDiff - Math.PI / 2.0) > 2 * directionTol;
                edgeSet = target.parallel;
            } else {
                // Perpendicular neighbors should sit at a ~45 degree angle.
                remove = Math.abs(directionDiff - Math.PI / 4.0) > 2 * directionTol;
                edgeSet = target.perpendicular;
            }
            if (remove) {
                edges.removeTail();
                continue;
            }
            edgeSet.add(edge);
        }
        // Compute the distance of the closest neighbors. This is used later on to identify
        // ambiguous corners. If it's a graph corner there should be at least 3 right next
        // to the node.
        if (distanceTmp.size == 0) {
            target.neighborDistance = 0;
        } else {
            sorter.sort(distanceTmp.data, distanceTmp.size);
            int idx = Math.min(3, distanceTmp.size - 1);
            target.neighborDistance = Math.sqrt(distanceTmp.data[idx]); // NN distance is Euclidean squared
        }
    }
}
public class NamedParameterParser {
    /**
     * Parse the SQL statement and locate any placeholders or named parameters.
     * Named parameters are substituted for a JDBC placeholder.
     *
     * @param sql the SQL statement
     * @return the parsed statement, represented as ParsedSql instance
     */
    public static ParsedSql parseSqlStatement(final String sql) {
        Assert.notNull(sql, "SQL must not be null");
        Set<String> namedParameters = new HashSet<>();
        String sqlToUse = sql;
        List<ParameterHolder> parameterList = new ArrayList<>();
        char[] statement = sql.toCharArray();
        int namedParameterCount = 0;
        int unnamedParameterCount = 0;
        int totalParameterCount = 0;
        int escapes = 0;
        int i = 0;
        while (i < statement.length) {
            int skipToPosition;
            // Skip past comments and quoted literals before inspecting the next char.
            while (i < statement.length) {
                skipToPosition = skipCommentsAndQuotes(statement, i);
                if (i == skipToPosition) {
                    break;
                } else {
                    i = skipToPosition;
                }
            }
            if (i >= statement.length) {
                break;
            }
            char c = statement[i];
            if (c == ':' || c == '&') {
                int j = i + 1;
                if (j < statement.length && statement[j] == ':' && c == ':') {
                    // Postgres-style "::" casting operator should be skipped
                    i = i + 2;
                    continue;
                }
                String parameter;
                if (j < statement.length && c == ':' && statement[j] == '{') {
                    // :{x} style parameter
                    while (j < statement.length && !('}' == statement[j])) {
                        j++;
                        // NOTE(review): statement[j] is read here before re-checking
                        // j < statement.length; an unterminated "{" at the very end of
                        // the input looks like it could index out of bounds — confirm.
                        if (':' == statement[j] || '{' == statement[j]) {
                            throw new CommonRuntimeException("Parameter name contains invalid character '" + statement[j] + "' at position " + i + " in statement: " + sql);
                        }
                    }
                    if (j >= statement.length) {
                        throw new CommonRuntimeException("Non-terminated named parameter declaration at position " + i + " in statement: " + sql);
                    }
                    if (j - i > 3) {
                        // Only non-empty names count: ":{" plus at least one char plus "}".
                        parameter = sql.substring(i + 2, j);
                        namedParameterCount = addNewNamedParameter(namedParameters, namedParameterCount, parameter);
                        totalParameterCount = addNamedParameter(parameterList, totalParameterCount, escapes, i, j + 1, parameter);
                    }
                    j++;
                } else {
                    // Plain :name or &name parameter: scan until a separator char.
                    while (j < statement.length && !isParameterSeparator(statement[j])) {
                        j++;
                    }
                    if (j - i > 1) {
                        parameter = sql.substring(i + 1, j);
                        namedParameterCount = addNewNamedParameter(namedParameters, namedParameterCount, parameter);
                        totalParameterCount = addNamedParameter(parameterList, totalParameterCount, escapes, i, j, parameter);
                    }
                }
                i = j - 1;
            } else {
                if (c == '\\') {
                    int j = i + 1;
                    if (j < statement.length && statement[j] == ':') {
                        // Escaped ":" should be skipped; drop the backslash from the SQL to use.
                        sqlToUse = sqlToUse.substring(0, i - escapes) + sqlToUse.substring(i - escapes + 1);
                        escapes++;
                        i = i + 2;
                        continue;
                    }
                }
                if (c == '?') {
                    int j = i + 1;
                    if (j < statement.length && (statement[j] == '?' || statement[j] == '|' || statement[j] == '&')) {
                        // Postgres-style "??", "?|", "?&" operators should be skipped.
                        i = i + 2;
                        continue;
                    }
                    unnamedParameterCount++;
                    totalParameterCount++;
                }
            }
            i++;
        }
        ParsedSql parsedSql = new ParsedSql(sqlToUse);
        parsedSql.setParameterList(parameterList);
        parsedSql.setNamedParameterCount(namedParameterCount);
        parsedSql.setUnnamedParameterCount(unnamedParameterCount);
        parsedSql.setTotalParameterCount(totalParameterCount);
        return parsedSql;
    }
}
public class QueryBuilder {
    /**
     * Add raw columns or aggregate functions (COUNT, MAX, ...) to the query. This will turn
     * the query into something only suitable for the {@link Dao#queryRaw(String, String...)}
     * type of statement. This can be called multiple times to add more columns to select.
     *
     * @param columns raw column expressions to select
     * @return this builder, for fluent chaining
     */
    public QueryBuilder<T, ID> selectRaw(String... columns) {
        for (String column : columns) {
            addSelectToList(ColumnNameOrRawSql.withRawSql(column));
        }
        return this;
    }
}
public class MaF06 {
    /**
     * Evaluates a solution.
     *
     * @param solution the solution to evaluate; objectives are written back into it
     */
    @Override
    public void evaluate(DoubleSolution solution) {
        int numberOfVariables_ = solution.getNumberOfVariables();
        int numberOfObjectives_ = solution.getNumberOfObjectives();
        double[] x = new double[numberOfVariables_];
        double[] f = new double[numberOfObjectives_];
        for (int i = 0; i < numberOfVariables_; i++) {
            x[i] = solution.getVariableValue(i);
        }
        double[] thet = new double[numberOfObjectives_ - 1];
        double g = 0, sub1, sub2;
        // evaluate g (distance function over the tail variables) and the thet angles
        for (int i = numberOfObjectives_ - 1; i < numberOfVariables_; i++) {
            g += Math.pow(x[i] - 0.5, 2);
        }
        sub1 = 100 * g + 1;
        sub2 = 1 + g;
        for (int i = 0; i < 1; i++) {
            thet[i] = Math.PI * x[i] / 2;
        }
        for (int i = 1; i < numberOfObjectives_ - 1; i++) {
            thet[i] = Math.PI * (1 + 2 * g * x[i]) / (4 * sub2);
        }
        // evaluate fm, fm-1, ..., f2, f1
        f[numberOfObjectives_ - 1] = Math.sin(thet[0]) * sub1;
        double subf1 = 1;
        // fi = cos(thet1)cos(thet2)...cos(thet[m-i]) * sin(thet(m-i+1)) * (1+g[i]);
        // subf1 accumulates the running cosine product across iterations.
        for (int i = numberOfObjectives_ - 2; i > 0; i--) {
            subf1 *= Math.cos(thet[numberOfObjectives_ - i - 2]);
            f[i] = subf1 * Math.sin(thet[numberOfObjectives_ - i - 1]) * sub1;
        }
        f[0] = subf1 * Math.cos(thet[numberOfObjectives_ - 2]) * sub1;
        for (int i = 0; i < numberOfObjectives_; i++) {
            solution.setObjective(i, f[i]);
        }
    }
}
public class GetOTAUpdateRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param getOTAUpdateRequest the request to marshall; must not be null
     * @param protocolMarshaller  the marshaller used to serialize the bound fields
     */
    public void marshall(GetOTAUpdateRequest getOTAUpdateRequest, ProtocolMarshaller protocolMarshaller) {
        if (getOTAUpdateRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(getOTAUpdateRequest.getOtaUpdateId(), OTAUPDATEID_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class DateAccessor {
    /**
     * Decodes a {@code Date} from its byte representation: first tries to read the
     * bytes as a stored {@code date.getTime()} long, then falls back to parsing
     * them as a formatted date string.
     *
     * @see com.impetus.kundera.property.PropertyAccessor#fromBytes(byte[])
     */
    @Override
    public /* final */ Date fromBytes(Class targetClass, byte[] bytes) {
        try {
            if (bytes == null) {
                return null;
            }
            try {
                // In case date.getTime() is stored in DB.
                LongAccessor longAccessor = new LongAccessor();
                return new Date(longAccessor.fromBytes(targetClass, bytes));
            } catch (NumberFormatException nfex) {
                // Not a long: parse the bytes as a formatted date string instead.
                return getDateByPattern(new String(bytes, Constants.ENCODING));
            }
        } catch (Exception e) {
            log.error("Caused by {}.", e);
            throw new PropertyAccessException(e);
        }
    }
}
public class NoxView {
    /**
     * Configures the Shape used to show the list of NoxItems.
     *
     * @param attributes styled attributes read from XML; when nox_shape is not
     *                   specified, falls back to the fixed circular shape
     */
    private void initializeShapeConfig(TypedArray attributes) {
        defaultShapeKey = attributes.getInteger(R.styleable.nox_shape, ShapeFactory.FIXED_CIRCULAR_SHAPE_KEY);
    }
}
public class StringContentTilePainter {

    /**
     * Paint the tile! The tile must be an instance of {@link InternalTile}. This function will create 2 DOM
     * documents for geometries and labels. In case the renderer says "SVG" these documents will be of the type
     * {@link org.geomajas.internal.rendering.DefaultSvgDocument}, otherwise
     * {@link org.geomajas.internal.rendering.DefaultVmlDocument}. These documents in turn are built using
     * {@link org.geomajas.internal.rendering.writer.GraphicsWriter} classes.
     *
     * @param tileToPaint the instance of {@link InternalTile}. Using the DOM documents, the tile's
     *                    "featureFragment" and "labelFragment" will be created.
     * @return a fully rendered vector tile, or {@code tileToPaint} unchanged when it is null or has no features
     * @throws RenderException declared for callers; render failures inside the fragments are logged, not rethrown
     */
    public InternalTile paint(InternalTile tileToPaint) throws RenderException {
        // Only paint tiles that actually carry features; otherwise pass through untouched.
        if (tileToPaint != null && null != tileToPaint.getFeatures()) {
            tile = tileToPaint;
            // make sure features are sorted by style, needed for grouping
            Collections.sort(tile.getFeatures());

            // Create the SVG/VML feature fragment.
            // NOTE(review): featureDocument/labelDocument are fields — the fragment is built at most
            // once per painter instance; later calls reuse the cached document but still overwrite
            // the tile's content with a fresh writer. Presumably the painter is per-tile; confirm.
            if (paintGeometries && featureDocument == null) {
                StringWriter writer = new StringWriter();
                try {
                    featureDocument = createFeatureDocument(writer);
                    featureDocument.setRootId(layer.getId());
                    featureDocument.writeObject(tile, false);
                    featureDocument.flush();
                } catch (RenderException e) {
                    // Best-effort: log and keep whatever was written so far.
                    log.error("Unable to write this tile's feature fragment", e);
                }
                tile.setFeatureContent(writer.toString());
            }

            // Create the SVG/VML label fragment (same caching pattern as above).
            if (paintLabels && labelDocument == null) {
                StringWriter writer = new StringWriter();
                try {
                    labelDocument = createLabelDocument(writer, style.getLabelStyle());
                    labelDocument.setRootId(layer.getId());
                    labelDocument.writeObject(tileToPaint, false);
                    labelDocument.flush();
                } catch (RenderException e) {
                    log.error("Unable to write this tile's label fragment", e);
                }
                tile.setLabelContent(writer.toString());
            }
            return tile;
        }
        return tileToPaint;
    }
}
public class VerboseXmlEncoder { /** * Use a nasty reflection hack to make sure that the given object is * always written explicitly , without using " idref " , in the given * VerboseXmlEncoder * @ param encoder The encoder * @ param object The object */ static void forceWritingEnumValue ( VerboseXmlEncoder encoder , Object object ) { } }
Field valueToExpressionField = null ; Field refsField = null ; try { Class < ? > encoderClass = encoder . getClass ( ) . getSuperclass ( ) ; valueToExpressionField = encoderClass . getDeclaredField ( "valueToExpression" ) ; valueToExpressionField . setAccessible ( true ) ; Object valueToExpressionObject = valueToExpressionField . get ( encoder ) ; Map < ? , ? > valueToExpression = ( Map < ? , ? > ) valueToExpressionObject ; Object valueData = valueToExpression . get ( object ) ; if ( valueData != null ) { Class < ? > valueDataClass = valueData . getClass ( ) ; refsField = valueDataClass . getDeclaredField ( "refs" ) ; refsField . setAccessible ( true ) ; refsField . setInt ( valueData , 0 ) ; } } catch ( NoSuchFieldException e ) { logger . warning ( e . toString ( ) ) ; } catch ( SecurityException e ) { logger . warning ( e . toString ( ) ) ; } catch ( IllegalArgumentException e ) { logger . warning ( e . toString ( ) ) ; } catch ( IllegalAccessException e ) { logger . warning ( e . toString ( ) ) ; } finally { if ( valueToExpressionField != null ) { valueToExpressionField . setAccessible ( false ) ; } if ( refsField != null ) { refsField . setAccessible ( false ) ; } }
public class Encoding {

    /**
     * Percent and UTF8 decode a {@link CharSequence}. Returns null if invalid.
     *
     * @param s Percent encoded {@link CharSequence}.
     * @return Decoded CharSequence or null if invalid encoding.
     */
    private static CharSequence decode0(final CharSequence s) {
        final int length = s.length();
        // Decoding never produces more chars than the input has, so this capacity suffices.
        final CharBuffer cb = CharBuffer.allocate(length);
        for (int i = 0; i < length; ) {
            final char c = s.charAt(i);
            // Not encoded? Copy the literal character through.
            if (c != '%') {
                cb.append(c);
                i++;
                continue;
            }
            // First byte of a percent-escaped UTF-8 sequence ("%XX" is 3 chars).
            int b1 = decodePercent(s, length, i);
            if (b1 == INVALID) {
                return null;
            }
            i += 3;
            // The lead byte determines the sequence length (1..4) or INVALID.
            final int n = utf8Length(b1);
            if (n == INVALID) {
                return null;
            }
            if (n == 1) {
                // UTF8 - 1 byte: ASCII, emit directly.
                cb.append((char) b1);
                continue;
            }
            // UTF8 - 2 bytes.
            final int b2 = decodePercent(s, length, i);
            if (b2 == INVALID) {
                return null;
            }
            i += 3;
            if (n == 2) {
                final int cp = utf8Read2(b1, b2);
                if (cp == INVALID) {
                    return null;
                }
                cb.append((char) cp);
                continue;
            }
            // UTF8 - 3 bytes.
            final int b3 = decodePercent(s, length, i);
            if (b3 == INVALID) {
                return null;
            }
            i += 3;
            if (n == 3) {
                final int cp = utf8Read3(b1, b2, b3);
                if (cp == INVALID) {
                    return null;
                }
                cb.append((char) cp);
                continue;
            }
            // UTF8 - 4 bytes: a supplementary code point, emitted as a surrogate pair.
            final int b4 = decodePercent(s, length, i);
            if (b4 == INVALID) {
                return null;
            }
            i += 3;
            final int cp = utf8Read4(b1, b2, b3, b4);
            if (cp == INVALID) {
                return null;
            }
            // Split the code point into high/low surrogates (UTF-16 encoding).
            final int offset = cp - MIN_SUPPLEMENTARY_CODE_POINT;
            cb.append((char) ((offset >>> 10) + MIN_HIGH_SURROGATE));
            cb.append((char) ((offset & 0x3ff) + MIN_LOW_SURROGATE));
        }
        // Switch the buffer from write mode to read mode before handing it out.
        cb.flip();
        return cb;
    }
}
public class TypeUtils {

    /**
     * Finds the first concrete (non-interface, non-abstract) class in the given
     * set that implements the supplied interface.
     *
     * @param interfaceType a Class object representing the interface type
     * @param types         the set of Class objects to search through
     * @return the first concrete implementation class of the given interface,
     *         or null if one can't be found
     * @throws IllegalArgumentException if {@code interfaceType} is null, is not
     *                                  an interface, or {@code types} is null
     */
    public static Class findFirstImplementationOfInterface(Class interfaceType, Set<Class<?>> types) {
        if (interfaceType == null) {
            throw new IllegalArgumentException("An interface type must be provided.");
        } else if (!interfaceType.isInterface()) {
            throw new IllegalArgumentException("The interface type must represent an interface.");
        }
        if (types == null) {
            throw new IllegalArgumentException("The set of types to search through must be provided.");
        }
        for (Class<?> candidate : types) {
            // Only concrete classes qualify: skip interfaces and abstract classes.
            final boolean concrete =
                    !candidate.isInterface() && !Modifier.isAbstract(candidate.getModifiers());
            if (concrete && interfaceType.isAssignableFrom(candidate)) {
                return candidate;
            }
        }
        return null;
    }
}
public class IntervalST {

    /**
     * Test client: builds a fixed interval tree, runs a battery of hand-picked
     * range queries against it, then (currently unreachable, see the
     * System.exit note below) exercises the tree with random intervals.
     *
     * @param args optional; args[0] is parsed as the random-interval count N (default 10)
     */
    public static void main(String[] args) {
        int N;
        if (args != null && args.length != 0) {
            N = Integer.parseInt(args[0]);
        } else {
            N = 10;
        }
        IntervalST<Integer, Double> st = new IntervalST<>();
        // Each inserted interval gets a distinct, increasing id as its value.
        double intervalId = 0d;
        st.put(new Interval1D<>(1, 5), ++intervalId);
        st.put(new Interval1D<>(2, 5), ++intervalId);
        st.put(new Interval1D<>(3, 6), ++intervalId);
        // Duplicate interval on purpose — presumably exercises replace-on-equal-key; confirm.
        st.put(new Interval1D<>(3, 6), ++intervalId);
        st.put(new Interval1D<>(7, 7), ++intervalId);
        st.put(new Interval1D<>(4, 10), ++intervalId);
        st.put(new Interval1D<>(15, 20), ++intervalId);
        // NOTE(review): this list is built but never used afterwards — leftover scaffolding?
        List<Integer> vals = new ArrayList<>();
        vals.add(52);
        vals.add(54);
        vals.add(56);
        vals.add(58);
        st.put(new Interval1D<>(4, 100), ++intervalId);
        st.put(new Interval1D<>(4, 101), ++intervalId);
        st.put(new Interval1D<>(4, 102), ++intervalId);
        st.put(new Interval1D<>(4, 103), ++intervalId);
        st.put(new Interval1D<>(0, 103), ++intervalId);
        st.put(new Interval1D<>(-10, 103), ++intervalId);
        // Extreme bounds exercise overflow-sensitive comparisons in the tree.
        st.put(new Interval1D<>(Integer.MIN_VALUE, -1000), ++intervalId);
        st.put(new Interval1D<>(Integer.MIN_VALUE, -1100), ++intervalId);
        st.put(new Interval1D<>(Integer.MIN_VALUE, -900), ++intervalId);
        st.put(new Interval1D<>(1000, Integer.MAX_VALUE), ++intervalId);
        st.put(new Interval1D<>(900, Integer.MAX_VALUE), ++intervalId);
        st.put(new Interval1D<>(1100, Integer.MAX_VALUE), ++intervalId);
        st.put(new Interval1D<>(Integer.MIN_VALUE, Integer.MAX_VALUE), ++intervalId);
        // Remove one interval so later queries run against a mutated tree.
        Node<Integer, Double> removed = st.remove(new Interval1D<>(-10, 103));
        System.err.flush();
        // print out tree statistics
        System.out.println("Tree stats:");
        System.out.println("\theight: " + st.height());
        System.out.println("\tsize: " + st.size());
        System.out.println("\tintegrity check: " + st.check());
        System.out.println();
        System.out.flush();
        // Hand-picked queries probing boundaries, gaps, and extreme values.
        ArrayList<Interval1D<Integer>> queries = new ArrayList<>();
        queries.add(new Interval1D<>(-1, -1));
        queries.add(new Interval1D<>(0, 0));
        queries.add(new Interval1D<>(1, 1));
        queries.add(new Interval1D<>(2, 2));
        queries.add(new Interval1D<>(3, 3));
        queries.add(new Interval1D<>(4, 4));
        queries.add(new Interval1D<>(5, 5));
        queries.add(new Interval1D<>(6, 6));
        queries.add(new Interval1D<>(7, 7));
        queries.add(new Interval1D<>(10, 10));
        queries.add(new Interval1D<>(11, 11));
        queries.add(new Interval1D<>(12, 16));
        queries.add(new Interval1D<>(100, 100));
        queries.add(new Interval1D<>(200, 400));
        queries.add(new Interval1D<>(Integer.MIN_VALUE, Integer.MIN_VALUE));
        queries.add(new Interval1D<>(Integer.MIN_VALUE, -10000));
        queries.add(new Interval1D<>(Integer.MIN_VALUE, -1101));
        queries.add(new Interval1D<>(Integer.MIN_VALUE, -1100));
        queries.add(new Interval1D<>(Integer.MIN_VALUE, -999));
        queries.add(new Interval1D<>(Integer.MIN_VALUE, -899));
        queries.add(new Interval1D<>(-900, -899));
        queries.add(new Interval1D<>(-899, -800));
        queries.add(new Interval1D<>(Integer.MAX_VALUE, Integer.MAX_VALUE));
        queries.add(new Interval1D<>(10000, Integer.MAX_VALUE));
        queries.add(new Interval1D<>(1101, Integer.MAX_VALUE));
        queries.add(new Interval1D<>(1100, Integer.MAX_VALUE));
        queries.add(new Interval1D<>(1000, Integer.MAX_VALUE));
        queries.add(new Interval1D<>(900, Integer.MAX_VALUE));
        queries.add(new Interval1D<>(899, 900));
        queries.add(new Interval1D<>(898, 899));
        for (Interval1D<Integer> query : queries) {
            System.out.println("Query: " + query.toString());
            Iterable<Node<Integer, Double>> nodes = st.searchAll(query);
            if (!nodes.iterator().hasNext()) {
                System.out.print("No intersections");
            }
            for (Node<Integer, Double> x : nodes) {
                System.out.print("\t" + x.getInterval() + " :: " + x.getValue() + "\n");
            }
            System.out.println();
            System.out.println();
        }
        // NOTE(review): exits here with status 1 (failure code), making everything
        // below dead code — looks like a debugging shortcut; confirm before release.
        System.exit(1);
        // generate N random intervals and insert into data structure
        st = new IntervalST<>();
        for (int i = 0; i < N; i++) {
            int low = (int) (java.lang.Math.random() * 1000);
            int high = (int) (java.lang.Math.random() * 50) + low;
            Interval1D<Integer> interval = new Interval1D<>(low, high);
            System.out.println(interval);
            st.put(interval, (double) i);
        }
        // print out tree statistics
        System.out.println("height: " + st.height());
        System.out.println("size: " + st.size());
        System.out.println("integrity check: " + st.check());
        System.out.println();
        // generate random intervals and check for overlap
        for (int i = 0; i < N; i++) {
            int low = (int) (java.lang.Math.random() * 100);
            int high = (int) (java.lang.Math.random() * 10) + low;
            Interval1D<Integer> interval = new Interval1D<>(low, high);
            System.out.println(interval + ": " + st.search(interval));
            System.out.print(interval + ": ");
            for (Node<Integer, Double> x : st.searchAll(interval)) {
                System.out.print(x + " ");
            }
            System.out.println();
            System.out.println();
        }
    }
}
public class CommercePriceListUtil {

    /**
     * Returns the commerce price list where companyId = &#63; and externalReferenceCode = &#63;
     * or returns <code>null</code> if it could not be found, optionally using the finder cache.
     *
     * @param companyId             the company ID
     * @param externalReferenceCode the external reference code
     * @param retrieveFromCache     whether to retrieve from the finder cache
     * @return the matching commerce price list, or <code>null</code> if a matching
     *         commerce price list could not be found
     */
    public static CommercePriceList fetchByC_ERC(long companyId, String externalReferenceCode, boolean retrieveFromCache) {
        // Thin static facade: delegate straight to the persistence layer.
        return getPersistence().fetchByC_ERC(companyId, externalReferenceCode, retrieveFromCache);
    }
}
public class SubscriptionLoader { /** * Read the existing subscriptions from the database */ @ SuppressWarnings ( "unused" ) @ Scheduled ( fixedDelay = DateUtils . MILLIS_PER_MINUTE ) public void syncSubscriptions ( ) { } }
if ( ! mySyncSubscriptionsSemaphore . tryAcquire ( ) ) { return ; } try { doSyncSubscriptionsWithRetry ( ) ; } finally { mySyncSubscriptionsSemaphore . release ( ) ; }
public class LogUtils {

    /**
     * Logs a formatted string to the console using the source object's name as the log tag. If the
     * source object is null, the default tag (see {@link LogUtils#TAG}) is used.
     *
     * <p>Example usage:<br>
     * <code>LogUtils.log(this, Log.ERROR, "Invalid value: %d", value);</code>
     *
     * @param source   The object that generated the log event.
     * @param priority The log entry priority, see {@link Log#println(int, String, String)}.
     * @param format   A format string, see {@link String#format(String, Object...)}.
     * @param args     String formatter arguments.
     */
    public static void log(Object source, int priority, String format, Object... args) {
        // Delegate to the overload that also accepts a Throwable; null means
        // no exception is associated with this log event.
        log(source, priority, null, format, args);
    }
}
public class IndexGroupProcessor {

    /**
     * Puts index entries into the group they belong to.
     *
     * @param theIndexEntries      index entries
     * @param theIndexConfiguration index configuration
     * @param theLocale            locale used to sort and compare index entries
     * @return groups with sorted index entries inside (empty groups are dropped)
     */
    public IndexGroup[] process(final IndexEntry[] theIndexEntries, final IndexConfiguration theIndexConfiguration, final Locale theLocale) {
        final IndexCollator collator = new IndexCollator(theLocale);
        final ArrayList<MyIndexGroup> result = new ArrayList<MyIndexGroup>();
        final ConfigEntry[] entries = theIndexConfiguration.getEntries();
        // Working map of entries still awaiting a group; entries are removed as they are placed.
        final HashMap<String, IndexEntry> indexMap = createMap(theIndexEntries);
        // Creating array of index groups, one per configuration entry.
        for (final ConfigEntry configEntry : entries) {
            final String label = configEntry.getLabel();
            final MyIndexGroup group = new MyIndexGroup(label, configEntry);
            result.add(group);
        }
        final MyIndexGroup[] IndexGroups = (MyIndexGroup[]) result.toArray(new MyIndexGroup[result.size()]);
        // Adding dependencies to group array: a group whose members are a prefix of
        // another group's members becomes that group's child.
        for (int i = 0; i < IndexGroups.length; i++) {
            final MyIndexGroup thisGroup = IndexGroups[i];
            final String[] thisGroupMembers = thisGroup.getConfigEntry().getGroupMembers();
            for (int j = 0; j < IndexGroups.length; j++) {
                if (j != i) {
                    final MyIndexGroup compGroup = IndexGroups[j];
                    final String[] compGroupMembers = compGroup.getConfigEntry().getGroupMembers();
                    if (doesStart(compGroupMembers, thisGroupMembers)) {
                        thisGroup.addChild(compGroup);
                    }
                }
            }
        }
        /* for ( int i = 0 ; i < IndexGroups . length ; i + + ) { IndexGroups [ i ] . printDebug ( ) ; */
        for (int i = 0; i < IndexGroups.length; i++) {
            final MyIndexGroup group = IndexGroups[i];
            final ConfigEntry configEntry = group.getConfigEntry();
            final String[] groupMembers = configEntry.getGroupMembers();
            if (groupMembers.length > 0) {
                // Find entries by comparing first letter with the chars in the current config entry.
                // Iterate over a snapshot of the keys because placements mutate indexMap.
                for (final String key : new ArrayList<String>(indexMap.keySet())) {
                    if (key.length() > 0) {
                        final String value = getValue((IndexEntry) indexMap.get(key));
                        // final char c = value . charAt ( 0 ) ;
                        if (configEntry.isInRange(value, collator)) {
                            final IndexEntry entry = (IndexEntry) indexMap.remove(key);
                            group.addEntry(entry);
                        }
                    }
                }
            } else {
                // Get index entries by range specified by two keys: this entry's key up to
                // the next configuration entry's key (null means open-ended).
                final String key1 = configEntry.getKey();
                String key2 = null;
                if ((i + 1) < entries.length) {
                    final ConfigEntry nextEntry = entries[i + 1];
                    key2 = nextEntry.getKey();
                }
                final String[] indexMapKeys = getIndexKeysOfIndexesInRange(key1, key2, collator, indexMap);
                for (final String mapKey : indexMapKeys) {
                    final IndexEntry entry = (IndexEntry) indexMap.remove(mapKey);
                    group.addEntry(entry);
                }
            }
            /* if ( group . getEntries ( ) . length > 0 ) { result . add ( group ) ; */
        }
        // If some terms remain uncategorized, and a recognized special character
        // group is available, place remaining terms in that group.
        for (int i = 0; i < IndexGroups.length; i++) {
            final MyIndexGroup group = IndexGroups[i];
            final ConfigEntry configEntry = group.getConfigEntry();
            final String configKey = configEntry.getKey();
            if (configKey.equals(SPECIAL_CHARACTER_GROUP_KEY)) {
                for (final String key : new ArrayList<String>(indexMap.keySet())) {
                    if (key.length() > 0) {
                        final String value = getValue((IndexEntry) indexMap.get(key));
                        // final char c = value . charAt ( 0 ) ;
                        logger.info(MessageUtils.getMessage("PDFJ003I", value).toString());
                        final IndexEntry entry = (IndexEntry) indexMap.remove(key);
                        group.addEntry(entry);
                    }
                }
            }
        }
        // No recognized "Special characters" group; uncategorized terms have no place to go, must be dropped.
        if (!indexMap.isEmpty()) {
            for (final String key : new ArrayList<String>(indexMap.keySet())) {
                if (key.length() > 0) {
                    final IndexEntry entry = (IndexEntry) indexMap.get(key);
                    logger.error(MessageUtils.getMessage("PDFJ001E", entry.toString()).toString());
                }
            }
            if (IndexPreprocessorTask.failOnError) {
                logger.error(MessageUtils.getMessage("PDFJ002E").toString());
                IndexPreprocessorTask.processingFaild = true;
            }
        }
        // Keep only groups that actually received entries.
        final ArrayList<MyIndexGroup> cleanResult = new ArrayList<MyIndexGroup>();
        for (final MyIndexGroup indexGroup : IndexGroups) {
            if (indexGroup.getEntries().length > 0) {
                cleanResult.add(indexGroup);
            }
        }
        final MyIndexGroup[] cleanIndexGroups = (MyIndexGroup[]) cleanResult.toArray(new MyIndexGroup[cleanResult.size()]);
        return cleanIndexGroups;
    }
}
public class BinaryResourcesHandler { /** * Checks the bundle hashcode type of the requested binary resource * @ param requestedPath * the requested path * @ return true if the requested image is a valid one or not */ public BundleHashcodeType getBundleHashcodeType ( String requestedPath ) { } }
if ( binaryResourcePathMap . containsValue ( requestedPath ) ) { return BundleHashcodeType . VALID_HASHCODE ; } BundleHashcodeType bundleHashcodeType = BundleHashcodeType . UNKNOW_BUNDLE ; String [ ] resourceInfo = PathNormalizer . extractBinaryResourceInfo ( requestedPath ) ; String binaryRequest = resourceInfo [ 0 ] ; if ( resourceInfo [ 1 ] != null ) { // an hashcode is defined in the path try { String cacheBustedPath = CheckSumUtils . getCacheBustedUrl ( binaryRequest , getRsReaderHandler ( ) , jawrConfig ) ; addMapping ( binaryRequest , cacheBustedPath ) ; if ( requestedPath . equals ( cacheBustedPath ) ) { bundleHashcodeType = BundleHashcodeType . VALID_HASHCODE ; } else { bundleHashcodeType = BundleHashcodeType . INVALID_HASHCODE ; } } catch ( IOException | ResourceNotFoundException e ) { // Nothing to do } } return bundleHashcodeType ;
public class BeanProperty { /** * 获取数值型的属性值 * @ throws IllegalAccessException * @ throws IllegalArgumentException * @ throws InvocationTargetException * @ throws BeanPropertyException */ public long getNum ( Object obj ) throws IllegalArgumentException , IllegalAccessException , InvocationTargetException , BeanPropertyException , NoSuchMethodException { } }
switch ( getTypeCode ( ) ) { case SHORT : if ( m_f != null ) return ( long ) m_f . getShort ( obj ) ; if ( m_mGet != null ) return ( ( Short ) m_mGet . invoke ( obj , EMPTYOBJECTARRAY ) ) . shortValue ( ) ; break ; case INTEGER : if ( m_f != null ) return ( long ) m_f . getInt ( obj ) ; if ( m_mGet != null ) return ( ( Integer ) m_mGet . invoke ( obj , EMPTYOBJECTARRAY ) ) . intValue ( ) ; break ; case LONG : if ( m_f != null ) return m_f . getLong ( obj ) ; if ( m_mGet != null ) return ( ( Long ) m_mGet . invoke ( obj , EMPTYOBJECTARRAY ) ) . longValue ( ) ; break ; default : throw new BeanPropertyException ( "Property is not numeric." ) ; } throw new BeanPropertyException ( "Try to read write-only property." ) ;
public class StoryRunner {

    /**
     * Run steps before or after a collection of stories. Steps are executed only
     * <b>once</b> per collection of stories.
     *
     * @param configuration  the Configuration used to find the steps to run
     * @param candidateSteps the List of CandidateSteps containing the candidate steps methods
     * @param stage          the Stage (BEFORE or AFTER the story collection)
     * @return the State after running the steps
     */
    public State runBeforeOrAfterStories(Configuration configuration, List<CandidateSteps> candidateSteps, Stage stage) {
        // Synthesise a pseudo story path, e.g. "BeforeStories"/"AfterStories".
        String storyPath = capitalizeFirstLetter(stage.name().toLowerCase()) + "Stories";
        reporter.set(configuration.storyReporter(storyPath));
        reporter.get().beforeStory(new Story(storyPath), false);
        RunContext context = new RunContext(configuration, candidateSteps, storyPath, MetaFilter.EMPTY);
        if (stage == Stage.BEFORE) {
            // A fresh run: clear any failure recorded by a previous collection.
            resetStoryFailure(context);
        }
        if (stage == Stage.AFTER && storiesState.get() != null) {
            // Carry the state accumulated while the stories ran into the AFTER steps.
            context.stateIs(storiesState.get());
        }
        try {
            runStepsWhileKeepingState(context,
                    configuration.stepCollector().collectBeforeOrAfterStoriesSteps(context.candidateSteps(), stage));
        } catch (InterruptedException e) {
            throw new UUIDExceptionWrapper(e);
        }
        reporter.get().afterStory(false);
        storiesState.set(context.state());
        // if we are running with multiple threads, call delayed
        // methods, otherwise we will forget to close files on BeforeStories
        if (stage == Stage.BEFORE) {
            if (reporter.get() instanceof ConcurrentStoryReporter) {
                ((ConcurrentStoryReporter) reporter.get()).invokeDelayed();
            }
        }
        // handle any after stories failure according to strategy
        if (stage == Stage.AFTER) {
            try {
                handleStoryFailureByStrategy();
            } catch (Throwable e) {
                // The strategy rethrew: surface the recorded story failure as the state.
                return new SomethingHappened(storyFailure.get());
            } finally {
                // Always flush delayed reporter output, even on failure.
                if (reporter.get() instanceof ConcurrentStoryReporter) {
                    ((ConcurrentStoryReporter) reporter.get()).invokeDelayed();
                }
            }
        }
        return context.state();
    }
}
public class Record { /** * Reads the field value starting at { @ code fromIndex } and calls * { @ link FieldHandler # data ( char [ ] , String ) } . * @ param fromIndex index at which the identifier of the field value starts . * @ return the index of the end of field marker . This is the position write * after the data field in the buffer . It can be used as the next starting * position when processing multiple subfields . */ private int processDataValue ( final int fromIndex ) { } }
final char [ ] identifier = getIdentifier ( fromIndex ) ; final int dataStart = fromIndex + identifierLength ; final int dataLength = buffer . distanceTo ( DATA_SEPARATORS , dataStart ) ; final String data = buffer . stringAt ( dataStart , dataLength , charset ) ; fieldHandler . data ( identifier , data ) ; return dataStart + dataLength ;
public class PermissionOverrideAction {

    /**
     * Sets the value of explicitly granted permissions using the bitwise representation
     * of a set of {@link net.dv8tion.jda.core.Permission Permissions}.
     * <br>This value can be retrieved through
     * {@link net.dv8tion.jda.core.Permission#getRaw(net.dv8tion.jda.core.Permission...) Permissions.getRaw(Permission...)}!
     * <br><b>Note: Permissions not marked as {@link net.dv8tion.jda.core.Permission#isChannel() isChannel()} will have no affect!</b>
     *
     * @param allowBits the <b>positive</b> bits representing the granted permissions
     *                  for the new PermissionOverride
     * @throws java.lang.IllegalArgumentException if the provided bits are negative or higher
     *         than {@link net.dv8tion.jda.core.Permission#ALL_PERMISSIONS Permission.ALL_PERMISSIONS}
     * @return The current PermissionOverrideAction - for chaining convenience
     * @see #setAllow(java.util.Collection) setAllow(Collection)
     * @see #setAllow(net.dv8tion.jda.core.Permission...) setAllow(Permission...)
     */
    @CheckReturnValue
    public PermissionOverrideAction setAllow(long allowBits) {
        // Validate the raw bitmask before storing it: non-negative and within the full set.
        Checks.notNegative(allowBits, "Granted permissions value");
        Checks.check(allowBits <= Permission.ALL_PERMISSIONS, "Specified allow value may not be greater than a full permission set");
        this.allow = allowBits;
        return this;
    }
}
public class SnowflakeCreator {

    /**
     * A diode is a long value whose left and right margins are ZERO while the
     * middle bits are ONE in the binary string layout — it looks like a diode
     * in shape.
     *
     * <p>The produced mask has {@code offset} zero bits on the left, then
     * {@code length} one bits, then zeros down to bit 0.
     *
     * <p>Bug fix: Java masks a long shift count to its low 6 bits, so
     * {@code -1L << 64} evaluates to {@code -1L} rather than 0. The original
     * expression {@code (-1L << lb) ^ (-1L << rb)} therefore produced the
     * complement of the intended mask whenever {@code offset == 0}
     * (e.g. {@code diode(0, 64)} returned 0 instead of all ones). Shift
     * counts of 64 are now treated explicitly as an all-zero mask.
     *
     * @param offset left margin position (number of leading zero bits)
     * @param length offset + length is the right margin position
     * @return a long value with bits {@code [64-offset-length, 64-offset)} set
     */
    private long diode(long offset, long length) {
        int lb = (int) (64 - offset);
        int rb = (int) (64 - (offset + length));
        // A shift count of 64 must yield an empty mask; guard it explicitly
        // because "-1L << 64" is "-1L" under Java's shift-count masking.
        long leftMask = (lb >= 64) ? 0L : (-1L << lb);
        long rightMask = (rb >= 64) ? 0L : (-1L << rb);
        return leftMask ^ rightMask;
    }
}
public class HSQLInterface {

    /**
     * Take an equality-test expression that represents an in-list and munge it
     * into the simpler form wanted by the AbstractParsedStmt for its
     * AbstractExpression classes: an "in" operation whose children are the row
     * expression followed by the list (vector, subquery, or single value).
     *
     * @param inElement the XML element rewritten in place
     */
    private void inFixup(VoltXMLElement inElement) {
        // Rebrand the element as an "in" operation.
        inElement.name = "operation";
        inElement.attributes.put("optype", "in");

        // Locate the relevant children by name.
        VoltXMLElement rowElem = null;
        VoltXMLElement tableElem = null;
        VoltXMLElement subqueryElem = null;
        VoltXMLElement valueElem = null;
        for (VoltXMLElement child : inElement.children) {
            switch (child.name) {
                case "row":
                    rowElem = child;
                    break;
                case "table":
                    tableElem = child;
                    break;
                case "tablesubquery":
                    subqueryElem = child;
                    break;
                case "value":
                    valueElem = child;
                    break;
                default:
                    break;
            }
        }

        // Build the right-hand side of the "in": either a flattened vector of
        // the table rows' single children, a subquery, or a lone value.
        final VoltXMLElement inlist;
        if (tableElem != null) {
            inlist = new VoltXMLElement("vector");
            for (VoltXMLElement row : tableElem.children) {
                assert (row.name.equals("row"));
                assert (row.children.size() == 1);
                inlist.children.addAll(row.children);
            }
        } else if (subqueryElem != null) {
            inlist = subqueryElem;
        } else {
            assert (valueElem != null);
            inlist = valueElem;
        }
        assert (rowElem != null);

        // Replace the children with exactly: the row, then the in-list.
        inElement.children.clear();
        inElement.children.add(rowElem);
        inElement.children.add(inlist);
    }
}
public class GenericConversionService { /** * Return the default converter if no converter is found for the given sourceType / targetType pair . * Returns a NO _ OP Converter if the sourceType is assignable to the targetType . * Returns { @ code null } otherwise , indicating no suitable converter could be found . * Subclasses may override . * @ param sourceType the source type to convert from * @ param targetType the target type to convert to * @ return the default generic converter that will perform the conversion */ protected GenericConverter getDefaultConverter ( TypeDescriptor sourceType , TypeDescriptor targetType ) { } }
return ( sourceType . isAssignableTo ( targetType ) ? NO_OP_CONVERTER : null ) ;
public class MergeHtml { /** * OpenMergeRecord Method . */ public Record openMergeRecord ( ) { } }
Record record = super . openMergeRecord ( ) ; if ( record == null ) record = new EmptyMemoryRecord ( this ) ; // Usually return record ;
public class LocalDateModifiedFollowingHandler { public LocalDate adjustDate ( LocalDate startDate , int increment , NonWorkingDayChecker < LocalDate > checker ) { } }
LocalDate date = startDate ; final int month = date . getMonthOfYear ( ) ; int stepToUse = increment ; while ( checker . isNonWorkingDay ( date ) ) { date = date . plusDays ( stepToUse ) ; if ( date . getMonthOfYear ( ) != month ) { // flick to backward stepToUse *= - 1 ; date = date . plusDays ( stepToUse ) ; } } return date ;
public class OSGiUtil { /** * get local bundle , but does not download from update provider ! * @ param name * @ param version * @ return * @ throws BundleException */ public static void removeLocalBundle ( String name , Version version , boolean removePhysical , boolean doubleTap ) throws BundleException { } }
name = name . trim ( ) ; CFMLEngine engine = CFMLEngineFactory . getInstance ( ) ; CFMLEngineFactory factory = engine . getCFMLEngineFactory ( ) ; BundleFile bf = _getBundleFile ( factory , name , version , null ) ; if ( bf != null ) { BundleDefinition bd = bf . toBundleDefinition ( ) ; if ( bd != null ) { Bundle b = bd . getLocalBundle ( ) ; if ( b != null ) { stopIfNecessary ( b ) ; b . uninstall ( ) ; } } } if ( ! removePhysical ) return ; // remove file if ( bf != null ) { if ( ! bf . getFile ( ) . delete ( ) && doubleTap ) bf . getFile ( ) . deleteOnExit ( ) ; }
public class AbstractConnectProtocol { /** * Default collation used for string exchanges with server . ( always use utf8) * @ param serverLanguage server default collation * @ return collation byte */ private byte decideLanguage ( int serverLanguage ) { } }
// force UTF8mb4 if possible , UTF8 if not . if ( serverLanguage == 45 // utf8mb4 _ general _ ci || serverLanguage == 46 // utf8mb4 _ bin || ( serverLanguage >= 224 && serverLanguage <= 247 ) ) { return ( byte ) serverLanguage ; } else if ( serverLanguage == 33 // utf8 _ general _ ci || serverLanguage == 83 // utf8 _ bin || serverLanguage == 223 // utf8 _ general _ mysql500 _ ci || ( serverLanguage >= 192 && serverLanguage <= 215 ) ) { return ( byte ) serverLanguage ; } if ( getMajorServerVersion ( ) == 5 && getMinorServerVersion ( ) <= 1 ) { return ( byte ) 33 ; // utf8 _ general _ ci } return ( byte ) 224 ; // UTF8MB4 _ UNICODE _ CI ;
public class ActionPathResolver { protected String buildActionName ( MappingPathResource mappingResource , String pkg , String classPrefix ) { } }
final String actionNameSuffix = mappingResource . getActionNameSuffix ( ) . orElse ( "" ) ; // option so basically empty final String actionSuffix = namingConvention . getActionSuffix ( ) ; // e . g . ' Action ' return ( pkg != null ? pkg : "" ) + classPrefix + actionNameSuffix + actionSuffix ; // e . g . sea _ seaLandAction , sea _ seaLandSpAction
public class EntitiesParseUtil { /** * / * package */ static MediaEntity [ ] getMedia ( JSONObject entities ) throws JSONException , TwitterException { } }
if ( ! entities . isNull ( "media" ) ) { JSONArray mediaArray = entities . getJSONArray ( "media" ) ; int len = mediaArray . length ( ) ; MediaEntity [ ] mediaEntities = new MediaEntity [ len ] ; for ( int i = 0 ; i < len ; i ++ ) { mediaEntities [ i ] = new MediaEntityJSONImpl ( mediaArray . getJSONObject ( i ) ) ; } return mediaEntities ; } else { return null ; }
public class ViewPagerEx { /** * Set a drawable that will be used to fill the margin between pages . * @ param d Drawable to display between pages */ public void setPageMarginDrawable ( Drawable d ) { } }
mMarginDrawable = d ; if ( d != null ) refreshDrawableState ( ) ; setWillNotDraw ( d == null ) ; invalidate ( ) ;
public class FlashImpl { /** * Returns true if the current phase is the last phase in the request * and thus if doPostPhaseActions ( ) is called for the last time . * This will be true if either we are in phase 6 ( render response ) * or if setRedirect ( true ) was called on this request and we are * in phase 5 ( invoke application ) . */ private boolean _isLastPhaseInRequest ( FacesContext facesContext ) { } }
final PhaseId currentPhaseId = facesContext . getCurrentPhaseId ( ) ; boolean lastPhaseNormalRequest = PhaseId . RENDER_RESPONSE . equals ( currentPhaseId ) ; // According to the spec , if there is a redirect , responseComplete ( ) // has been called , and Flash . setRedirect ( ) has been called too , // so we just need to check both are present . boolean lastPhaseIfRedirect = facesContext . getResponseComplete ( ) && _isRedirectTrueOnThisRequest ( facesContext ) ; return lastPhaseNormalRequest || lastPhaseIfRedirect ;
public class V1InstanceGetter {
    /**
     * Returns an Entity of Type T with the given ID or null if the ID is
     * invalid.
     *
     * @param <T> Entity Type to retrieve.
     * @param clazz - T Class.
     * @param id ID of the Entity to retrieve.
     * @return an instance of an Entity of Type T or null if ID is invalid.
     */
    public <T extends Entity> T byID(Class<T> clazz, AssetID id) {
        // Delegates to the instance's wrapper manager.
        // NOTE(review): the trailing 'true' flag's meaning is not visible here
        // (presumably "validate the id") — confirm against WrapperManager.create.
        return instance.getWrapperManager().create(clazz, id, true);
    }
}
public class CSP2SourceList {
    /**
     * Add a host to this CSP source list.
     *
     * @param sHost Host to add. Must be a valid URL or a star prefixed version.
     *              Must not be null or empty.
     * @return this for chaining
     */
    @Nonnull
    public CSP2SourceList addHost(@Nonnull @Nonempty final String sHost) {
        // Fail fast on null/empty before mutating the list.
        ValueEnforcer.notEmpty(sHost, "Host");
        m_aList.add(sHost);
        return this;
    }
}
public class VoiceApi { /** * Merge the two specified calls . * @ param connId The connection ID of the first call to be merged . * @ param otherConnId The connection ID of the second call to be merged . * @ param reasons Information on causes for , and results of , actions taken by the user of the current DN . For details about reasons , refer to the [ * Genesys Events and Models Reference Manual * ] ( https : / / docs . genesys . com / Documentation / System / Current / GenEM / Reasons ) . ( optional ) * @ param extensions Media device / hardware reason codes and similar information . For details about extensions , refer to the [ * Genesys Events and Models Reference Manual * ] ( https : / / docs . genesys . com / Documentation / System / Current / GenEM / Extensions ) . ( optional ) */ public void mergeCalls ( String connId , String otherConnId , KeyValueCollection reasons , KeyValueCollection extensions ) throws WorkspaceApiException { } }
try { VoicecallsidmergeData mergeData = new VoicecallsidmergeData ( ) ; mergeData . setOtherConnId ( otherConnId ) ; mergeData . setReasons ( Util . toKVList ( reasons ) ) ; mergeData . setExtensions ( Util . toKVList ( extensions ) ) ; MergeData data = new MergeData ( ) ; data . data ( mergeData ) ; ApiSuccessResponse response = this . voiceApi . merge ( connId , data ) ; throwIfNotOk ( "mergeCalls" , response ) ; } catch ( ApiException e ) { throw new WorkspaceApiException ( "mergeCalls failed." , e ) ; }
public class PTSaxton2006 { /** * Equation 2 for 33 kPa moisture , normal density , % v * @ param slsnd Sand weight percentage by layer ( [ 0,100 ] % ) * @ param slcly Clay weight percentage by layer ( [ 0,100 ] % ) * @ param omPct Organic matter weight percentage by layer ( [ 0,100 ] % ) , ( = * SLOC * 1.72) */ public static String calcMoisture33Kpa ( String slsnd , String slcly , String omPct ) { } }
String ret ; if ( ( slsnd = checkPctVal ( slsnd ) ) == null || ( slcly = checkPctVal ( slcly ) ) == null || ( omPct = checkPctVal ( omPct ) ) == null ) { LOG . error ( "Invalid input parameters for calculating 33 kPa moisture, normal density, %v" ) ; return null ; } String mt33Fst = calcMoisture33KpaFst ( slsnd , slcly , omPct ) ; ret = sum ( product ( pow ( mt33Fst , "2" ) , "0.01283" ) , product ( mt33Fst , "0.626" ) , "-1.5" ) ; LOG . debug ( "Calculate result for 33 kPa moisture, normal density, %v is {}" , ret ) ; return ret ;
public class Collectors { /** * Note : Generally it ' s much slower than other { @ code Collectors } . * @ param maxWaitIntervalInMillis * @ param supplier * @ param streamingCollector * @ return * @ see Stream # observe ( BlockingQueue , Predicate , long ) * @ see Stream # asyncCall ( Try . Function ) */ public static < T , R > Collector < T , ? , R > streaming ( final long maxWaitIntervalInMillis , final Supplier < ? extends BlockingQueue < T > > queueSupplier , final Function < ? super Stream < T > , R > streamingCollector ) { } }
final Function < Stream < T > , ContinuableFuture < R > > streamingCollector2 = new Function < Stream < T > , ContinuableFuture < R > > ( ) { @ Override public ContinuableFuture < R > apply ( Stream < T > t ) { return t . asyncCall ( streamingCollector ) ; } } ; return streaming ( queueSupplier , streamingCollector2 , maxWaitIntervalInMillis ) ;
public class ElementsExceptionsFactory {
    /**
     * Constructs and initializes a new {@link ParseException} with the given {@link String message}
     * formatted with the given {@link Object[] arguments}.
     *
     * @param message {@link String} describing the {@link ParseException exception}.
     * @param args {@link Object[] arguments} used to replace format placeholders in the {@link String message}.
     * @return a new {@link ParseException} with the given {@link String message}.
     * @see #newParseException(Throwable, String, Object...)
     * @see org.cp.elements.text.ParseException
     */
    public static ParseException newParseException(String message, Object... args) {
        // Delegate to the cause-accepting overload with no cause.
        return newParseException(null, message, args);
    }
}
public class AbstractQuotaPersister { /** * { @ inheritDoc } */ public void setNodeDataSizeIfQuotaExists ( String repositoryName , String workspaceName , String nodePath , long dataSize ) { } }
setNodeDataSize ( repositoryName , workspaceName , nodePath , dataSize ) ; try { getNodeQuotaOrGroupOfNodesQuota ( repositoryName , workspaceName , nodePath ) ; } catch ( UnknownQuotaLimitException e ) { removeNodeDataSize ( repositoryName , workspaceName , nodePath ) ; }
public class Formats {
    /**
     * Returns a substring of the given string, representing the 'length'
     * most-left characters. If the string is shorter than 'length', the whole
     * string is returned.
     *
     * @param str source string (must not be null)
     * @param length maximum number of leading characters to keep
     * @return the leading characters of {@code str}, at most {@code length} long
     */
    public static String leftStr(String str, int length) {
        final int cut = Math.min(length, str.length());
        return str.substring(0, cut);
    }
}
public class StringUtils { /** * < p > Checks if all of the CharSequences are empty ( " " ) , null or whitespace only . < / p > * < p > Whitespace is defined by { @ link Character # isWhitespace ( char ) } . < / p > * < pre > * StringUtils . isAllBlank ( null ) = true * StringUtils . isAllBlank ( null , " foo " ) = false * StringUtils . isAllBlank ( null , null ) = true * StringUtils . isAllBlank ( " " , " bar " ) = false * StringUtils . isAllBlank ( " bob " , " " ) = false * StringUtils . isAllBlank ( " bob " , null ) = false * StringUtils . isAllBlank ( " " , " bar " ) = false * StringUtils . isAllBlank ( " foo " , " bar " ) = false * StringUtils . isAllBlank ( new String [ ] { } ) = true * < / pre > * @ param css the CharSequences to check , may be null or empty * @ return { @ code true } if all of the CharSequences are empty or null or whitespace only * @ since 3.6 */ public static boolean isAllBlank ( final CharSequence ... css ) { } }
if ( ArrayUtils . isEmpty ( css ) ) { return true ; } for ( final CharSequence cs : css ) { if ( isNotBlank ( cs ) ) { return false ; } } return true ;
public class CcAes { /** * Create new cipher based on the valid mode from { @ link Cipher } class . * @ param mode Either Cipher . ENRYPT _ MODE or Cipher . DECRYPT _ MODE * @ param spec Param spec ( IV ) * @ return The cipher * @ throws IOException For any unexpected exceptions */ private Cipher cipher ( final int mode , final AlgorithmParameterSpec spec ) throws IOException { } }
try { final Cipher cipher = Cipher . getInstance ( "AES/CBC/PKCS5PADDING" ) ; cipher . init ( mode , this . key , spec , this . random ) ; return cipher ; } catch ( final InvalidKeyException | NoSuchAlgorithmException | InvalidAlgorithmParameterException | NoSuchPaddingException ex ) { throw new IOException ( ex ) ; }
public class XPathParser { /** * Parses the the rule ExprSingle according to the following production * rule : * [ 3 ] ExprSingle : : = ForExpr | QuantifiedExpr | IfExpr | OrExpr . * @ throws TTXPathException */ private void parseExprSingle ( ) throws TTXPathException { } }
mPipeBuilder . addExpressionSingle ( ) ; final String tContent = mToken . getContent ( ) ; if ( "for" . equals ( tContent ) ) { parseForExpr ( ) ; } else if ( "some" . equals ( tContent ) || "every" . equals ( tContent ) ) { parseQuantifiedExpr ( ) ; } else if ( "if" . equals ( tContent ) ) { parseIfExpr ( ) ; } else { parseOrExpr ( ) ; }
public class _NavigationUtils { /** * Evaluate all EL expressions found as parameters and return a map that can be used for * redirect or render bookmark links * @ param parameters parameter map retrieved from NavigationCase . getParameters ( ) * @ return */ public static Map < String , List < String > > getEvaluatedNavigationParameters ( FacesContext facesContext , Map < String , List < String > > parameters ) { } }
Map < String , List < String > > evaluatedParameters = null ; if ( parameters != null && parameters . size ( ) > 0 ) { evaluatedParameters = new HashMap < String , List < String > > ( ) ; for ( Map . Entry < String , List < String > > pair : parameters . entrySet ( ) ) { boolean containsEL = false ; for ( String value : pair . getValue ( ) ) { if ( _isExpression ( value ) ) { containsEL = true ; break ; } } if ( containsEL ) { evaluatedParameters . put ( pair . getKey ( ) , _evaluateValueExpressions ( facesContext , pair . getValue ( ) ) ) ; } else { evaluatedParameters . put ( pair . getKey ( ) , pair . getValue ( ) ) ; } } } else { evaluatedParameters = parameters ; } return evaluatedParameters ;
public class FieldDefinition { /** * Indicate if the given field name is valid . Currently , field names must begin with a * letter and consist of all letters , digits , and underscores . * @ param fieldName Candidate field name . * @ return True if the given name is valid for fields . */ public static boolean isValidFieldName ( String fieldName ) { } }
return fieldName != null && fieldName . length ( ) > 0 && Utils . isLetter ( fieldName . charAt ( 0 ) ) && Utils . allAlphaNumUnderscore ( fieldName ) ;
public class VaultDriverDataImpl { /** * @ Override * public void findAllIds ( String where , Object [ ] params , * Result < List < ID > > result ) * String sql = getSelectPkSql ( where ) ; * _ db . findAll ( sql , result . of ( iter - > readIdListFromCursor ( iter ) ) , params ) ; * @ Override * public < X > void findValueList ( String sql , * Object [ ] values , * ResultChain < List < X > > result ) * Iterable < Cursor > rows = _ db . findAll ( sql , values ) ; * List < X > list = new ArrayList < > ( ) ; * for ( Cursor row : rows ) { * list . add ( ( X ) row . getObject ( 1 ) ) ; * result . ok ( list ) ; */ @ Override public T toProxy ( Object id ) { } }
if ( id == null ) { return null ; } ServiceRef ref = _services . service ( toAddress ( id ) ) ; return ref . as ( _entityClass ) ;
public class DirectoryServiceClient { /** * Get the changed services list . * @ param services * the Service list . * @ return * the list of Services that have been changed . * @ throws ServiceException */ public Map < String , OperationResult < ModelService > > getChangedServices ( Map < String , ModelService > services ) { } }
String body = _serialize ( services ) ; HttpResponse result = invoker . invoke ( "/service/changing" , body , HttpMethod . POST ) ; if ( result . getHttpCode ( ) != HTTP_OK ) { throw new ServiceException ( ErrorCode . REMOTE_DIRECTORY_SERVER_ERROR , "HTTP Code is not OK, code=%s" , result . getHttpCode ( ) ) ; } Map < String , OperationResult < ModelService > > changedServices = _deserialize ( result . getRetBody ( ) , new TypeReference < Map < String , OperationResult < ModelService > > > ( ) { } ) ; return changedServices ;
public class AccountACL { /** * Set if a player can deposit money in the account * @ param name The Player name * @ param deposit Can deposit or not */ public void setDeposit ( String name , boolean deposit ) { } }
String newName = name . toLowerCase ( ) ; if ( aclList . containsKey ( newName ) ) { AccountACLValue value = aclList . get ( newName ) ; set ( newName , deposit , value . canWithdraw ( ) , value . canAcl ( ) , value . canBalance ( ) , value . isOwner ( ) ) ; } else { set ( newName , deposit , false , false , false , false ) ; }
public class SqlFragmentContainer { /** * builds the text of the prepared statement * @ param context A ControlBeanContext instance . * @ param m The annotated method . * @ param args The method ' s parameters . * @ return The PreparedStatement text generated by this fragment and its children . */ String getPreparedStatementText ( ControlBeanContext context , Method m , Object [ ] args ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( SqlFragment sf : _children ) { sb . append ( sf . getPreparedStatementText ( context , m , args ) ) ; } return sb . toString ( ) ;
public class ListBrokersRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * NOTE(review): this follows the AWS SDK generated-marshaller pattern
     * (compare RejectSkillRequestMarshaller); keep edits in the generator,
     * not here.
     *
     * @param listBrokersRequest the request to marshall; must not be null
     * @param protocolMarshaller the protocol marshaller to write bindings to
     */
    public void marshall(ListBrokersRequest listBrokersRequest, ProtocolMarshaller protocolMarshaller) {
        if (listBrokersRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Each field is written against its static protocol binding.
            protocolMarshaller.marshall(listBrokersRequest.getMaxResults(), MAXRESULTS_BINDING);
            protocolMarshaller.marshall(listBrokersRequest.getNextToken(), NEXTTOKEN_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SummarizedAttackVector { /** * The list of counters that describe the details of the attack . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setVectorCounters ( java . util . Collection ) } or { @ link # withVectorCounters ( java . util . Collection ) } if you want * to override the existing values . * @ param vectorCounters * The list of counters that describe the details of the attack . * @ return Returns a reference to this object so that method calls can be chained together . */ public SummarizedAttackVector withVectorCounters ( SummarizedCounter ... vectorCounters ) { } }
if ( this . vectorCounters == null ) { setVectorCounters ( new java . util . ArrayList < SummarizedCounter > ( vectorCounters . length ) ) ; } for ( SummarizedCounter ele : vectorCounters ) { this . vectorCounters . add ( ele ) ; } return this ;
public class PropertiesConfigured { /** * Configures with a list of properties files , which could be on either the * file system ( file : location ) or the class path ( classpath : location ) . */ public void setPropertiesLocations ( List < String > locations ) { } }
Properties properties = null ; PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver ( ) ; for ( String location : locations ) { properties = load ( properties , resolver , location ) ; } if ( properties != null && properties . size ( ) > 0 ) { configure ( properties ) ; }
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link MassPointReliefType}{@code >}.
     *
     * NOTE(review): JAXB-generated factory method — regenerate rather than hand-edit.
     *
     * @param value Java instance representing xml element's value.
     * @return the new instance of {@link JAXBElement}{@code <}{@link MassPointReliefType}{@code >}
     */
    @XmlElementDecl(namespace = "http://www.opengis.net/citygml/relief/2.0", name = "MassPointRelief", substitutionHeadNamespace = "http://www.opengis.net/citygml/relief/2.0", substitutionHeadName = "_ReliefComponent")
    public JAXBElement<MassPointReliefType> createMassPointRelief(MassPointReliefType value) {
        // Scope is null: this element is declared at global (schema) scope.
        return new JAXBElement<MassPointReliefType>(_MassPointRelief_QNAME, MassPointReliefType.class, null, value);
    }
}
public class CollationIterator { /** * Turns a string of digits ( bytes 0 . . 9) * into a sequence of CEs that will sort in numeric order . * Starts from this ce32 ' s digit value and consumes the following / preceding digits . * The digits string must not be empty and must not have leading zeros . */ private final void appendNumericCEs ( int ce32 , boolean forward ) { } }
// Collect digits . // TODO : Use some kind of a byte buffer ? We only store values 0 . . 9. StringBuilder digits = new StringBuilder ( ) ; if ( forward ) { for ( ; ; ) { char digit = Collation . digitFromCE32 ( ce32 ) ; digits . append ( digit ) ; if ( numCpFwd == 0 ) { break ; } int c = nextCodePoint ( ) ; if ( c < 0 ) { break ; } ce32 = data . getCE32 ( c ) ; if ( ce32 == Collation . FALLBACK_CE32 ) { ce32 = data . base . getCE32 ( c ) ; } if ( ! Collation . hasCE32Tag ( ce32 , Collation . DIGIT_TAG ) ) { backwardNumCodePoints ( 1 ) ; break ; } if ( numCpFwd > 0 ) { -- numCpFwd ; } } } else { for ( ; ; ) { char digit = Collation . digitFromCE32 ( ce32 ) ; digits . append ( digit ) ; int c = previousCodePoint ( ) ; if ( c < 0 ) { break ; } ce32 = data . getCE32 ( c ) ; if ( ce32 == Collation . FALLBACK_CE32 ) { ce32 = data . base . getCE32 ( c ) ; } if ( ! Collation . hasCE32Tag ( ce32 , Collation . DIGIT_TAG ) ) { forwardNumCodePoints ( 1 ) ; break ; } } // Reverse the digit string . digits . reverse ( ) ; } int pos = 0 ; do { // Skip leading zeros . while ( pos < ( digits . length ( ) - 1 ) && digits . charAt ( pos ) == 0 ) { ++ pos ; } // Write a sequence of CEs for at most 254 digits at a time . int segmentLength = digits . length ( ) - pos ; if ( segmentLength > 254 ) { segmentLength = 254 ; } appendNumericSegmentCEs ( digits . subSequence ( pos , pos + segmentLength ) ) ; pos += segmentLength ; } while ( pos < digits . length ( ) ) ;
public class BdbNativeBackup { /** * Copies the jdb log files , with additional verification of the checksums . * @ param sourceFile * @ param destFile * @ throws IOException */ private void verifiedCopyFile ( File sourceFile , File destFile ) throws IOException { } }
if ( ! destFile . exists ( ) ) { destFile . createNewFile ( ) ; } FileInputStream source = null ; FileOutputStream destination = null ; LogVerificationInputStream verifyStream = null ; try { source = new FileInputStream ( sourceFile ) ; destination = new FileOutputStream ( destFile ) ; verifyStream = new LogVerificationInputStream ( env , source , sourceFile . getName ( ) ) ; final byte [ ] buf = new byte [ LOGVERIFY_BUFSIZE ] ; while ( true ) { final int len = verifyStream . read ( buf ) ; if ( len < 0 ) { break ; } destination . write ( buf , 0 , len ) ; } } finally { if ( verifyStream != null ) { verifyStream . close ( ) ; } if ( destination != null ) { destination . close ( ) ; } }
public class CommerceCurrencyPersistenceImpl { /** * Removes all the commerce currencies where groupId = & # 63 ; and primary = & # 63 ; and active = & # 63 ; from the database . * @ param groupId the group ID * @ param primary the primary * @ param active the active */ @ Override public void removeByG_P_A ( long groupId , boolean primary , boolean active ) { } }
for ( CommerceCurrency commerceCurrency : findByG_P_A ( groupId , primary , active , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( commerceCurrency ) ; }
public class SerialParameters { /** * Sets the number of stop bits from the given < tt > String < / tt > . * @ param stopbits the number of stop bits as < tt > String < / tt > . */ public void setStopbits ( String stopbits ) { } }
if ( ModbusUtil . isBlank ( stopbits ) || stopbits . equals ( "1" ) ) { this . stopbits = AbstractSerialConnection . ONE_STOP_BIT ; } else if ( stopbits . equals ( "1.5" ) ) { this . stopbits = AbstractSerialConnection . ONE_POINT_FIVE_STOP_BITS ; } else if ( stopbits . equals ( "2" ) ) { this . stopbits = AbstractSerialConnection . TWO_STOP_BITS ; }
public class RxFile {
    /**
     * Get a thumbnail from the provided Image or Video Uri in the specified size and kind.
     * Kind is a value of MediaStore.Images.Thumbnails.MICRO_KIND or MediaStore.Images.Thumbnails.MINI_KIND.
     *
     * The callable distinguishes three sources:
     *  - Google Drive documents (decoded via a ParcelFileDescriptor),
     *  - file-scheme / other DocumentsProvider uris (same decode path),
     *  - MediaStore uris (thumbnail fetched through MediaStore.*.Thumbnails).
     * On any failure the error is logged and null is emitted.
     */
    private static Observable<Bitmap> getThumbnailFromUriWithSizeAndKind(final Context context, final Uri data, final int requiredWidth, final int requiredHeight, final int kind) {
        return Observable.fromCallable(new Func0<Bitmap>() {
            @Override
            public Bitmap call() {
                Bitmap bitmap = null;
                ParcelFileDescriptor parcelFileDescriptor;
                final BitmapFactory.Options options = new BitmapFactory.Options();
                if (requiredWidth > 0 && requiredHeight > 0) {
                    // NOTE(review): inSampleSize is computed before any bounds
                    // decode has populated 'options', so outWidth/outHeight are
                    // still unset here — confirm calculateInSampleSize copes.
                    options.inJustDecodeBounds = true;
                    options.inSampleSize = calculateInSampleSize(options, requiredWidth, requiredHeight);
                    options.inJustDecodeBounds = false;
                }
                if (!isMediaUri(data)) {
                    // Not a MediaStore uri: decode the content directly.
                    logDebug("Not a media uri:" + data);
                    if (isGoogleDriveDocument(data)) {
                        logDebug("Google Drive Uri:" + data);
                        DocumentFile file = DocumentFile.fromSingleUri(context, data);
                        // Only images and videos can yield a thumbnail bitmap.
                        if (file.getType().startsWith(Constants.IMAGE_TYPE)
                                || file.getType().startsWith(Constants.VIDEO_TYPE)) {
                            logDebug("Google Drive Uri:" + data + " (Video or Image)");
                            try {
                                parcelFileDescriptor = context.getContentResolver()
                                        .openFileDescriptor(data, Constants.READ_MODE);
                                FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
                                bitmap = BitmapFactory.decodeFileDescriptor(fileDescriptor, null, options);
                                parcelFileDescriptor.close();
                                return bitmap;
                            } catch (IOException e) {
                                // Best-effort: log and fall through to return null.
                                logError(e);
                            }
                        }
                    } else if (data.getScheme().equals(Constants.FILE)) {
                        logDebug("Dropbox or other DocumentsProvider Uri:" + data);
                        try {
                            parcelFileDescriptor = context.getContentResolver()
                                    .openFileDescriptor(data, Constants.READ_MODE);
                            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
                            bitmap = BitmapFactory.decodeFileDescriptor(fileDescriptor, null, options);
                            parcelFileDescriptor.close();
                            return bitmap;
                        } catch (IOException e) {
                            logError(e);
                        }
                    } else {
                        // Unknown non-media scheme: try the same descriptor decode.
                        try {
                            parcelFileDescriptor = context.getContentResolver()
                                    .openFileDescriptor(data, Constants.READ_MODE);
                            FileDescriptor fileDescriptor = parcelFileDescriptor.getFileDescriptor();
                            bitmap = BitmapFactory.decodeFileDescriptor(fileDescriptor, null, options);
                            parcelFileDescriptor.close();
                            return bitmap;
                        } catch (IOException e) {
                            logError(e);
                        }
                    }
                } else {
                    // MediaStore uri: last path segment is "<type>:<id>".
                    logDebug("Uri for thumbnail:" + data);
                    String[] parts = data.getLastPathSegment().split(":");
                    String fileId = parts[1];
                    Cursor cursor = null;
                    try {
                        cursor = context.getContentResolver().query(data, null, null, null, null);
                        if (cursor != null) {
                            logDebug("Cursor size:" + cursor.getCount());
                            if (cursor.moveToFirst()) {
                                // Pick video vs image thumbnail provider from the uri text.
                                if (data.toString().contains(Constants.VIDEO)) {
                                    bitmap = MediaStore.Video.Thumbnails.getThumbnail(
                                            context.getContentResolver(), Long.parseLong(fileId), kind, options);
                                } else if (data.toString().contains(Constants.IMAGE)) {
                                    bitmap = MediaStore.Images.Thumbnails.getThumbnail(
                                            context.getContentResolver(), Long.parseLong(fileId), kind, options);
                                }
                            }
                        }
                        return bitmap;
                    } catch (Exception e) {
                        logError(e);
                    } finally {
                        if (cursor != null)
                            cursor.close();
                    }
                }
                // All failure paths end here with whatever was decoded (possibly null).
                return bitmap;
            }
        });
    }
}
public class AbstractProxyFactory { /** * Creates a new indirection handler instance . * @ param brokerKey The associated { @ link PBKey } . * @ param id The subject ' s ids * @ return The new instance */ public IndirectionHandler createIndirectionHandler ( PBKey brokerKey , Identity id ) { } }
Object args [ ] = { brokerKey , id } ; try { return ( IndirectionHandler ) getIndirectionHandlerConstructor ( ) . newInstance ( args ) ; } catch ( InvocationTargetException ex ) { throw new PersistenceBrokerException ( "Exception while creating a new indirection handler instance" , ex ) ; } catch ( InstantiationException ex ) { throw new PersistenceBrokerException ( "Exception while creating a new indirection handler instance" , ex ) ; } catch ( IllegalAccessException ex ) { throw new PersistenceBrokerException ( "Exception while creating a new indirection handler instance" , ex ) ; }
public class Object2ByteArrFieldConversion { /** * @ see FieldConversion # javaToSql ( Object ) */ public Object javaToSql ( Object source ) { } }
if ( source == null ) return null ; try { ByteArrayOutputStream bao = new ByteArrayOutputStream ( ) ; GZIPOutputStream gos = new GZIPOutputStream ( bao ) ; ObjectOutputStream oos = new ObjectOutputStream ( gos ) ; oos . writeObject ( source ) ; oos . close ( ) ; gos . close ( ) ; bao . close ( ) ; byte [ ] result = bao . toByteArray ( ) ; return result ; } catch ( Throwable t ) { throw new ConversionException ( t ) ; }
public class CommonOps_DDRM { /** * Returns the absolute value of the element in the matrix that has the largest absolute value . < br > * < br > * Max { | a < sub > ij < / sub > | } for all i and j < br > * @ param a A matrix . Not modified . * @ return The max abs element value of the matrix . */ public static double elementMaxAbs ( DMatrixD1 a ) { } }
final int size = a . getNumElements ( ) ; double max = 0 ; for ( int i = 0 ; i < size ; i ++ ) { double val = Math . abs ( a . get ( i ) ) ; if ( val > max ) { max = val ; } } return max ;
public class CudaArgs {
    /**
     * Returns the number of cores per SM, based on device compute capability
     * (SM version).
     *
     * NOTE(review): {@code numberOfProcessors} is unused by the lookup — the
     * caller presumably multiplies the result by it; confirm.
     *
     * @param ccMajor compute capability major version
     * @param ccMinor compute capability minor version
     * @param numberOfProcessors number of multiprocessors on the device (unused here)
     * @return cores per SM for the architecture, or -1 when the device is unknown
     */
    public static int convertMPtoCores(int ccMajor, int ccMinor, int numberOfProcessors) {
        switch (ccMajor) {
            case 1:
                return 8;   // Tesla
            case 2:
                return ccMinor == 1 ? 48 : 32; // Fermi GF10x vs GF100
            case 3:
                return 192; // Kepler
            case 5:
                return 128; // Maxwell
            default:
                // Unknown architecture.
                return -1;
        }
    }
}
public class TransactionToDispatchableMap {
    /**
     * Removes the association between a global transaction and a dispatchable.
     *
     * This removes the affinity between the (SIXAResource, XID) pair and a
     * dispatchable. If the removal empties the resource's entry, the resource
     * is dropped from the table entirely. Finding a LOCAL transaction entry
     * under this client id is an internal error and is FFDC'd and thrown.
     *
     * @param clientId the client transaction identifier that specifies the
     *        SIXAResource that the unit of work was being carried out under.
     * @param xid the XID identifying the global unit of work.
     * @return the removed dispatchable - or null if no dispatchable matching
     *         the resource and XID parameters was present in the table.
     */
    public Dispatchable removeDispatchableForGlobalTransaction(int clientId, XidProxy xid) {
        if (tc.isEntryEnabled())
            SibTr.entry(this, tc, "removeDispatchableForGlobalTransaction", new Object[] { "" + clientId });
        // Look up the first-level entry for this client id, if any.
        AbstractFirstLevelMapEntry firstLevelEntry = null;
        if (idToFirstLevelEntryMap.containsKey(clientId)) {
            firstLevelEntry = (AbstractFirstLevelMapEntry) idToFirstLevelEntryMap.get(clientId);
        }
        final Dispatchable result;
        if (firstLevelEntry == null) {
            // Nothing registered for this client id.
            result = null;
        } else {
            if (!firstLevelEntry.isLocalTransaction()) {
                // Expected case: a global-transaction entry keyed by XID.
                GlobalFirstLevelMapEntry globalEntry = (GlobalFirstLevelMapEntry) firstLevelEntry;
                result = globalEntry.removeDispatchable(xid);
                // No in-flight XIDs left: remove the resource from the table.
                if (globalEntry.isEmpty())
                    idToFirstLevelEntryMap.remove(clientId);
            } else {
                // Internal error: a local-transaction entry under a global-tx id.
                final SIErrorException exception = new SIErrorException(CommsConstants.TRANTODISPATCHMAP_REMOVEFORGLOBALTX_01);
                FFDCFilter.processException(exception, CLASS_NAME + ".removeDispatchableForGlobalTransaction",
                        CommsConstants.TRANTODISPATCHMAP_REMOVEFORGLOBALTX_01,
                        new Object[] { "" + clientId, firstLevelEntry, idToFirstLevelEntryMap, this });
                if (tc.isEventEnabled())
                    SibTr.exception(this, tc, exception);
                throw exception;
            }
        }
        if (tc.isEntryEnabled())
            SibTr.exit(this, tc, "removeDispatchableForGlobalTransaction", result);
        return result;
    }
}
public class TimeExpression { /** * Create a milliseconds expression ( range 0-999) * < p > Is always 0 in JPA and JDO modules < / p > * @ return milli second */ public NumberExpression < Integer > milliSecond ( ) { } }
if ( milliseconds == null ) { milliseconds = Expressions . numberOperation ( Integer . class , Ops . DateTimeOps . MILLISECOND , mixin ) ; } return milliseconds ;
public class StreamletUtils {
    /**
     * Converts a list of integers into a comma-separated string.
     *
     * @param ls integers to render; must not be null and must not contain nulls
     * @return the elements joined by ", "
     */
    public static String intListAsString(List<Integer> ls) {
        // Render each element and join in one pipeline.
        return ls.stream()
                .map(Object::toString)
                .collect(Collectors.joining(", "));
    }
}
public class CommonsHttpInvokerRequestExecutor { /** * Set the connection timeout for the underlying HttpClient . * A timeout value of 0 specifies an infinite timeout . * @ param timeout the timeout value in milliseconds * @ see org . apache . commons . httpclient . params . HttpConnectionManagerParams # setConnectionTimeout ( int ) */ public void setConnectTimeout ( int timeout ) { } }
Assert . isTrue ( timeout >= 0 , "Timeout must be a non-negative value" ) ; this . httpClient . getHttpConnectionManager ( ) . getParams ( ) . setConnectionTimeout ( timeout ) ;
public class UrlResource { /** * This implementation creates a UrlResource , applying the given path * relative to the path of the underlying URL of this resource descriptor . * @ see java . net . URL # URL ( java . net . URL , String ) */ @ Override public Resource createRelative ( String relativePath ) throws MalformedURLException { } }
if ( relativePath . startsWith ( "/" ) ) { relativePath = relativePath . substring ( 1 ) ; } return new UrlResource ( new URL ( this . url , relativePath ) ) ;
public class PoolManager {
    /**
     * Do we need to reclaim connections.
     *
     * Scans every free-pool bucket (holding that bucket's lock) and answers
     * true as soon as one wrapper has exceeded the aged timeout, or has idled
     * past the unused timeout while the pool is above its minimum size.
     */
    private boolean needToReclaimConnections() {
        boolean removemcw = false;
        for (int j = 0; j < maxFreePoolHashSize; ++j) {
            // Lock one bucket at a time while inspecting its wrappers.
            synchronized (freePool[j].freeConnectionLockObject) {
                // Snapshot the total count once per bucket for the min-size check.
                int localtotalConnectionCount = totalConnectionCount.get();
                int mcwlSize = freePool[j].mcWrapperList.size();
                for (int k = 0; k < mcwlSize; ++k) {
                    MCWrapper mcw = (MCWrapper) freePool[j].mcWrapperList.get(k);
                    // agedTimeout / unusedTimeout of -1 mean the check is disabled.
                    if (agedTimeout != -1) {
                        if (mcw.hasAgedTimedOut(agedTimeoutMillis)) {
                            removemcw = true;
                            break;
                        }
                    }
                    if (!removemcw && unusedTimeout != -1) {
                        // NOTE(review): unusedTimeout * 1000 is int arithmetic —
                        // could overflow for very large configured timeouts; confirm range.
                        if (mcw.hasIdleTimedOut(unusedTimeout * 1000) && (localtotalConnectionCount > minConnections)) {
                            removemcw = true;
                            break;
                        }
                    }
                }
            }
            // One candidate is enough; stop scanning further buckets.
            if (removemcw) {
                break;
            }
        }
        return removemcw;
    }
}
public class AfplibFactoryImpl {
    /**
     * Converts an FNCResYUBase value to its string form.
     *
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    // NOTE(review): EMF-generated converter — regenerate from the model
    // rather than hand-editing.
    public String convertFNCResYUBaseToString(EDataType eDataType, Object instanceValue) {
        // Null maps to null; anything else uses its toString form.
        return instanceValue == null ? null : instanceValue.toString();
    }
}
public class SessionDatasource { /** * load an new instance of the client datasource scope * @ param datasourceName * @ param appName * @ param pc * @ param checkExpires * @ return client datasource scope * @ throws PageException */ public static Session getInstance ( String datasourceName , PageContext pc , Log log ) throws PageException { } }
Struct _sct = _loadData ( pc , datasourceName , "session" , SCOPE_SESSION , log , false ) ; if ( _sct == null ) _sct = new StructImpl ( ) ; return new SessionDatasource ( pc , datasourceName , _sct ) ;
public class LocalServerReceiver { /** * Blocks until the server receives a login result , or the server is stopped * by { @ link # stop ( ) } , to return an authorization code . * @ return authorization code if login succeeds ; may return { @ code null } if the server * is stopped by { @ link # stop ( ) } * @ throws IOException if the server receives an error code ( through an HTTP request * parameter { @ code error } ) */ @ Override public String waitForCode ( ) throws IOException { } }
waitUnlessSignaled . acquireUninterruptibly ( ) ; if ( error != null ) { throw new IOException ( "User authorization failed (" + error + ")" ) ; } return code ;
public class TableWorks {

    /**
     * Creates a foreign key on an existing table. Foreign keys are enforced by
     * indexes on both the referencing (child) and referenced (main) tables.
     * <p>
     * Since version 1.7.2, a unique constraint on the referenced columns must
     * exist. The non-unique index on the referencing table is now always created
     * whether or not a PK or unique constraint index on the columns exists.
     * Foreign keys on temp tables can reference other temp tables with the same
     * rules above. Foreign keys on permanent tables cannot reference temp
     * tables. Duplicate foreign keys are now disallowed.
     *
     * @param c the constraint object describing the FK to add
     */
    void addForeignKey(Constraint c) {
        // Validate the FK definition (duplicates, temp-table rules, etc.).
        checkCreateForeignKey(c);
        // The referenced (main) columns must already be covered by a unique
        // constraint; its index backs the main side of the FK.
        Constraint uniqueConstraint =
                c.core.mainTable.getUniqueConstraintForColumns(c.core.mainCols, c.core.refCols);
        Index mainIndex = uniqueConstraint.getMainIndex();
        // Every existing referencing row must match a referenced row.
        uniqueConstraint.checkReferencedRows(session, table, c.core.refCols);
        // Determine whether this is a forward reference: different schema, or
        // the main table is defined after the referencing table.
        int offset = database.schemaManager.getTableIndex(table);
        boolean isForward = c.core.mainTable.getSchemaName() != table.getSchemaName();
        if (offset != -1 && offset < database.schemaManager.getTableIndex(c.core.mainTable)) {
            isForward = true;
        }
        // Always create a (non-unique) index on the referencing columns.
        HsqlName indexName = database.nameManager.newAutoName(
                "IDX", table.getSchemaName(), table.getName(), SchemaObject.INDEX);
        Index refIndex = table.createIndexStructure(
                indexName, c.core.refCols, null, null, false, false, true, isForward);
        HsqlName mainName = database.nameManager.newAutoName(
                "REF", c.getName().name, table.getSchemaName(), table.getName(), SchemaObject.INDEX);
        // Wire up the constraint core with both sides of the relationship.
        c.core.uniqueName = uniqueConstraint.getName();
        c.core.mainName = mainName;
        c.core.mainIndex = mainIndex;
        c.core.refTable = table;
        c.core.refName = c.getName();
        c.core.refIndex = refIndex;
        c.isForward = isForward;
        // Rebuild the referencing table's definition with the new constraint
        // and index, then migrate the row data into the new structure.
        Table tn = table.moveDefinition(
                session, table.tableType, null, c, refIndex, -1, 0, emptySet, emptySet);
        tn.moveData(session, table, -1, 0);
        // Register the reverse (main-side) constraint and the schema object.
        c.core.mainTable.addConstraint(new Constraint(mainName, c));
        database.schemaManager.addSchemaObject(c);
        // Swap the new table in and refresh everything that depends on it.
        database.persistentStoreCollection.releaseStore(table);
        setNewTableInSchema(tn);
        updateConstraints(tn, emptySet);
        database.schemaManager.recompileDependentObjects(tn);
        table = tn;
    }
}
public class ExecutionGraph {

    /**
     * This method is a callback during cancellation/failover and called when all
     * tasks have reached a terminal state (cancelled/failed/finished).
     * <p>
     * Runs a compare-and-swap loop over the job status: CANCELLING transitions to
     * CANCELED, FAILING attempts restart-or-fail, and any other non-terminal state
     * is treated as an illegal situation and fails the job globally.
     *
     * @param expectedGlobalVersionForRestart the global modification version the
     *        restart attempt must still match (stale callbacks are ignored by
     *        {@code tryRestartOrFail})
     */
    private void allVerticesInTerminalState(long expectedGlobalVersionForRestart) {
        assertRunningInJobMasterMainThread();
        // we are done, transition to the final state
        JobStatus current;
        // Loop because transitionState()/tryRestartOrFail() can lose a race with
        // a concurrent status change; re-read and retry until one path succeeds.
        while (true) {
            current = this.state;
            if (current == JobStatus.RUNNING) {
                // All vertices terminal while RUNNING is inconsistent — fail hard.
                failGlobal(new Exception(
                        "ExecutionGraph went into allVerticesInTerminalState() from RUNNING"));
            } else if (current == JobStatus.CANCELLING) {
                if (transitionState(current, JobStatus.CANCELED)) {
                    onTerminalState(JobStatus.CANCELED);
                    break;
                }
            } else if (current == JobStatus.FAILING) {
                if (tryRestartOrFail(expectedGlobalVersionForRestart)) {
                    break;
                }
                // concurrent job status change, let's check again
            } else if (current.isGloballyTerminalState()) {
                LOG.warn("Job has entered globally terminal state without waiting for all "
                        + "job vertices to reach final state.");
                break;
            } else {
                failGlobal(new Exception(
                        "ExecutionGraph went into final state from state " + current));
                break;
            }
        }
        // done transitioning the state
    }
}
public class MicrochipPotentiometerDeviceController { /** * Fetches the terminal - configuration from the device for a certain channel . * @ param channel The channel * @ return The current terminal - configuration * @ throws IOException Thrown if communication fails or device returned a malformed result */ public DeviceControllerTerminalConfiguration getTerminalConfiguration ( final DeviceControllerChannel channel ) throws IOException { } }
if ( channel == null ) { throw new RuntimeException ( "null-channel is not allowed. For devices " + "knowing just one wiper Channel.A is mandatory for " + "parameter 'channel'" ) ; } // read configuration from device int tcon = read ( channel . getTerminalControllAddress ( ) ) ; // build result boolean channelEnabled = ( tcon & channel . getHardwareConfigControlBit ( ) ) > 0 ; boolean pinAEnabled = ( tcon & channel . getTerminalAConnectControlBit ( ) ) > 0 ; boolean pinWEnabled = ( tcon & channel . getWiperConnectControlBit ( ) ) > 0 ; boolean pinBEnabled = ( tcon & channel . getTerminalBConnectControlBit ( ) ) > 0 ; return new DeviceControllerTerminalConfiguration ( channel , channelEnabled , pinAEnabled , pinWEnabled , pinBEnabled ) ;