signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class TreeMap { /** * Determines if the tree is empty as viewed by the transaction . * Returns true if there are no entries visible to the transaction and false * if there are entries visible . * @ param transaction the transaction which sees the tree as empty . * @ return true if no entries are visible , false if there are entries visible . * @ exception ObjectManagerException */ public synchronized boolean isEmpty ( Transaction transaction ) throws ObjectManagerException { } }
if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "isEmpty" , new Object [ ] { transaction } ) ; boolean returnValue ; if ( firstEntry ( transaction ) == null ) { returnValue = true ; } else { returnValue = false ; } if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "isEmpty" , new Object [ ] { new Boolean ( returnValue ) } ) ; return returnValue ;
public class Constraint { /** * Checks for foreign key or check constraint violation when * inserting a row into the child table . */ void checkInsert ( Session session , Table table , Object [ ] row ) { } }
switch ( constType ) { case CHECK : if ( ! isNotNull ) { checkCheckConstraint ( session , table , row ) ; } return ; case FOREIGN_KEY : PersistentStore store = session . sessionData . getRowStore ( core . mainTable ) ; if ( ArrayUtil . hasNull ( row , core . refCols ) ) { if ( core . matchType == OpTypes . MATCH_SIMPLE ) { return ; } if ( core . refCols . length == 1 ) { return ; } if ( ArrayUtil . hasAllNull ( row , core . refCols ) ) { return ; } // core . matchType = = OpTypes . MATCH _ FULL } else if ( core . mainIndex . exists ( session , store , row , core . refCols ) ) { return ; } else if ( core . mainTable == core . refTable ) { // special case : self referencing table and self referencing row int compare = core . mainIndex . compareRowNonUnique ( row , core . refCols , row ) ; if ( compare == 0 ) { return ; } } String [ ] info = new String [ ] { core . refName . name , core . mainTable . getName ( ) . name } ; throw Error . error ( ErrorCode . X_23502 , ErrorCode . CONSTRAINT , info ) ; }
public class AbstractValidate { /** * Validate that the specified primitive value falls between the two exclusive values specified ; otherwise , throws an exception . * < pre > Validate . exclusiveBetween ( 0 , 2 , 1 ) ; < / pre > * @ param start * the exclusive start value * @ param end * the exclusive end value * @ param value * the value to validate * @ return the value * @ throws IllegalArgumentValidationException * if the value falls out of the boundaries */ public long exclusiveBetween ( long start , long end , long value ) { } }
if ( value <= start || value >= end ) { fail ( String . format ( DEFAULT_EXCLUSIVE_BETWEEN_EX_MESSAGE , value , start , end ) ) ; } return value ;
public class UploadManager { /** * 同步上传文件 。 使用 form 表单方式上传 , 建议只在文件较小情况下使用此方式 , 如 file . size ( ) < 1024 * 1024。 * @ param file 上传的文件绝对路径 * @ param key 上传数据保存的文件名 * @ param token 上传凭证 * @ param options 上传数据的可选参数 * @ return 响应信息 ResponseInfo # response 响应体 , 序列化后 json 格式 */ public ResponseInfo syncPut ( String file , String key , String token , UploadOptions options ) { } }
return syncPut ( new File ( file ) , key , token , options ) ;
public class Dispatching { /** * Adapts a function to a binary function by ignoring the first parameter . * @ param < T1 > the adapted function first parameter type * @ param < T2 > the adapted function second parameter type * @ param < R > the adapted function result type * @ param function the function to be adapted * @ param ignored the adapted function ignored parameter type class * @ return the adapted function */ public static < T1 , T2 , R > BiFunction < T1 , T2 , R > ignore1st ( Function < T2 , R > function , Class < T1 > ignored ) { } }
dbc . precondition ( function != null , "cannot ignore parameter of a null function" ) ; return ( first , second ) -> function . apply ( second ) ;
public class LifeCycleHelper { /** * Assigns / injects { @ link Provided } property values to a component . * @ param descriptor * @ param component */ public void assignProvidedProperties ( ComponentDescriptor < ? > descriptor , Object component ) { } }
AssignProvidedCallback callback = new AssignProvidedCallback ( _injectionManager ) ; callback . onEvent ( component , descriptor ) ;
public class ExpressionUtil { /** * Substitutes dynamic values for expressions in the input string . * @ param input raw input string * @ param model object containing the values to substitute * @ param map of images to populate based on special $ { image : * . gif } syntax * @ return string with values substituted */ public static String substitute ( String input , Object model , Map < String , String > imageMap , boolean lenient ) throws MdwException { } }
StringBuffer substituted = new StringBuffer ( input . length ( ) ) ; try { Matcher matcher = tokenPattern . matcher ( input ) ; int index = 0 ; while ( matcher . find ( ) ) { String match = matcher . group ( ) ; substituted . append ( input . substring ( index , matcher . start ( ) ) ) ; if ( imageMap != null && ( match . startsWith ( "${image:" ) || match . startsWith ( "#{image:" ) ) ) { String imageFile = match . substring ( 8 , match . length ( ) - 1 ) ; String imageId = imageFile . substring ( 0 , imageFile . lastIndexOf ( '.' ) ) ; substituted . append ( "cid:" + imageId ) ; imageMap . put ( imageId , imageFile ) ; } else if ( match . startsWith ( "#{" ) ) { // ignore # { . . . in favor of facelets ( except images ) substituted . append ( match ) ; } else { Object value ; if ( lenient ) { try { value = propUtilsBean . getProperty ( model , match . substring ( 2 , match . length ( ) - 1 ) ) ; if ( value == null ) value = match ; } catch ( Exception e ) { value = match ; } } else { value = propUtilsBean . getProperty ( model , match . substring ( 2 , match . length ( ) - 1 ) ) ; } if ( value != null ) substituted . append ( value ) ; } index = matcher . end ( ) ; } substituted . append ( input . substring ( index ) ) ; return substituted . toString ( ) ; } catch ( Exception ex ) { throw new MdwException ( "Error substituting expression value(s)" , ex ) ; }
public class AndroidResourceBitmap { /** * clearResourceBitmaps is called */ public static void clearResourceBitmaps ( ) { } }
if ( ! AndroidGraphicFactory . KEEP_RESOURCE_BITMAPS ) { return ; } synchronized ( RESOURCE_BITMAPS ) { for ( Pair < android . graphics . Bitmap , Integer > p : RESOURCE_BITMAPS . values ( ) ) { p . first . recycle ( ) ; if ( AndroidGraphicFactory . DEBUG_BITMAPS ) { rInstances . decrementAndGet ( ) ; } } if ( AndroidGraphicFactory . DEBUG_BITMAPS ) { rBitmaps . clear ( ) ; } RESOURCE_BITMAPS . clear ( ) ; }
public class DatabaseInformationFull { /** * The DOMAINS view has one row for each domain . < p > * < pre class = " SqlCodeExample " > * < / pre > * @ return Table */ Table DOMAINS ( ) { } }
Table t = sysTables [ DOMAINS ] ; if ( t == null ) { t = createBlankTable ( sysTableHsqlNames [ DOMAINS ] ) ; addColumn ( t , "DOMAIN_CATALOG" , SQL_IDENTIFIER ) ; addColumn ( t , "DOMAIN_SCHEMA" , SQL_IDENTIFIER ) ; addColumn ( t , "DOMAIN_NAME" , SQL_IDENTIFIER ) ; addColumn ( t , "DATA_TYPE" , SQL_IDENTIFIER ) ; addColumn ( t , "CHARACTER_MAXIMUM_LENGTH" , CARDINAL_NUMBER ) ; addColumn ( t , "CHARACTER_OCTET_LENGTH" , CARDINAL_NUMBER ) ; addColumn ( t , "CHARACTER_SET_CATALOG" , SQL_IDENTIFIER ) ; addColumn ( t , "CHARACTER_SET_SCHEMA" , SQL_IDENTIFIER ) ; addColumn ( t , "CHARACTER_SET_NAME" , SQL_IDENTIFIER ) ; addColumn ( t , "COLLATION_CATALOG" , SQL_IDENTIFIER ) ; addColumn ( t , "COLLATION_SCHEMA" , SQL_IDENTIFIER ) ; addColumn ( t , "COLLATION_NAME" , SQL_IDENTIFIER ) ; addColumn ( t , "NUMERIC_PRECISION" , CARDINAL_NUMBER ) ; addColumn ( t , "NUMERIC_PRECISION_RADIX" , CARDINAL_NUMBER ) ; addColumn ( t , "NUMERIC_SCALE" , CARDINAL_NUMBER ) ; addColumn ( t , "DATETIME_PRECISION" , CARDINAL_NUMBER ) ; addColumn ( t , "INTERVAL_TYPE" , CHARACTER_DATA ) ; addColumn ( t , "INTERVAL_PRECISION" , CARDINAL_NUMBER ) ; addColumn ( t , "DOMAIN_DEFAULT" , CHARACTER_DATA ) ; addColumn ( t , "MAXIMUM_CARDINALITY" , SQL_IDENTIFIER ) ; addColumn ( t , "DTD_IDENTIFIER" , SQL_IDENTIFIER ) ; addColumn ( t , "DECLARED_DATA_TYPE" , CHARACTER_DATA ) ; addColumn ( t , "DECLARED_NUMERIC_PRECISION" , CARDINAL_NUMBER ) ; addColumn ( t , "DECLARED_NUMERIC_SCLAE" , CARDINAL_NUMBER ) ; HsqlName name = HsqlNameManager . newInfoSchemaObjectName ( sysTableHsqlNames [ DOMAINS ] . name , false , SchemaObject . INDEX ) ; t . 
createPrimaryKey ( name , new int [ ] { 0 , 1 , 2 , 4 , 5 , 6 } , false ) ; return t ; } final int domain_catalog = 0 ; final int domain_schema = 1 ; final int domain_name = 2 ; final int data_type = 3 ; final int character_maximum_length = 4 ; final int character_octet_length = 5 ; final int character_set_catalog = 6 ; final int character_set_schema = 7 ; final int character_set_name = 8 ; final int collation_catalog = 9 ; final int collation_schema = 10 ; final int collation_name = 11 ; final int numeric_precision = 12 ; final int numeric_precision_radix = 13 ; final int numeric_scale = 14 ; final int datetime_precision = 15 ; final int interval_type = 16 ; final int interval_precision = 17 ; final int domain_default = 18 ; final int maximum_cardinality = 19 ; final int dtd_identifier = 20 ; final int declared_data_type = 21 ; final int declared_numeric_precision = 22 ; final int declared_numeric_scale = 23 ; PersistentStore store = database . persistentStoreCollection . getStore ( t ) ; Iterator it = database . schemaManager . databaseObjectIterator ( SchemaObject . DOMAIN ) ; while ( it . hasNext ( ) ) { Type domain = ( Type ) it . next ( ) ; if ( ! domain . isDomainType ( ) ) { continue ; } if ( ! session . getGrantee ( ) . isAccessible ( domain ) ) { continue ; } Object [ ] data = t . getEmptyRowData ( ) ; data [ domain_catalog ] = database . getCatalogName ( ) . name ; data [ domain_schema ] = domain . getSchemaName ( ) . name ; data [ domain_name ] = domain . getName ( ) . name ; data [ data_type ] = domain . getFullNameString ( ) ; if ( domain . isCharacterType ( ) ) { data [ character_maximum_length ] = ValuePool . getLong ( domain . precision ) ; data [ character_octet_length ] = ValuePool . getLong ( domain . precision * 2 ) ; data [ character_set_catalog ] = database . getCatalogName ( ) . name ; data [ character_set_schema ] = ( ( CharacterType ) domain ) . getCharacterSet ( ) . getSchemaName ( ) . 
name ; data [ character_set_name ] = ( ( CharacterType ) domain ) . getCharacterSet ( ) . getName ( ) . name ; data [ collation_catalog ] = database . getCatalogName ( ) . name ; data [ collation_schema ] = ( ( CharacterType ) domain ) . getCollation ( ) . getSchemaName ( ) . name ; data [ collation_name ] = ( ( CharacterType ) domain ) . getCollation ( ) . getName ( ) . name ; } else if ( domain . isNumberType ( ) ) { data [ numeric_precision ] = ValuePool . getLong ( ( ( NumberType ) domain ) . getPrecision ( ) ) ; data [ declared_numeric_precision ] = data [ numeric_precision ] ; if ( domain . typeCode != Types . SQL_DOUBLE ) { data [ numeric_scale ] = ValuePool . getLong ( domain . scale ) ; data [ declared_numeric_scale ] = data [ numeric_scale ] ; } data [ numeric_precision_radix ] = ValuePool . getLong ( ( ( NumberType ) domain ) . getPrecisionRadix ( ) ) ; } else if ( domain . isBooleanType ( ) ) { } else if ( domain . isDateTimeType ( ) ) { data [ datetime_precision ] = ValuePool . getLong ( domain . scale ) ; } else if ( domain . isIntervalType ( ) ) { data [ interval_precision ] = ValuePool . getLong ( domain . precision ) ; data [ interval_type ] = domain . getFullNameString ( ) ; data [ datetime_precision ] = ValuePool . getLong ( domain . scale ) ; } else if ( domain . isBinaryType ( ) ) { data [ character_maximum_length ] = ValuePool . getLong ( domain . precision ) ; data [ character_octet_length ] = ValuePool . getLong ( domain . precision ) ; } else if ( domain . isBitType ( ) ) { data [ character_maximum_length ] = ValuePool . getLong ( domain . precision ) ; data [ character_octet_length ] = ValuePool . getLong ( domain . precision ) ; } Expression defaultExpression = domain . userTypeModifier . getDefaultClause ( ) ; if ( defaultExpression != null ) { data [ domain_default ] = defaultExpression . getSQL ( ) ; } t . insertSys ( store , data ) ; } return t ;
public class MessageCacheImpl { /** * Cleans the cache . */ public void clean ( ) { } }
Instant minAge = Instant . now ( ) . minus ( storageTimeInSeconds , ChronoUnit . SECONDS ) ; synchronized ( messages ) { messages . removeIf ( messageRef -> Optional . ofNullable ( messageRef . get ( ) ) . map ( message -> ! message . isCachedForever ( ) && message . getCreationTimestamp ( ) . isBefore ( minAge ) ) . orElse ( true ) ) ; long foreverCachedAmount = messages . stream ( ) . map ( Reference :: get ) . filter ( Objects :: nonNull ) . filter ( Message :: isCachedForever ) . count ( ) ; messages . removeAll ( messages . stream ( ) . filter ( messageRef -> Optional . ofNullable ( messageRef . get ( ) ) . map ( message -> ! message . isCachedForever ( ) ) . orElse ( true ) ) . limit ( Math . max ( 0 , messages . size ( ) - capacity - foreverCachedAmount ) ) . collect ( Collectors . toList ( ) ) ) ; }
public class ImportServlet {
    /**
     * Trust every server - don't check for any certificate.
     *
     * SECURITY NOTE(review): this installs an all-trusting TrustManager as the
     * JVM-wide default SSL socket factory, disabling certificate validation for
     * every HTTPS connection in the process. Only acceptable for controlled
     * import scenarios (e.g. self-signed test hosts) — never in production.
     */
    private static void trustAllHosts() {
        // A trust manager that accepts every certificate chain unconditionally.
        TrustManager[] trustAllCerts = new TrustManager[] { new X509TrustManager() {
            @Override
            public java.security.cert.X509Certificate[] getAcceptedIssuers() {
                return new java.security.cert.X509Certificate[] {};
            }

            @Override
            public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
                // intentionally empty: accept every client certificate
            }

            @Override
            public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
                // intentionally empty: accept every server certificate
            }
        } };

        // Install the all-trusting trust manager as the process-wide default.
        try {
            SSLContext context = SSLContext.getInstance("TLS");
            context.init(null, trustAllCerts, new java.security.SecureRandom());
            HttpsURLConnection.setDefaultSSLSocketFactory(context.getSocketFactory());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
public class LdapTemplate { /** * Delete all subcontexts including the current one recursively . * @ param ctx The context to use for deleting . * @ param name The starting point to delete recursively . * @ throws NamingException if any error occurs */ protected void deleteRecursively ( DirContext ctx , Name name ) { } }
NamingEnumeration enumeration = null ; try { enumeration = ctx . listBindings ( name ) ; while ( enumeration . hasMore ( ) ) { Binding binding = ( Binding ) enumeration . next ( ) ; LdapName childName = LdapUtils . newLdapName ( binding . getName ( ) ) ; childName . addAll ( 0 , name ) ; deleteRecursively ( ctx , childName ) ; } ctx . unbind ( name ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Entry " + name + " deleted" ) ; } } catch ( javax . naming . NamingException e ) { throw LdapUtils . convertLdapException ( e ) ; } finally { try { enumeration . close ( ) ; } catch ( Exception e ) { // Never mind this } }
public class CreateApplicationRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CreateApplicationRequest createApplicationRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( createApplicationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createApplicationRequest . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( createApplicationRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class HttpRequestUtils { /** * Gets request headers . * @ param request the request * @ return the request headers */ public static Map < String , String > getRequestHeaders ( final HttpServletRequest request ) { } }
val headers = new LinkedHashMap < String , Object > ( ) ; val headerNames = request . getHeaderNames ( ) ; if ( headerNames != null ) { while ( headerNames . hasMoreElements ( ) ) { val headerName = headerNames . nextElement ( ) ; val headerValue = StringUtils . stripToEmpty ( request . getHeader ( headerName ) ) ; headers . put ( headerName , headerValue ) ; } } return ( Map ) headers ;
public class ShowcaseEntryPoint { /** * Event coming from old dashboards ( created with versions prior to 0.7) */ private void onDashboardDeletedEvent ( @ Observes DashboardDeletedEvent event ) { } }
NavTree navTree = navigationManager . getNavTree ( ) ; navTree . deleteItem ( event . getDashboardId ( ) ) ; navBar . show ( NavTreeDefinitions . GROUP_APP ) ; workbenchNotification . fire ( new NotificationEvent ( constants . notification_dashboard_deleted ( event . getDashboardName ( ) ) , INFO ) ) ;
public class VdmEditor { /** * highlights a node in the text editor . * @ param node */ public void setHighlightRange ( INode node ) { } }
try { int [ ] offsetLength = this . locationSearcher . getNodeOffset ( node ) ; // int offset = getSourceViewer ( ) . getTextWidget ( ) . getCaretOffset ( ) ; Assert . isNotNull ( offsetLength ) ; Assert . isTrue ( offsetLength [ 0 ] > 0 , "Illegal start offset" ) ; Assert . isTrue ( offsetLength [ 0 ] > 0 , "Illegal offset length" ) ; super . setHighlightRange ( offsetLength [ 0 ] , offsetLength [ 1 ] , true ) ; } catch ( IllegalArgumentException e ) { super . resetHighlightRange ( ) ; }
public class Conversion {
    /**
     * Converts an array of short into a long using the default (little endian, Lsb0)
     * byte and bit ordering: each source short is placed at bit position
     * {@code i * 16 + dstPos} of the destination, overwriting the bits it covers.
     *
     * @param src the short array to convert
     * @param srcPos the position in {@code src}, in short unit, from where to start the conversion
     * @param dstInit initial value of the destination long
     * @param dstPos the position of the lsb, in bits, in the result long
     * @param nShorts the number of shorts to convert
     * @return a long containing the selected bits
     * @throws NullPointerException if {@code src} is {@code null}
     * @throws IllegalArgumentException if {@code (nShorts - 1) * 16 + dstPos >= 64}
     * @throws ArrayIndexOutOfBoundsException if {@code srcPos + nShorts > src.length}
     */
    public static long shortArrayToLong(final short[] src, final int srcPos, final long dstInit,
            final int dstPos, final int nShorts) {
        // Nothing to copy: empty-array fast path or zero count.
        if (src.length == 0 && srcPos == 0 || 0 == nShorts) {
            return dstInit;
        }
        // The highest written bit must fit inside the 64-bit destination.
        if ((nShorts - 1) * 16 + dstPos >= 64) {
            throw new IllegalArgumentException("(nShorts-1)*16+dstPos is greater or equal to than 64");
        }
        long result = dstInit;
        for (int i = 0; i < nShorts; i++) {
            final int shift = i * 16 + dstPos;
            final long mask = 0xffffL << shift;
            // mask with 0xffff first so the sign bit of the short is not extended
            final long sourceBits = (0xffffL & src[srcPos + i]) << shift;
            result = (result & ~mask) | sourceBits;
        }
        return result;
    }
}
public class ModelsImpl { /** * Updates the name of an intent classifier . * @ param appId The application ID . * @ param versionId The version ID . * @ param intentId The intent classifier ID . * @ param updateIntentOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the OperationStatus object if successful . */ public OperationStatus updateIntent ( UUID appId , String versionId , UUID intentId , UpdateIntentOptionalParameter updateIntentOptionalParameter ) { } }
return updateIntentWithServiceResponseAsync ( appId , versionId , intentId , updateIntentOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class Utilities { /** * This method read a application descriptor file and return a { @ link org . openqa . selenium . By } object ( xpath , id , link . . . ) . * @ param page * is target page * @ param code * Name of element on the web Page . * @ param args * list of description ( xpath , id , link . . . ) for code . * @ return a { @ link org . openqa . selenium . By } object ( xpath , id , link . . . ) */ public static By getLocator ( Page page , String code , Object ... args ) { } }
return getLocator ( page . getApplication ( ) , page . getPageKey ( ) + code , args ) ;
public class CollectionJsonSerializer { /** * { @ inheritDoc } */ @ Override public void doSerialize ( JsonWriter writer , C values , JsonSerializationContext ctx , JsonSerializerParameters params ) { } }
if ( values . isEmpty ( ) ) { if ( ctx . isWriteEmptyJsonArrays ( ) ) { writer . beginArray ( ) ; writer . endArray ( ) ; } else { writer . cancelName ( ) ; } return ; } if ( ctx . isWriteSingleElemArraysUnwrapped ( ) && values . size ( ) == 1 ) { // there is only one element , we write it directly serializer . serialize ( writer , values . iterator ( ) . next ( ) , ctx , params ) ; } else { writer . beginArray ( ) ; for ( T value : values ) { serializer . serialize ( writer , value , ctx , params ) ; } writer . endArray ( ) ; }
public class AWSCognitoIdentityProviderClient { /** * Creates a new group in the specified user pool . * Requires developer credentials . * @ param createGroupRequest * @ return Result of the CreateGroup operation returned by the service . * @ throws InvalidParameterException * This exception is thrown when the Amazon Cognito service encounters an invalid parameter . * @ throws GroupExistsException * This exception is thrown when Amazon Cognito encounters a group that already exists in the user pool . * @ throws ResourceNotFoundException * This exception is thrown when the Amazon Cognito service cannot find the requested resource . * @ throws TooManyRequestsException * This exception is thrown when the user has made too many requests for a given operation . * @ throws LimitExceededException * This exception is thrown when a user exceeds the limit for a requested AWS resource . * @ throws NotAuthorizedException * This exception is thrown when a user is not authorized . * @ throws InternalErrorException * This exception is thrown when Amazon Cognito encounters an internal error . * @ sample AWSCognitoIdentityProvider . CreateGroup * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / cognito - idp - 2016-04-18 / CreateGroup " target = " _ top " > AWS API * Documentation < / a > */ @ Override public CreateGroupResult createGroup ( CreateGroupRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateGroup ( request ) ;
public class FilterChain { /** * Appends the given element . */ private boolean appendText ( final String indent , final Element element , final StringBuilder sb ) { } }
if ( sb != null ) { if ( element . getTagName ( ) . indexOf ( ":annotation" ) < 0 && element . getTagName ( ) . indexOf ( ":documentation" ) < 0 ) { sb . append ( String . format ( ELEMENT_LOG , indent , element . getTagName ( ) , element . getAttribute ( "name" ) ) ) ; return true ; } } return false ;
public class TargetSpecifications { /** * { @ link Specification } for retrieving { @ link JpaTarget } s including * { @ link JpaTarget # getAssignedDistributionSet ( ) } . * @ param controllerIDs * to search for * @ return the { @ link Target } { @ link Specification } */ public static Specification < JpaTarget > byControllerIdWithAssignedDsInJoin ( final Collection < String > controllerIDs ) { } }
return ( targetRoot , query , cb ) -> { final Predicate predicate = targetRoot . get ( JpaTarget_ . controllerId ) . in ( controllerIDs ) ; targetRoot . fetch ( JpaTarget_ . assignedDistributionSet ) ; return predicate ; } ;
public class SnowflakeFileTransferAgent { /** * A callable that can be executed in a separate thread using executor service . * The callable download files from a stage location to a local location * @ param stage stage information * @ param srcFilePath path that stores the downloaded file * @ param localLocation local location * @ param fileMetadataMap file metadata map * @ param client remote store client * @ param connection connection object * @ param command command string * @ param encMat remote store encryption material * @ param parallel number of parallel threads for downloading * @ return a callable responsible for downloading files */ public static Callable < Void > getDownloadFileCallable ( final StageInfo stage , final String srcFilePath , final String localLocation , final Map < String , FileMetadata > fileMetadataMap , final SnowflakeStorageClient client , final SFSession connection , final String command , final int parallel , final RemoteStoreFileEncryptionMaterial encMat ) { } }
return new Callable < Void > ( ) { public Void call ( ) throws Exception { logger . debug ( "Entering getDownloadFileCallable..." ) ; FileMetadata metadata = fileMetadataMap . get ( srcFilePath ) ; // this shouldn ' t happen if ( metadata == null ) { throw new SnowflakeSQLException ( SqlState . INTERNAL_ERROR , ErrorCode . INTERNAL_ERROR . getMessageCode ( ) , "missing file metadata for: " + srcFilePath ) ; } String destFileName = metadata . destFileName ; logger . debug ( "Started copying file from: {}:{} file path:{} to {} destName:{}" , stage . getStageType ( ) . name ( ) , stage . getLocation ( ) , srcFilePath , localLocation , destFileName ) ; try { switch ( stage . getStageType ( ) ) { case LOCAL_FS : pullFileFromLocal ( stage . getLocation ( ) , srcFilePath , localLocation , destFileName ) ; break ; case AZURE : case S3 : pullFileFromRemoteStore ( stage , srcFilePath , destFileName , localLocation , client , connection , command , parallel , encMat ) ; metadata . isEncrypted = encMat != null ; break ; } } catch ( Throwable ex ) { logger . error ( "Exception encountered during file download" , ex ) ; metadata . resultStatus = ResultStatus . ERROR ; metadata . errorDetails = ex . getMessage ( ) ; throw ex ; } logger . debug ( "filePath: {}" , srcFilePath ) ; File destFile = new File ( localLocation + localFSFileSep + destFileName ) ; long downloadSize = destFile . length ( ) ; // set dest size metadata . destFileSize = downloadSize ; // mark the file as being uploaded metadata . resultStatus = ResultStatus . DOWNLOADED ; return null ; } } ;
public class ModuleXmlReader {
    /**
     * Parses module.xml from an input stream into a normalized DOM document.
     * External entity resolution is disabled to prevent XXE attacks; ordinary
     * element/attribute content is parsed unchanged.
     *
     * @param inputStream the InputStream to parse
     * @return Document the parsed DOM
     * @throws ParserConfigurationException in case of parser mis-configuration
     * @throws SAXException in case of SAX exception
     * @throws IOException in case of IO error
     */
    protected static Document parseXml(final InputStream inputStream)
            throws ParserConfigurationException, SAXException, IOException {
        DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
        // Harden against XXE: never resolve external general/parameter entities.
        dbFactory.setFeature("http://xml.org/sax/features/external-general-entities", false);
        dbFactory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
        dbFactory.setXIncludeAware(false);
        dbFactory.setExpandEntityReferences(false);
        DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
        Document doc = dBuilder.parse(inputStream);
        doc.getDocumentElement().normalize();
        return doc;
    }
}
public class CopticChronology { /** * Obtains a local date in Coptic calendar system from the * proleptic - year , month - of - year and day - of - month fields . * @ param prolepticYear the proleptic - year * @ param month the month - of - year * @ param dayOfMonth the day - of - month * @ return the Coptic local date , not null * @ throws DateTimeException if unable to create the date */ @ Override public CopticDate date ( int prolepticYear , int month , int dayOfMonth ) { } }
return CopticDate . of ( prolepticYear , month , dayOfMonth ) ;
public class CommonAhoCorasickSegmentUtil { /** * 最长分词 , 合并未知语素 * @ param charArray 文本 * @ param trie 自动机 * @ param < V > 类型 * @ return 结果链表 */ public static < V > LinkedList < ResultTerm < V > > segment ( final char [ ] charArray , AhoCorasickDoubleArrayTrie < V > trie ) { } }
LinkedList < ResultTerm < V > > termList = new LinkedList < ResultTerm < V > > ( ) ; final ResultTerm < V > [ ] wordNet = new ResultTerm [ charArray . length ] ; trie . parseText ( charArray , new AhoCorasickDoubleArrayTrie . IHit < V > ( ) { @ Override public void hit ( int begin , int end , V value ) { if ( wordNet [ begin ] == null || wordNet [ begin ] . word . length ( ) < end - begin ) { wordNet [ begin ] = new ResultTerm < V > ( new String ( charArray , begin , end - begin ) , value , begin ) ; } } } ) ; for ( int i = 0 ; i < charArray . length ; ) { if ( wordNet [ i ] == null ) { StringBuilder sbTerm = new StringBuilder ( ) ; int offset = i ; while ( i < charArray . length && wordNet [ i ] == null ) { sbTerm . append ( charArray [ i ] ) ; ++ i ; } termList . add ( new ResultTerm < V > ( sbTerm . toString ( ) , null , offset ) ) ; } else { termList . add ( wordNet [ i ] ) ; i += wordNet [ i ] . word . length ( ) ; } } return termList ;
public class Helper {
    /**
     * Capitalizes the first character of the string and lower-cases the rest.
     *
     * @param s a {@link java.lang.String} object.
     * @return the transformed string; empty input yields an empty string.
     */
    public static String firstUpperCase(String s) {
        if (s.isEmpty()) {
            return "";
        }
        // String-based casing keeps default-locale behaviour identical to toUpperCase/toLowerCase.
        String head = s.substring(0, 1).toUpperCase();
        String tail = s.length() > 1 ? s.substring(1).toLowerCase() : "";
        return head + tail;
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } * { @ link CmisExtensionType } { @ code > } */ @ XmlElementDecl ( namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/" , name = "extension" , scope = CreateFolder . class ) public JAXBElement < CmisExtensionType > createCreateFolderExtension ( CmisExtensionType value ) { } }
return new JAXBElement < CmisExtensionType > ( _GetPropertiesExtension_QNAME , CmisExtensionType . class , CreateFolder . class , value ) ;
public class ServerCookieEncoder { /** * Batch encodes cookies into Set - Cookie header values . * @ param cookies a bunch of cookies * @ return the corresponding bunch of Set - Cookie headers */ public List < String > encode ( Collection < ? extends Cookie > cookies ) { } }
if ( checkNotNull ( cookies , "cookies" ) . isEmpty ( ) ) { return Collections . emptyList ( ) ; } List < String > encoded = new ArrayList < String > ( cookies . size ( ) ) ; Map < String , Integer > nameToIndex = strict && cookies . size ( ) > 1 ? new HashMap < String , Integer > ( ) : null ; int i = 0 ; boolean hasDupdName = false ; for ( Cookie c : cookies ) { encoded . add ( encode ( c ) ) ; if ( nameToIndex != null ) { hasDupdName |= nameToIndex . put ( c . name ( ) , i ++ ) != null ; } } return hasDupdName ? dedup ( encoded , nameToIndex ) : encoded ;
public class MiniTemplatorCache { /** * Returns a cloned MiniTemplator object from the cache . If there is not yet a MiniTemplator object with the * specified < code > templateFileName < / code > in the cache , a new MiniTemplator object is created and stored in the * cache . Then the cached MiniTemplator object is cloned and the clone object is returned . * @ param templateSpec the template specification . * @ return a cloned and reset MiniTemplator object . */ public synchronized MiniTemplator get ( MiniTemplator . TemplateSpecification templateSpec ) throws IOException , MiniTemplator . TemplateSyntaxException { } }
String key = generateCacheKey ( templateSpec ) ; MiniTemplator mt = cache . get ( key ) ; if ( mt == null ) { mt = new MiniTemplator ( templateSpec ) ; cache . put ( key , mt ) ; } return mt . cloneReset ( ) ;
public class Money { /** * ( non - Javadoc ) * @ see MonetaryAmount # divide ( MonetaryAmount ) */ @ Override public Money divide ( double divisor ) { } }
if ( NumberVerifier . isInfinityAndNotNaN ( divisor ) ) { return Money . of ( 0 , getCurrency ( ) ) ; } if ( divisor == 1.0d ) { return this ; } return divide ( new BigDecimal ( String . valueOf ( divisor ) ) ) ;
public class GPixelMath {
    /**
     * Each element has the specified number added to it. Both input and output images can
     * be the same.
     *
     * <p>Dispatches on the concrete image class and narrows {@code value} to the
     * band's primitive type before delegating to the type-specific
     * {@code PixelMath.plus} overload. Planar images are handled by recursing
     * over each band.
     *
     * @param input The input image. Not modified.
     * @param value What is added to each element.
     * @param output The output image. Modified.
     */
    public static <T extends ImageBase<T>> void plus(T input, double value, T output) {
        if (input instanceof ImageGray) {
            // Single-band images: the scalar is truncated to the band's type
            // ((int)/(long)/(float) casts) before delegation.
            if (GrayU8.class == input.getClass()) {
                PixelMath.plus((GrayU8) input, (int) value, (GrayU8) output);
            } else if (GrayS8.class == input.getClass()) {
                PixelMath.plus((GrayS8) input, (int) value, (GrayS8) output);
            } else if (GrayU16.class == input.getClass()) {
                PixelMath.plus((GrayU16) input, (int) value, (GrayU16) output);
            } else if (GrayS16.class == input.getClass()) {
                PixelMath.plus((GrayS16) input, (int) value, (GrayS16) output);
            } else if (GrayS32.class == input.getClass()) {
                PixelMath.plus((GrayS32) input, (int) value, (GrayS32) output);
            } else if (GrayS64.class == input.getClass()) {
                PixelMath.plus((GrayS64) input, (long) value, (GrayS64) output);
            } else if (GrayF32.class == input.getClass()) {
                PixelMath.plus((GrayF32) input, (float) value, (GrayF32) output);
            } else if (GrayF64.class == input.getClass()) {
                PixelMath.plus((GrayF64) input, value, (GrayF64) output);
            } else {
                throw new IllegalArgumentException("Unknown image Type: " + input.getClass().getSimpleName());
            }
        } else if (input instanceof ImageInterleaved) {
            // Interleaved images: one call processes all bands at once.
            if (InterleavedU8.class == input.getClass()) {
                PixelMath.plus((InterleavedU8) input, (int) value, (InterleavedU8) output);
            } else if (InterleavedS8.class == input.getClass()) {
                PixelMath.plus((InterleavedS8) input, (int) value, (InterleavedS8) output);
            } else if (InterleavedU16.class == input.getClass()) {
                PixelMath.plus((InterleavedU16) input, (int) value, (InterleavedU16) output);
            } else if (InterleavedS16.class == input.getClass()) {
                PixelMath.plus((InterleavedS16) input, (int) value, (InterleavedS16) output);
            } else if (InterleavedS32.class == input.getClass()) {
                PixelMath.plus((InterleavedS32) input, (int) value, (InterleavedS32) output);
            } else if (InterleavedS64.class == input.getClass()) {
                PixelMath.plus((InterleavedS64) input, (long) value, (InterleavedS64) output);
            } else if (InterleavedF32.class == input.getClass()) {
                PixelMath.plus((InterleavedF32) input, (float) value, (InterleavedF32) output);
            } else if (InterleavedF64.class == input.getClass()) {
                PixelMath.plus((InterleavedF64) input, value, (InterleavedF64) output);
            } else {
                throw new IllegalArgumentException("Unknown image Type: " + input.getClass().getSimpleName());
            }
        } else {
            // Planar image: apply the operation band by band via recursion.
            // assumes input and output have the same number of bands — TODO confirm
            Planar in = (Planar) input;
            Planar out = (Planar) output;
            for (int i = 0; i < in.getNumBands(); i++) {
                plus(in.getBand(i), value, out.getBand(i));
            }
        }
    }
}
public class Event {
    /**
     * Adds the given event to the events to be thrown when this event
     * has completed (see {@link #isDone()}). Such an event is called
     * a "completion event".
     *
     * <p>Completion events are considered to be caused by the event that
     * caused the completed event. If an event *e1* caused an event
     * *e2* which has a completion event *e2c*, *e1* is only put in
     * state completed when *e2c* has been handled.
     *
     * <p>Completion events are handled by the same {@link EventProcessor}
     * as the event that has been completed.
     *
     * @param completionEvent the completion event to add
     * @return the object for easy chaining
     */
    public Event<T> addCompletionEvent(Event<?> completionEvent) {
        // The set is allocated lazily: most events never get a completion event.
        if (completionEvents == null) {
            completionEvents = new HashSet<>();
        }
        // Set semantics: adding the same completion event twice is a no-op.
        completionEvents.add(completionEvent);
        return this;
    }
}
public class XMLCharHelper { /** * Check if the passed character is invalid for a text node . * @ param eXMLVersion * XML version to be used . May not be < code > null < / code > . * @ param c * char to check * @ return < code > true < / code > if the char is invalid */ public static boolean isInvalidXMLTextChar ( @ Nonnull final EXMLSerializeVersion eXMLVersion , final int c ) { } }
switch ( eXMLVersion ) { case XML_10 : return INVALID_VALUE_CHAR_XML10 . get ( c ) ; case XML_11 : return INVALID_TEXT_VALUE_CHAR_XML11 . get ( c ) ; case HTML : return INVALID_CHAR_HTML . get ( c ) ; default : throw new IllegalArgumentException ( "Unsupported XML version " + eXMLVersion + "!" ) ; }
public class UpdatePipelineRequest { /** * A list of " PipelineActivity " objects . Activities perform transformations on your messages , such as removing , * renaming or adding message attributes ; filtering messages based on attribute values ; invoking your Lambda * functions on messages for advanced processing ; or performing mathematical transformations to normalize device * data . * The list can be 2-25 < b > PipelineActivity < / b > objects and must contain both a < code > channel < / code > and a * < code > datastore < / code > activity . Each entry in the list must contain only one activity , for example : * < code > pipelineActivities = [ { " channel " : { . . . } } , { " lambda " : { . . . } } , . . . ] < / code > * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPipelineActivities ( java . util . Collection ) } or { @ link # withPipelineActivities ( java . util . Collection ) } if * you want to override the existing values . * @ param pipelineActivities * A list of " PipelineActivity " objects . Activities perform transformations on your messages , such as * removing , renaming or adding message attributes ; filtering messages based on attribute values ; invoking * your Lambda functions on messages for advanced processing ; or performing mathematical transformations to * normalize device data . < / p > * The list can be 2-25 < b > PipelineActivity < / b > objects and must contain both a < code > channel < / code > and a * < code > datastore < / code > activity . Each entry in the list must contain only one activity , for example : * < code > pipelineActivities = [ { " channel " : { . . . } } , { " lambda " : { . . . } } , . . . ] < / code > * @ return Returns a reference to this object so that method calls can be chained together . */ public UpdatePipelineRequest withPipelineActivities ( PipelineActivity ... pipelineActivities ) { } }
if ( this . pipelineActivities == null ) { setPipelineActivities ( new java . util . ArrayList < PipelineActivity > ( pipelineActivities . length ) ) ; } for ( PipelineActivity ele : pipelineActivities ) { this . pipelineActivities . add ( ele ) ; } return this ;
public class LayoutManager {
    /**
     * Loads the previously saved layout for the current page. If no
     * previously persisted layout exists for the given page the built
     * in default layout is used.
     *
     * @param manager The docking manager to use
     * @param pageId The page to get the layout for
     * @param perspective the perspective whose id is part of the layout key
     * @return a boolean saying if the layout requested was previously saved
     */
    public static boolean loadPageLayoutData(DockingManager manager, String pageId, Perspective perspective) {
        manager.beginLoadLayoutData();
        try {
            if (isValidLayout(manager, pageId, perspective)) {
                // Build the persisted-layout key from page id and perspective id.
                String pageLayout = MessageFormat.format(PAGE_LAYOUT, pageId, perspective.getId());
                manager.loadLayoutDataFrom(pageLayout);
                return true;
            } else {
                // No saved layout: fall back to the built-in default.
                manager.loadLayoutData();
                return false;
            }
        } catch (Exception e) {
            // NOTE(review): exception is intentionally swallowed and the default
            // layout is used as a best-effort fallback; consider logging 'e'.
            manager.loadLayoutData();
            return false;
        }
    }
}
public class DomainsInner {
    /**
     * List domains under an Azure subscription.
     * List all the domains under an Azure subscription.
     *
     * @return the observable to the List&lt;DomainInner&gt; object
     */
    public Observable<Page<DomainInner>> listAsync() {
        // Wrap the flat List<DomainInner> service response in a single Page so the
        // result matches the paged API shape (no nextPageLink is set).
        return listWithServiceResponseAsync().map(new Func1<ServiceResponse<List<DomainInner>>, Page<DomainInner>>() {
            @Override
            public Page<DomainInner> call(ServiceResponse<List<DomainInner>> response) {
                PageImpl<DomainInner> page = new PageImpl<>();
                page.setItems(response.body());
                return page;
            }
        });
    }
}
public class TokVariable { /** * { @ inheritDoc } */ @ Override public EquPart morph ( ) throws Exception { } }
final EquPart fun = Equ . getInstance ( ) . function ( this ) ; if ( fun == null ) return this ; return fun ;
public class EditText {
    /**
     * Sets the Drawables (if any) to appear to the left of, above, to the
     * right of, and below the text. Use 0 if you do not want a Drawable there.
     * The Drawables' bounds will be set to their intrinsic bounds.
     *
     * <p>Calling this method will overwrite any Drawables previously set using
     * {@link #setCompoundDrawablesRelative} or related methods.
     *
     * @param left Resource identifier of the left Drawable.
     * @param top Resource identifier of the top Drawable.
     * @param right Resource identifier of the right Drawable.
     * @param bottom Resource identifier of the bottom Drawable.
     * @attr ref android.R.styleable#TextView_drawableLeft
     * @attr ref android.R.styleable#TextView_drawableTop
     * @attr ref android.R.styleable#TextView_drawableRight
     * @attr ref android.R.styleable#TextView_drawableBottom
     */
    public void setCompoundDrawablesWithIntrinsicBounds(int left, int top, int right, int bottom) {
        // Pure delegation to the wrapped input view.
        mInputView.setCompoundDrawablesWithIntrinsicBounds(left, top, right, bottom);
    }
}
public class SearchResultToBDBRecordAdapter {
    /**
     * Converts a capture search result into a BDB record by packing its fields
     * into delimiter-separated key and value strings.
     *
     * <p>Key layout: canonicalized-url, capture timestamp, offset, file.
     * Value layout: original url, mime type, http code, digest, redirect url.
     * Field order is part of the on-disk format — do not reorder.
     *
     * @see org.archive.wayback.util.Adapter#adapt(java.lang.Object)
     */
    public BDBRecord adapt(CaptureSearchResult result) {
        StringBuilder keySB = new StringBuilder(40);
        StringBuilder valSB = new StringBuilder(100);
        String origUrl = result.getOriginalUrl();
        String urlKey;
        try {
            urlKey = canonicalizer.urlStringToKey(origUrl);
        } catch (URIException e) {
            // e.printStackTrace();
            // Canonicalization failed: log and fall back to the raw URL so the
            // record is still indexed.
            LOGGER.warning("FAILED canonicalize(" + origUrl + ")");
            urlKey = origUrl;
        }
        keySB.append(urlKey);
        keySB.append(DELIMITER);
        keySB.append(result.getCaptureTimestamp());
        keySB.append(DELIMITER);
        keySB.append(result.getOffset());
        keySB.append(DELIMITER);
        keySB.append(result.getFile());
        valSB.append(result.getOriginalUrl());
        valSB.append(DELIMITER);
        valSB.append(result.getMimeType());
        valSB.append(DELIMITER);
        valSB.append(result.getHttpCode());
        valSB.append(DELIMITER);
        valSB.append(result.getDigest());
        valSB.append(DELIMITER);
        valSB.append(result.getRedirectUrl());
        // NOTE(review): 'key', 'value' and 'record' appear to be reusable
        // instance fields, so this adapter is not thread-safe — confirm callers.
        key.setData(BDBRecordSet.stringToBytes(keySB.toString()));
        value.setData(BDBRecordSet.stringToBytes(valSB.toString()));
        return record;
    }
}
public class XlsSaver {
    /**
     * Saves multiple objects, each to its own sheet.
     *
     * @param templateXlsIn input stream of the template Excel file
     * @param xlsOut output stream to write the result to
     * @param beanObjs array of objects to write
     * @throws IllegalArgumentException {@literal templateXlsIn == null or xlsOut == null or beanObjs == null}
     * @throws XlsMapperException if the mapping fails
     * @throws IOException if reading the template or writing the output fails
     */
    public void saveMultiple(final InputStream templateXlsIn, final OutputStream xlsOut, final Object[] beanObjs)
            throws XlsMapperException, IOException {
        // Pure delegation; argument validation presumably happens in
        // saveMultipleDetail — TODO confirm.
        saveMultipleDetail(templateXlsIn, xlsOut, beanObjs);
    }
}
public class XStreamUtils {
    /**
     * Takes the incoming file-name and checks whether this is a URI using the
     * <tt>file:</tt> protocol or a non-URI and treats it accordingly.
     *
     * @return a file-name {@link java.io.File} representation or <tt>null</tt>
     *         if the file-name was in an invalid URI format
     */
    private static File astFile(final String uriOrFileName) {
        final String xmlName = uriOrFileName + ".xml";
        try {
            if (uriOrFileName.startsWith("file:")) {
                return new File(URI.create(xmlName));
            }
            return new File(xmlName);
        } catch (final IllegalArgumentException invalidUri) {
            // URI.create rejected the string — signal "no file" to the caller.
            return null;
        }
    }
}
public class WebhooksInner {
    /**
     * Lists recent events for the specified webhook.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;EventInner&gt; object
     */
    public Observable<ServiceResponse<Page<EventInner>>> listEventsNextWithServiceResponseAsync(final String nextPageLink) {
        // Fetch one page, then recursively chain the remaining pages until the
        // service stops returning a nextPageLink.
        return listEventsNextSinglePageAsync(nextPageLink)
            .concatMap(new Func1<ServiceResponse<Page<EventInner>>, Observable<ServiceResponse<Page<EventInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<EventInner>>> call(ServiceResponse<Page<EventInner>> page) {
                    String nextPageLink = page.body().nextPageLink();
                    if (nextPageLink == null) {
                        // Last page: stop the recursion.
                        return Observable.just(page);
                    }
                    return Observable.just(page).concatWith(listEventsNextWithServiceResponseAsync(nextPageLink));
                }
            });
    }
}
public class ClassFile { /** * Add a static initializer to this class . */ public MethodInfo addInitializer ( ) { } }
MethodDesc md = MethodDesc . forArguments ( null , null , null ) ; Modifiers af = new Modifiers ( ) ; af . setStatic ( true ) ; MethodInfo mi = new MethodInfo ( this , af , "<clinit>" , md , null ) ; mMethods . add ( mi ) ; return mi ;
public class OperationsApi { /** * Put users . * putUsers * @ return ApiResponse & lt ; PutUsers & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < PutUsers > putUsersWithHttpInfo ( ) throws ApiException { } }
com . squareup . okhttp . Call call = putUsersValidateBeforeCall ( null , null ) ; Type localVarReturnType = new TypeToken < PutUsers > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class DeviceAttributes {
    /**
     * <pre>
     * String representation of device_type.
     * </pre>
     *
     * <code>optional string device_type = 2;</code>
     *
     * <p>Protobuf lazy-decode accessor: the field holds either a decoded
     * String or the raw ByteString; on first access the bytes are decoded as
     * UTF-8 and the decoded String is cached back into the field.
     */
    public java.lang.String getDeviceType() {
        java.lang.Object ref = deviceType_;
        if (ref instanceof java.lang.String) {
            // Already decoded on a previous call.
            return (java.lang.String) ref;
        } else {
            com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
            java.lang.String s = bs.toStringUtf8();
            // Cache the decoded value so subsequent calls skip the conversion.
            deviceType_ = s;
            return s;
        }
    }
}
public class AsyncAppender { /** * Construct a new Counter , register it , and then return it . * @ param name String * @ return Counter */ private Counter initAndRegisterCounter ( String name ) { } }
BasicCounter counter = new BasicCounter ( MonitorConfig . builder ( name ) . build ( ) ) ; DefaultMonitorRegistry . getInstance ( ) . register ( counter ) ; return counter ;
public class CounterContext {
    /**
     * Creates a counter context with a single local shard.
     * For use by tests of compatibility with pre-2.1 counters only.
     */
    public ByteBuffer createLocal(long count) {
        // Layout: 0 global shards, 1 local shard, 0 remote shards.
        ContextState state = ContextState.allocate(0, 1, 0);
        // Local shard for this node's counter id, clock 1, with the given count.
        state.writeLocal(CounterId.getLocalId(), 1L, count);
        return state.context;
    }
}
public class OpenAPIFilter {
    /**
     * {@inheritDoc}
     *
     * <p>Delegates the schema to the configured filter; the visitor context is
     * not used by this implementation.
     */
    @Override
    public Schema visitSchema(Context context, Schema schema) {
        return filter.filterSchema(schema);
    }
}
public class dnssoarec { /** * Use this API to unset the properties of dnssoarec resources . * Properties that need to be unset are specified in args array . */ public static base_responses unset ( nitro_service client , String domain [ ] , String args [ ] ) throws Exception { } }
base_responses result = null ; if ( domain != null && domain . length > 0 ) { dnssoarec unsetresources [ ] = new dnssoarec [ domain . length ] ; for ( int i = 0 ; i < domain . length ; i ++ ) { unsetresources [ i ] = new dnssoarec ( ) ; unsetresources [ i ] . domain = domain [ i ] ; } result = unset_bulk_request ( client , unsetresources , args ) ; } return result ;
public class Index {
    /**
     * Find the set of IDs associated with a full name in the provided map,
     * creating (and registering) an empty set if the name is not yet present.
     *
     * @param indexName the full name in index form
     * @param names the map of full names to sets of IDs
     * @return the (possibly newly created) set of ID strings
     */
    private SortedSet<String> findIdsPerName(final String indexName,
            final SortedMap<String, SortedSet<String>> names) {
        // Single lookup instead of the containsKey + get pair.
        // NOTE(review): assumes the map never stores null values; the original
        // containsKey-based code would have returned such a null as-is.
        SortedSet<String> idsPerName = names.get(indexName);
        if (idsPerName == null) {
            idsPerName = new TreeSet<String>();
            names.put(indexName, idsPerName);
        }
        return idsPerName;
    }
}
public class Choice6 {
    /**
     * Static factory method for wrapping a value of type <code>E</code> in a {@link Choice6}.
     *
     * @param e the value
     * @param <A> the first possible type
     * @param <B> the second possible type
     * @param <C> the third possible type
     * @param <D> the fourth possible type
     * @param <E> the fifth possible type
     * @param <F> the sixth possible type
     * @return the wrapped value as a {@link Choice6}&lt;A, B, C, D, E, F&gt;
     */
    public static <A, B, C, D, E, F> Choice6<A, B, C, D, E, F> e(E e) {
        // _E is the internal subtype representing the fifth alternative.
        return new _E<>(e);
    }
}
public class Scanner {
    /**
     * Scan a single node. The current path is updated for the duration of the scan.
     *
     * <p>Suppression state derived from annotations on {@code tree} is pushed
     * before descending and restored afterwards, so suppressions only apply to
     * the subtree rooted at this node.
     */
    @Override
    public Void scan(Tree tree, VisitorState state) {
        if (tree == null) {
            return null;
        }

        // Remember the suppression state in effect before this node so it can
        // be restored once the subtree has been visited.
        SuppressionInfo prevSuppressionInfo = updateSuppressions(tree, state);
        try {
            return super.scan(tree, state);
        } finally {
            // Restore old suppression state.
            currentSuppressions = prevSuppressionInfo;
        }
    }
}
public class XMLProperties {
    /**
     * For testing only.
     *
     * <p>Ad-hoc smoke test: prints the DTD, loads the properties file named by
     * {@code pArgs[0]}, exercises recursion/defaults, stores copies in XML and
     * plain-properties form, and prints typed property values.
     */
    public static void main(String[] pArgs) throws Exception {
        // -- Print DTD
        System.out.println("DTD: \n" + DTD);
        System.out.println("--");

        // -- Test load
        System.out.println("Reading properties from \"" + pArgs[0] + "\"...");
        XMLProperties props = new XMLProperties();
        props.load(new FileInputStream(new File(pArgs[0])));
        props.list(System.out);
        System.out.println("--");

        // -- Test recursion (defaults chaining through nested Properties)
        String key = "key";
        Object old = props.setProperty(key, "AAA");
        Properties p1 = new XMLProperties(new XMLProperties(props));
        Properties p2 = new Properties(new Properties(props));
        System.out.println("XMLProperties: " + p1.getProperty(key) + " ==" + " Properties: " + p2.getProperty(key));
        if (old == null) {
            props.remove("key");
        } else {
            props.put("key", old); // Put old value back, to avoid confusion...
        }
        System.out.println("--");

        // -- Test store
        // props.store(System.out, "XML Properties file written by XMLProperties.");
        File out = new File("copy_of_" + pArgs[0]);
        System.out.println("Writing properties to \"" + out.getName() + "\"");
        if (!out.exists()) {
            props.store(new FileOutputStream(out), "XML Properties file written by XMLProperties.");
        } else {
            System.err.println("File \"" + out.getName() + "\" allready exists, cannot write!");
        }

        // -- Test utility methods
        // Write normal properties from XMLProperties
        out = new File("copy_of_" + pArgs[0].substring(0, pArgs[0].lastIndexOf(".")) + ".properties");
        System.out.println("Writing properties to \"" + out.getName() + "\"");
        if (!out.exists()) {
            storeProperties(props, new FileOutputStream(out), "Properties file written by XMLProperties.");
        } else {
            System.err.println("File \"" + out.getName() + "\" allready exists, cannot write!");
        }
        System.out.println("--");

        // -- Test type attribute (typed getters/setters)
        System.out.println("getPropertyValue(\"one\"): " + props.getPropertyValue("one") + " class: " + props.getPropertyValue("one").getClass());
        System.out.println("setPropertyValue(\"now\", " + new Date() + "): " + props.setPropertyValue("now", new Date()) + " class: " + props.getPropertyValue("now").getClass());
        System.out.println("getPropertyValue(\"date\"): " + props.getPropertyValue("date") + " class: " + props.getPropertyValue("date").getClass());
        System.out.println("getPropertyValue(\"time\"): " + props.getPropertyValue("time") + " class: " + props.getPropertyValue("time").getClass());
    }
}
public class SldUtilities {
    /**
     * Creates a copy of the given color carrying the supplied alpha component.
     *
     * @param color the color whose RGB components are kept.
     * @param alpha an alpha value between 0 and 255.
     * @return a new color with the same RGB and the given alpha.
     */
    public static Color colorWithAlpha(Color color, int alpha) {
        final int red = color.getRed();
        final int green = color.getGreen();
        final int blue = color.getBlue();
        return new Color(red, green, blue, alpha);
    }
}
public class ViewPosition { /** * Restores ViewPosition from the string created by { @ link # pack ( ) } method . * @ param str Serialized position string * @ return De - serialized position */ @ SuppressWarnings ( "unused" ) // Public API public static ViewPosition unpack ( String str ) { } }
String [ ] parts = TextUtils . split ( str , SPLIT_PATTERN ) ; if ( parts . length != 4 ) { throw new IllegalArgumentException ( "Wrong ViewPosition string: " + str ) ; } Rect view = Rect . unflattenFromString ( parts [ 0 ] ) ; Rect viewport = Rect . unflattenFromString ( parts [ 1 ] ) ; Rect visible = Rect . unflattenFromString ( parts [ 2 ] ) ; Rect image = Rect . unflattenFromString ( parts [ 3 ] ) ; if ( view == null || viewport == null || image == null ) { throw new IllegalArgumentException ( "Wrong ViewPosition string: " + str ) ; } return new ViewPosition ( view , viewport , visible , image ) ;
public class Tile {
    /**
     * Defines if the second hand of the clock will be drawn.
     *
     * @param VISIBLE whether the second hand should be visible
     */
    public void setSecondsVisible(boolean VISIBLE) {
        // Lazily-created property pattern: before the JavaFX property exists,
        // store the value in the plain backing field and request a redraw;
        // afterwards the property itself propagates the change.
        if (null == secondsVisible) {
            _secondsVisible = VISIBLE;
            fireTileEvent(REDRAW_EVENT);
        } else {
            secondsVisible.set(VISIBLE);
        }
    }
}
public class MtasSolrStatus {
    /**
     * Update shard info.
     *
     * <p>Polls every shard for the status of each unfinished stage, merges the
     * per-shard answers into {@code shardStageStatus}, and records success or
     * failure in {@code shardInfoUpdated}/{@code shardInfoUpdate}.
     */
    private final void updateShardInfo() {
        final long expirationTime = 1000;
        // don't update too much
        // NOTE(review): this skips when the info is marked updated AND the
        // stored expire time is already in the past; verify the comparison
        // direction matches the intended throttling behavior.
        if (shardKey == null || (shardInfoUpdated && Objects.requireNonNull(shardInfoUpdate, "update expire time not set") < System.currentTimeMillis())) {
            return;
        }
        // and only if necessary
        if (!shardInfoUpdated || !finished || shardInfoNeedUpdate) {
            // reset
            shardInfoError = false;
            // get list of relevant stages (those not yet finished)
            Set<Integer> stagesList = new HashSet<>();
            for (Integer stage : shardStageKeys.keySet()) {
                if (!shardStageStatus.containsKey(stage) || !shardStageStatus.get(stage).finished) {
                    stagesList.add(stage);
                }
            }
            // loop for this list over shards
            for (Entry<String, ShardStatus> entry : shards.entrySet()) {
                ShardStatus shardStatus = entry.getValue();
                SolrClient solrClient = null;
                StageStatus stageStatus;
                // then loop over stages
                for (Integer stage : stagesList) {
                    // get shardStage, creating the aggregate entry on first use
                    if (!shardStageStatus.containsKey(stage)) {
                        stageStatus = new StageStatus(stage);
                        shardStageStatus.put(stage, stageStatus);
                    } else {
                        stageStatus = shardStageStatus.get(stage);
                    }
                    if (shardStatus.finishedStages.contains(stage)) {
                        // shard already reported this stage finished: reuse cached count
                        stageStatus.add(true, shardStatus.numberDocumentsFoundStage.get(stage));
                    } else {
                        // create request for the shard's mtas status handler
                        ModifiableSolrParams solrParams = new ModifiableSolrParams();
                        solrParams.add(CommonParams.QT, shardStatus.mtasHandler);
                        solrParams.add(MtasRequestHandler.PARAM_ACTION, MtasRequestHandler.ACTION_STATUS);
                        solrParams.add(MtasRequestHandler.PARAM_KEY, shardStageKeys.get(stage));
                        try {
                            // set solrClient (a fresh client per request; closed in finally)
                            solrClient = new HttpSolrClient.Builder(shardStatus.location).build();
                            // get response
                            QueryResponse response = solrClient.query(solrParams);
                            // check for response
                            if (response.getResponse().findRecursive(MtasSolrComponentStatus.NAME) != null) {
                                shardStatus.numberDocumentsFoundStage.put(stage, getLong(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_DOCUMENT_NUMBER_FOUND));
                                shardStatus.timeStage.put(stage, getInteger(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_TIME_TOTAL));
                                stageStatus.add(shardStatus.setFinishedStage(stage, getBoolean(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_FINISHED)), shardStatus.numberDocumentsFoundStage.get(stage));
                                shardInfoError = shardStatus.setErrorStage(stage, getString(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_ERROR)) || shardInfoError;
                                shardStatus.setAbortStage(stage, getString(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_ABORT));
                                // for the stage currently running, also copy detailed progress counters
                                if (stage.equals(currentStage)) {
                                    shardStatus.stage = stage;
                                    shardStatus.stageNumberDocumentsFinished = getLong(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_DOCUMENT_NUMBER_FINISHED);
                                    shardStatus.stageSubNumberDocumentsFinished = getLongMap(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_DOCUMENT_SUB_NUMBER_FINISHED);
                                    shardStatus.stageSubNumberDocumentsTotal = getLong(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_DOCUMENT_SUB_NUMBER_TOTAL);
                                    shardStatus.stageSubNumberDocumentsFinishedTotal = getLong(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_DOCUMENT_SUB_NUMBER_FINISHED_TOTAL);
                                    shardStatus.stageNumberSegmentsFinished = getInteger(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_SEGMENT_SUB_NUMBER_FINISHED_TOTAL);
                                    shardStatus.stageSubNumberSegmentsFinished = getIntegerMap(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_SEGMENT_SUB_NUMBER_FINISHED);
                                    shardStatus.stageSubNumberSegmentsTotal = getInteger(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_SEGMENT_SUB_NUMBER_TOTAL);
                                    shardStatus.stageSubNumberSegmentsFinishedTotal = getInteger(response.getResponse(), MtasSolrComponentStatus.NAME, NAME_STATUS_SEGMENT_SUB_NUMBER_FINISHED_TOTAL);
                                }
                            } else if (!finished && currentStage.equals(stage) && !stageStatus.checked) {
                                // no status payload for the active, unchecked stage: mark unfinished
                                stageStatus.finished = false;
                            }
                        } catch (SolrServerException | IOException e) {
                            // record the failure on the shard, keep polling the rest
                            shardInfoError = shardInfoError || shardStatus.setErrorStage(stage, e.getMessage());
                        } finally {
                            if (solrClient != null) {
                                try {
                                    solrClient.close();
                                } catch (IOException e) {
                                    shardInfoError = shardInfoError || shardStatus.setErrorStage(stage, e.getMessage());
                                }
                            }
                        }
                    }
                }
            }
            if (!shardInfoError) {
                // successful round: mark stages checked and schedule expiry
                for (StageStatus item : shardStageStatus.values()) {
                    item.checked = true;
                }
                shardInfoUpdated = true;
                shardInfoUpdate = System.currentTimeMillis() + expirationTime;
            } else {
                // errors seen: force a refresh on the next call
                shardInfoUpdated = false;
                shardInfoUpdate = null;
            }
        }
    }
}
public class TagUtils {
    /**
     * Coerces an arbitrary value to an {@code int}.
     *
     * <p>{@code null} maps to 0, {@link Number} instances are narrowed via
     * {@link Number#intValue()}, and anything else is parsed from its string
     * representation.
     *
     * @param value the value to convert, may be {@code null}
     * @return the int value
     * @throws NumberFormatException if a non-numeric value's string form
     *         cannot be parsed as an integer
     */
    public static int getInteger(Object value) {
        if (value == null) {
            return 0;
        }
        if (value instanceof Number) {
            return ((Number) value).intValue();
        }
        // parseInt avoids the needless boxing of Integer.valueOf(...).intValue()
        // while throwing the same NumberFormatException on bad input.
        return Integer.parseInt(value.toString());
    }
}
public class Task {
    /**
     * Counters to measure the usage of the different file systems.
     *
     * <p>Builds the counter names for the given URI scheme by upper-casing the
     * scheme and appending each counter suffix (BYTES_READ, BYTES_WRITTEN,
     * FILES_CREATED, BYTES_READ_LOCAL, BYTES_READ_RACK, READ_EXCEPTIONS,
     * WRITE_EXCEPTIONS).
     *
     * @param uriScheme the file system's URI scheme, e.g. "hdfs"
     * @return the counter names, in the fixed order listed above
     */
    protected static String[] getFileSystemCounterNames(String uriScheme) {
        final String prefix = uriScheme.toUpperCase();
        final String[] suffixes = {
            "_BYTES_READ",
            "_BYTES_WRITTEN",
            "_FILES_CREATED",
            "_BYTES_READ_LOCAL",
            "_BYTES_READ_RACK",
            "_READ_EXCEPTIONS",
            "_WRITE_EXCEPTIONS"
        };
        final String[] counterNames = new String[suffixes.length];
        for (int i = 0; i < suffixes.length; i++) {
            counterNames[i] = prefix + suffixes[i];
        }
        return counterNames;
    }
}
public class Positions { /** * Positions the owner above the other . * @ param other the other * @ param spacing the spacing * @ return the int supplier */ public static IntSupplier above ( ISized owner , IPositioned other , int spacing ) { } }
checkNotNull ( other ) ; return ( ) -> { return other . position ( ) . y ( ) - owner . size ( ) . height ( ) - spacing ; } ;
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link Location}{@code >}.
     *
     * @param value the Location payload to wrap
     * @return the value wrapped in a JAXBElement with the prov "location" QName
     */
    @XmlElementDecl(namespace = PROV_NS, name = "location")
    public JAXBElement<Location> createLocation(Location value) {
        // scope is null: this element declaration is global, not nested in a type.
        return new JAXBElement<Location>(_Location_QNAME, Location.class, null, value);
    }
}
public class JSONNavi { /** * get the current object value as String if the current Object is null * return null . */ public String asString ( ) { } }
if ( current == null ) return null ; if ( current instanceof String ) return ( String ) current ; return current . toString ( ) ;
public class HttpRequest {
    /**
     * Get the request port. The port is obtained either from an absolute URI,
     * the HTTP Host header field, the connection or the default.
     *
     * @return The port. 0 should be interpreted as the default port.
     */
    public int getPort() {
        // Cached/previously resolved port wins.
        if (_port > 0)
            return _port;
        // A Host header without an explicit port means "default port".
        // NOTE(review): presumably _host is only set when the Host header
        // carried no port (otherwise _port would be set) — confirm.
        if (_host != null)
            return 0;
        // Fall back to the URI's port, then to the connection's server port.
        if (_uri.isAbsolute())
            _port = _uri.getPort(); // may be -1 if the URI has no port — TODO confirm callers handle this
        else if (_connection != null)
            _port = _connection.getServerPort();
        return _port;
    }
}
public class HTMLUtils {
    /**
     * Remove the first element inside a parent element and copy the element's
     * children in the parent.
     *
     * <p>If the first parent with the given tag name has no children, or its
     * first child is not the expected element, the document is left unchanged.
     *
     * @param document the w3c document from which to remove the top level paragraph
     * @param parentTagName the name of the parent tag to look under
     * @param elementTagName the name of the first element to remove
     */
    public static void stripFirstElementInside(Document document, String parentTagName, String elementTagName) {
        NodeList parentNodes = document.getElementsByTagName(parentTagName);
        if (parentNodes.getLength() > 0) {
            Node parentNode = parentNodes.item(0);
            // Look for the expected element below the first parent element
            Node firstChild = parentNode.getFirstChild();
            // Fix: guard against a childless parent; previously firstChild was
            // dereferenced unconditionally and threw a NullPointerException.
            if (firstChild != null && elementTagName.equalsIgnoreCase(firstChild.getNodeName())) {
                // Move all children of the element under the parent element.
                // insertBefore with a null reference node appends at the end;
                // moving a node also shrinks the live child list.
                NodeList elementChildren = firstChild.getChildNodes();
                while (elementChildren.getLength() > 0) {
                    parentNode.insertBefore(elementChildren.item(0), null);
                }
                parentNode.removeChild(firstChild);
            }
        }
    }
}
public class ZipUtils { /** * Replaces the specified file in the provided ZIP file with the * provided content . * @ param zip The zip - file to process * @ param file The file to look for * @ param data The string - data to replace * @ param encoding The encoding that should be used when writing the string data to the file * @ throws IOException Thrown if files can not be read or any other error occurs while handling the Zip - files */ public static void replaceInZip ( File zip , String file , String data , String encoding ) throws IOException { } }
// open the output side File zipOutFile = File . createTempFile ( "ZipReplace" , ".zip" ) ; try { FileOutputStream fos = new FileOutputStream ( zipOutFile ) ; try ( ZipOutputStream zos = new ZipOutputStream ( fos ) ) { // open the input side try ( ZipFile zipFile = new ZipFile ( zip ) ) { boolean found = false ; // walk all entries and copy them into the new file Enumeration < ? extends ZipEntry > entries = zipFile . entries ( ) ; while ( entries . hasMoreElements ( ) ) { ZipEntry entry = entries . nextElement ( ) ; try { if ( entry . getName ( ) . equals ( file ) ) { zos . putNextEntry ( new ZipEntry ( entry . getName ( ) ) ) ; IOUtils . write ( data , zos , encoding ) ; found = true ; } else { zos . putNextEntry ( entry ) ; IOUtils . copy ( zipFile . getInputStream ( entry ) , zos ) ; } } finally { zos . closeEntry ( ) ; } } if ( ! found ) { zos . putNextEntry ( new ZipEntry ( file ) ) ; try { IOUtils . write ( data , zos , "UTF-8" ) ; } finally { zos . closeEntry ( ) ; } } } } // copy over the data FileUtils . copyFile ( zipOutFile , zip ) ; } finally { if ( ! zipOutFile . delete ( ) ) { // noinspection ThrowFromFinallyBlock throw new IOException ( "Error deleting file: " + zipOutFile ) ; } }
public class JNDIObjectFactory {
    /**
     * Declarative Services bind method for the Library service.
     * The reference target is specified via metatype.
     *
     * @param ref the service reference to the Library being bound
     */
    @Reference(name = REFERENCE_LIBRARY, service = Library.class)
    protected void setLibrary(ServiceReference<Library> ref) {
        // Store the reference in the holder; NOTE(review): presumably the actual
        // service object is resolved lazily from this reference on use — confirm.
        libraryRef.setReference(ref);
    }
}
public class TaskImpl { /** * 将任务添加到线程池 , 开始执行 * @ return TAG */ @ Override public String start ( ) { } }
if ( mConsumed ) { throw new IllegalStateException ( "task has been executed already" ) ; } if ( Config . DEBUG ) { Log . v ( TAG , "start() " + getName ( ) ) ; } mConsumed = true ; final Runnable runnable = new Runnable ( ) { @ Override public void run ( ) { execute ( ) ; } } ; final long delayMillis = mInfo . delayMillis ; if ( delayMillis > 0 ) { mInfo . handler . postDelayed ( runnable , delayMillis ) ; } else { runnable . run ( ) ; } return getName ( ) ;
public class SimpleXmlWriter { /** * Writes ' > \ n ' . */ public void openElement ( String elementName ) throws IOException { } }
assert ( elementNames . size ( ) > 0 ) ; assert ( elementNames . get ( elementNames . size ( ) - 1 ) . equals ( elementName ) ) ; writer . write ( ">" ) ; if ( indent || noTextElement ) { writer . write ( "\n" ) ; }
public class EvaluationErrorPrinter { /** * Auxiliary method to print expected and predicted samples . * @ param referenceSample * the reference sample * @ param predictedSample * the predicted sample */ private < S > void printSamples ( final S referenceSample , final S predictedSample ) { } }
final String details = "Expected: {\n" + referenceSample + "}\nPredicted: {\n" + predictedSample + "}" ; this . printStream . println ( details ) ;
public class GVRRenderData { /** * Set the face to be culled * @ param cullFace * { @ code GVRCullFaceEnum . Back } Tells Graphics API to discard * back faces , { @ code GVRCullFaceEnum . Front } Tells Graphics API * to discard front faces , { @ code GVRCullFaceEnum . None } Tells * Graphics API to not discard any face * @ param passIndex * The rendering pass to set cull face state */ public GVRRenderData setCullFace ( GVRCullFaceEnum cullFace , int passIndex ) { } }
if ( passIndex < mRenderPassList . size ( ) ) { mRenderPassList . get ( passIndex ) . setCullFace ( cullFace ) ; } else { Log . e ( TAG , "Trying to set cull face to a invalid pass. Pass " + passIndex + " was not created." ) ; } return this ;
public class DataTable { /** * Returns the parameter list of jQuery and other non - standard JS callbacks . If * there ' s no parameter list for a certain event , the default is simply " event " . * @ return A hash map containing the events . May be null . */ @ Override public Map < String , String > getJQueryEventParameterLists ( ) { } }
Map < String , String > result = new HashMap < String , String > ( ) ; result . put ( "select" , "event, datatable, typeOfSelection, indexes" ) ; result . put ( "deselect" , "event, datatable, typeOfSelection, indexes" ) ; return result ;
public class ObjectRange {
    /**
     * {@inheritDoc}
     *
     * Walks the range in increments of {@code step}, invoking {@code closure}
     * with each visited value. A step of 0 is only legal when the range is
     * degenerate (from == to).
     */
    public void step(int step, Closure closure) {
        if (step == 0) {
            if (compareTo(from, to) != 0) {
                // A zero step over a non-degenerate range would never terminate.
                throw new GroovyRuntimeException("Infinite loop detected due to step size of 0");
            } else {
                return; // from == to and step == 0, nothing to do, so return
            }
        }
        // For a reversed range a positive step walks downwards, so flip the sign.
        if (reverse) {
            step = -step;
        }
        if (step > 0) {
            Comparable first = from;
            Comparable value = from;
            while (compareTo(value, to) <= 0) {
                closure.call(value);
                for (int i = 0; i < step; i++) {
                    value = (Comparable) increment(value);
                    // Bail out if the value wrapped back to (or past) the start,
                    // e.g. on numeric overflow of the underlying type.
                    if (compareTo(value, first) <= 0) return;
                }
            }
        } else {
            // Negative step: walk from 'to' down towards 'from'.
            step = -step;
            Comparable first = to;
            Comparable value = to;
            while (compareTo(value, from) >= 0) {
                closure.call(value);
                for (int i = 0; i < step; i++) {
                    value = (Comparable) decrement(value);
                    // Same wrap-around guard for the downward direction.
                    if (compareTo(value, first) >= 0) return;
                }
            }
        }
    }
}
public class SoftwareModuleSpecification { /** * { @ link Specification } for retrieving { @ link SoftwareModule } s where its * DELETED attribute is false . * @ return the { @ link SoftwareModule } { @ link Specification } */ public static Specification < JpaSoftwareModule > isDeletedFalse ( ) { } }
return ( swRoot , query , cb ) -> cb . equal ( swRoot . < Boolean > get ( JpaSoftwareModule_ . deleted ) , Boolean . FALSE ) ;
public class ProjectApi { /** * Get a list of visible projects owned by the given user in the specified page range . * < pre > < code > GET / users / : user _ id / projects < / code > < / pre > * @ param userIdOrUsername the user ID , username of the user , or a User instance holding the user ID or username * @ param filter the ProjectFilter instance holding the filter values for the query * @ param page the page to get * @ param perPage the number of projects per page * @ return a list of visible projects owned by the given use * @ throws GitLabApiException if any exception occurs */ public List < Project > getUserProjects ( Object userIdOrUsername , ProjectFilter filter , int page , int perPage ) throws GitLabApiException { } }
GitLabApiForm formData = filter . getQueryParams ( page , perPage ) ; Response response = get ( Response . Status . OK , formData . asMap ( ) , "users" , getUserIdOrUsername ( userIdOrUsername ) , "projects" ) ; return ( response . readEntity ( new GenericType < List < Project > > ( ) { } ) ) ;
public class XmlNode { /** * Adds the attribute * @ param _ name * the attribute name * @ param _ value * the attribute name */ public void addAttribute ( final String _name , final String _value ) { } }
this . attributes . put ( _name , new XmlNode ( ) { { this . name = _name ; this . value = _value ; this . valid = true ; this . type = XmlNode . ATTRIBUTE_NODE ; } } ) ;
public class MonetaryFunctions { /** * Returns the smaller of two { @ code MonetaryAmount } values . If the arguments * have the same value , the result is that same value . * @ param a an argument . * @ param b another argument . * @ return the smaller of { @ code a } and { @ code b } . */ static MonetaryAmount min ( MonetaryAmount a , MonetaryAmount b ) { } }
MoneyUtils . checkAmountParameter ( Objects . requireNonNull ( a ) , Objects . requireNonNull ( b . getCurrency ( ) ) ) ; return a . isLessThan ( b ) ? a : b ;
public class AmazonDynamoDBAsyncClient { /** * Retrieves a set of Attributes for an item that matches the primary * key . * The < code > GetItem < / code > operation provides an eventually - consistent * read by default . If eventually - consistent reads are not acceptable for * your application , use < code > ConsistentRead < / code > . Although this * operation might take longer than a standard read , it always returns * the last updated value . * @ param getItemRequest Container for the necessary parameters to * execute the GetItem operation on AmazonDynamoDB . * @ param asyncHandler Asynchronous callback handler for events in the * life - cycle of the request . Users could provide the implementation of * the four callback methods in this interface to process the operation * result or handle the exception . * @ return A Java Future object containing the response from the GetItem * service method , as returned by AmazonDynamoDB . * @ throws AmazonClientException * If any internal errors are encountered inside the client while * attempting to make the request or handle the response . For example * if a network connection is not available . * @ throws AmazonServiceException * If an error response is returned by AmazonDynamoDB indicating * either a problem with the data in the request , or a server side issue . */ public Future < GetItemResult > getItemAsync ( final GetItemRequest getItemRequest , final AsyncHandler < GetItemRequest , GetItemResult > asyncHandler ) throws AmazonServiceException , AmazonClientException { } }
return executorService . submit ( new Callable < GetItemResult > ( ) { public GetItemResult call ( ) throws Exception { GetItemResult result ; try { result = getItem ( getItemRequest ) ; } catch ( Exception ex ) { asyncHandler . onError ( ex ) ; throw ex ; } asyncHandler . onSuccess ( getItemRequest , result ) ; return result ; } } ) ;
public class NodeSequence { /** * Create a batch of nodes around the supplied iterable container . Note that the supplied iterator is accessed lazily only * when the batch is { @ link Batch # nextRow ( ) used } . * @ param keys the iterator over the keys of the nodes to be returned ; if null , an { @ link # emptySequence empty instance } is * returned * @ param score the score to return for all of the nodes * @ param workspaceName the name of the workspace in which all of the nodes exist * @ param repository the repository cache used to access the workspaces and cached nodes ; may be null only if the key sequence * is null or empty * @ return the batch of nodes ; never null */ public static Batch batchOfKeys ( final Collection < NodeKey > keys , final float score , final String workspaceName , final RepositoryCache repository ) { } }
if ( keys == null ) return emptyBatch ( workspaceName , 1 ) ; return batchOfKeys ( keys . iterator ( ) , keys . size ( ) , score , workspaceName , repository ) ;
public class BasicPIDGenerator { /** * Get a reference to the ConnectionPoolManager so we can give the instance * constructor a ConnectionPool later in initializeIfNeeded ( ) . */ @ Override public void postInitModule ( ) throws ModuleInitializationException { } }
ConnectionPoolManager mgr = ( ConnectionPoolManager ) getServer ( ) . getModule ( "org.fcrepo.server.storage.ConnectionPoolManager" ) ; if ( mgr == null ) { throw new ModuleInitializationException ( "ConnectionPoolManager module not loaded." , getRole ( ) ) ; } try { m_pidGenerator = new DBPIDGenerator ( mgr . getPool ( ) , m_oldPidGenDir ) ; } catch ( Exception e ) { String msg = "Can't get default connection pool" ; logger . error ( msg , e ) ; throw new ModuleInitializationException ( msg , getRole ( ) ) ; }
public class FontSpec { /** * Converts the weight value to bold / not bold * @ param weight a CSS weight * @ return true if the given weight corresponds to bold */ public static boolean representsBold ( CSSProperty . FontWeight weight ) { } }
if ( weight == CSSProperty . FontWeight . BOLD || weight == CSSProperty . FontWeight . BOLDER || weight == CSSProperty . FontWeight . numeric_600 || weight == CSSProperty . FontWeight . numeric_700 || weight == CSSProperty . FontWeight . numeric_800 || weight == CSSProperty . FontWeight . numeric_900 ) { return true ; } else return false ;
public class Ifc4PackageImpl {
    /**
     * Returns the EClass for IfcFireSuppressionTerminalType, resolving it
     * lazily from the registered Ifc4 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcFireSuppressionTerminalType() {
        if (ifcFireSuppressionTerminalTypeEClass == null) {
            // Index 274 is the generated, fixed position of this classifier
            // within the Ifc4 package's classifier list.
            ifcFireSuppressionTerminalTypeEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(274);
        }
        return ifcFireSuppressionTerminalTypeEClass;
    }
}
public class DatabaseStoreService { /** * Declarative Services method for unsetting the data source service reference * @ param ref reference to service object ; type of service object is verified */ protected void unsetDataSourceFactory ( ServiceReference < ResourceFactory > ref ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && LoggingUtil . SESSION_LOGGER_WAS . isLoggable ( Level . FINE ) ) { LoggingUtil . SESSION_LOGGER_WAS . logp ( Level . FINE , methodClassName , "unsetDataSourceFactory" , "unsetting " + ref ) ; } dataSourceFactoryRef . unsetReference ( ref ) ;
public class NearCachePreloader { /** * Loads the values via a stored key file into the supplied { @ link DataStructureAdapter } . * @ param adapter the { @ link DataStructureAdapter } to load the values from */ public void loadKeys ( DataStructureAdapter < Object , ? > adapter ) { } }
if ( ! storeFile . exists ( ) ) { logger . info ( format ( "Skipped loading keys of Near Cache %s since storage file doesn't exist (%s)" , nearCacheName , storeFile . getAbsolutePath ( ) ) ) ; return ; } long startedNanos = System . nanoTime ( ) ; BufferingInputStream bis = null ; try { bis = new BufferingInputStream ( new FileInputStream ( storeFile ) , BUFFER_SIZE ) ; if ( ! checkHeader ( bis ) ) { return ; } int loadedKeys = loadKeySet ( bis , adapter ) ; long elapsedMillis = getElapsedMillis ( startedNanos ) ; logger . info ( format ( "Loaded %d keys of Near Cache %s in %d ms" , loadedKeys , nearCacheName , elapsedMillis ) ) ; } catch ( Exception e ) { logger . warning ( format ( "Could not pre-load Near Cache %s (%s)" , nearCacheName , storeFile . getAbsolutePath ( ) ) , e ) ; } finally { closeResource ( bis ) ; }
public class BpmnParse {
    /**
     * Parses a receive task.
     *
     * @param receiveTaskElement the BPMN element describing the receive task
     * @param scope the scope the new activity is created in
     * @return the created activity
     */
    public ActivityImpl parseReceiveTask(Element receiveTaskElement, ScopeImpl scope) {
        ActivityImpl activity = createActivityOnScope(receiveTaskElement, scope);
        activity.setActivityBehavior(new ReceiveTaskActivityBehavior());
        parseAsynchronousContinuationForActivity(receiveTaskElement, activity);
        parseExecutionListenersOnScope(receiveTaskElement, activity);
        // A messageRef attribute turns the receive task into a message event
        // subscription: the activity becomes its own (event) scope and a
        // subscription declaration bound to this activity id is registered.
        if (receiveTaskElement.attribute("messageRef") != null) {
            activity.setScope(true);
            activity.setEventScope(activity);
            EventSubscriptionDeclaration declaration = parseMessageEventDefinition(receiveTaskElement);
            declaration.setActivityId(activity.getActivityId());
            declaration.setEventScopeActivityId(activity.getActivityId());
            addEventSubscriptionDeclaration(declaration, activity, receiveTaskElement);
        }
        // Give all registered parse listeners a chance to post-process the activity.
        for (BpmnParseListener parseListener : parseListeners) {
            parseListener.parseReceiveTask(receiveTaskElement, scope, activity);
        }
        return activity;
    }
}
public class SmartShareActionProvider { /** * { @ inheritDoc } */ @ Override public View onCreateActionView ( ) { } }
// Create the view and set its data model . SmartActivityChooserModel dataModel = SmartActivityChooserModel . get ( mContext , mShareHistoryFileName ) ; SmartActivityChooserView activityChooserView = new SmartActivityChooserView ( mContext ) ; activityChooserView . setActivityChooserModel ( dataModel ) ; // Lookup and set the expand action icon . // TypedValue outTypedValue = new TypedValue ( ) ; // mContext . getTheme ( ) . resolveAttribute ( R . attr . actionModeShareDrawable , outTypedValue , true ) ; // Drawable drawable = TintManager . getDrawable ( mContext , outTypedValue . resourceId ) ; final Drawable drawable = mDrawable == null ? mContext . getResources ( ) . getDrawable ( mDrawableResId ) : mDrawable ; activityChooserView . setExpandActivityOverflowButtonDrawable ( drawable ) ; activityChooserView . setProvider ( this ) ; // Set content description . activityChooserView . setDefaultActionButtonContentDescription ( R . string . abc_shareactionprovider_share_with_application ) ; activityChooserView . setExpandActivityOverflowButtonContentDescription ( R . string . abc_shareactionprovider_share_with ) ; return activityChooserView ;
public class MavenModelScannerPlugin { /** * Adds information about execution goals . * @ param executionDescriptor * The descriptor for the execution . * @ param pluginExecution * The PluginExecution . * @ param store * The database . */ private void addExecutionGoals ( MavenPluginExecutionDescriptor executionDescriptor , PluginExecution pluginExecution , Store store ) { } }
List < String > goals = pluginExecution . getGoals ( ) ; for ( String goal : goals ) { MavenExecutionGoalDescriptor goalDescriptor = store . create ( MavenExecutionGoalDescriptor . class ) ; goalDescriptor . setName ( goal ) ; executionDescriptor . getGoals ( ) . add ( goalDescriptor ) ; }
public class IntSets { /** * Returns an IntSet based on the ints in the iterator . This method will try to return the most performant IntSet * based on what ints are provided if any . The returned IntSet may or may not be immutable , so no guarantees are * provided from that respect . * @ param iterator values set in the returned set * @ return IntSet with all the values set that the iterator had */ public static IntSet from ( PrimitiveIterator . OfInt iterator ) { } }
boolean hasNext = iterator . hasNext ( ) ; if ( ! hasNext ) { return EmptyIntSet . getInstance ( ) ; } int firstValue = iterator . nextInt ( ) ; hasNext = iterator . hasNext ( ) ; if ( ! hasNext ) { return new SingletonIntSet ( firstValue ) ; } // We have 2 or more values so just set them in the SmallIntSet SmallIntSet set = new SmallIntSet ( ) ; set . set ( firstValue ) ; iterator . forEachRemaining ( ( IntConsumer ) set :: set ) ; return set ;
public class DynamoDBTableMapper {
    /**
     * Loads an object with the hash and range key.
     *
     * @param hashKey the hash key value
     * @param rangeKey the range key value
     * @return the object
     * @see com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper#load
     */
    public T load(H hashKey, R rangeKey) {
        // Delegate to the underlying mapper, binding this table's model type.
        return mapper.<T> load(model.targetType(), hashKey, rangeKey);
    }
}
public class CmsScheduleManager {
    /**
     * Initializes the OpenCms scheduler.<p>
     *
     * Checks role permissions, creates a Quartz scheduler from an in-memory
     * configuration, schedules all jobs from the system configuration, and
     * finally starts the scheduler. On a fatal Quartz error the scheduler field
     * is reset to null and the method returns without throwing.
     *
     * @param adminCms an OpenCms context object that must have been initialized with "Admin" permissions
     * @throws CmsRoleViolationException if the user has insufficient role permissions
     */
    public synchronized void initialize(CmsObject adminCms) throws CmsRoleViolationException {
        if (OpenCms.getRunLevel() > OpenCms.RUNLEVEL_1_CORE_OBJECT) {
            // simple unit tests will have runlevel 1 and no CmsObject
            OpenCms.getRoleManager().checkRole(adminCms, CmsRole.WORKPLACE_MANAGER);
        }
        // the list of job entries
        m_jobs = new ArrayList<CmsScheduledJobInfo>();
        // save the admin cms
        m_adminCms = adminCms;
        // Quartz scheduler settings: a single named in-memory scheduler
        // (RAM job store) with RMI and JMX access disabled
        Properties properties = new Properties();
        properties.put(StdSchedulerFactory.PROP_SCHED_INSTANCE_NAME, "OpenCmsScheduler");
        properties.put(StdSchedulerFactory.PROP_SCHED_THREAD_NAME, "OpenCms: Scheduler");
        properties.put(StdSchedulerFactory.PROP_SCHED_RMI_EXPORT, CmsStringUtil.FALSE);
        properties.put(StdSchedulerFactory.PROP_SCHED_RMI_PROXY, CmsStringUtil.FALSE);
        properties.put(StdSchedulerFactory.PROP_THREAD_POOL_CLASS, CmsSchedulerThreadPool.class.getName());
        properties.put(StdSchedulerFactory.PROP_JOB_STORE_CLASS, "org.quartz.simpl.RAMJobStore");
        // this will be required in quartz versions from 1.6, but constants are not supported in earlier versions
        properties.put("org.quartz.scheduler.jmx.export", CmsStringUtil.FALSE);
        properties.put("org.quartz.scheduler.jmx.proxy", CmsStringUtil.FALSE);
        try {
            // initialize the Quartz scheduler
            SchedulerFactory schedulerFactory = new StdSchedulerFactory(properties);
            m_scheduler = schedulerFactory.getScheduler();
        } catch (Exception e) {
            LOG.error(Messages.get().getBundle().key(Messages.LOG_NO_SCHEDULER_0), e);
            // can not continue
            m_scheduler = null;
            return;
        }
        if (CmsLog.INIT.isInfoEnabled()) {
            CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SCHEDULER_INITIALIZED_0));
        }
        if (m_configuredJobs != null) {
            // add all jobs from the system configuration
            for (int i = 0; i < m_configuredJobs.size(); i++) {
                try {
                    CmsScheduledJobInfo job = m_configuredJobs.get(i);
                    scheduleJob(adminCms, job);
                } catch (CmsSchedulerException e) {
                    // ignore this job, but keep scheduling the other jobs
                    // note: the log has already been written
                }
            }
        }
        try {
            // start the scheduler
            m_scheduler.start();
        } catch (Exception e) {
            LOG.error(Messages.get().getBundle().key(Messages.LOG_CANNOT_START_SCHEDULER_0), e);
            // can not continue
            m_scheduler = null;
            return;
        }
        if (CmsLog.INIT.isInfoEnabled()) {
            CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SCHEDULER_STARTED_0));
            CmsLog.INIT.info(Messages.get().getBundle().key(Messages.INIT_SCHEDULER_CONFIG_FINISHED_0));
        }
    }
}
public class DB { /** * This factory method is the mechanism for constructing a new embedded database for use . This * method automatically installs the database and prepares it for use . * @ param config Configuration of the embedded instance * @ return a new DB instance * @ throws ManagedProcessException if something fatal went wrong */ public static DB newEmbeddedDB ( DBConfiguration config ) throws ManagedProcessException { } }
DB db = new DB ( config ) ; db . prepareDirectories ( ) ; db . unpackEmbeddedDb ( ) ; db . install ( ) ; return db ;
public class TEEJBInvocationInfo { /** * This is called by the EJB container server code to write a * EJB method call preinvoke exceptions record to the trace log , if enabled . */ public static void tracePreInvokeException ( EJSDeployedSupport s , EJSWrapperBase wrapper , Throwable t ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { StringBuffer sbuf = new StringBuffer ( ) ; sbuf . append ( MthdPreInvokeException_Type_Str ) . append ( DataDelimiter ) . append ( MthdPreInvokeException_Type ) . append ( DataDelimiter ) ; writeDeployedSupportInfo ( s , sbuf , wrapper , t ) ; Tr . debug ( tc , sbuf . toString ( ) ) ; }
public class ChunkFrequencyManager { /** * Aggregation function to compute the addition for the chunk values . * @ return */ public BiFunction < Integer , Integer , Short > addition ( ) { } }
return ( a , b ) -> ( short ) Math . min ( a , 255 ) ;
public class TryCatchBlockNode { /** * Updates the index of this try catch block in the method ' s list of try * catch block nodes . This index maybe stored in the ' target ' field of the * type annotations of this block . * @ param index * the new index of this try catch block in the method ' s list of * try catch block nodes . */ public void updateIndex ( final int index ) { } }
int newTypeRef = 0x42000000 | ( index << 8 ) ; if ( visibleTypeAnnotations != null ) { for ( TypeAnnotationNode tan : visibleTypeAnnotations ) { tan . typeRef = newTypeRef ; } } if ( invisibleTypeAnnotations != null ) { for ( TypeAnnotationNode tan : invisibleTypeAnnotations ) { tan . typeRef = newTypeRef ; } }
public class TargetStreamManager {
    /**
     * Flush any existing streams and throw away any cached messages.
     *
     * NOTE(review): the throws clause lists SIResourceException twice; the
     * javadoc suggests the second was meant to be SIException — confirm
     * before changing, as widening the clause would affect callers.
     *
     * @param streamID the UUID of the stream to flush
     * @throws SIResourceException
     * @throws SIException
     */
    public void forceFlush(SIBUuid12 streamID) throws SIResourceException, SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "forceFlush", new Object[] { streamID });
        // Synchronize to resolve racing messages.
        synchronized (flushMap) {
            FlushQueryRecord entry = flushMap.remove(streamID);
            // Remove the entry (we may not have even had one), then clean up any
            // existing stream state. Also, make sure we turn off the alarm if
            // there IS an entry. Note that an alarm will always be present if an
            // entry exists.
            if (entry != null)
                entry.resend.cancel();
            flush(streamID);
        }
        // If all the inbound stream sets are empty, queue the destination that the
        // inbound streams are for to the asynch deletion thread, incase any cleanup
        // of the destination is required. If not, the asynch deletion thread will do
        // nothing.
        // if ( isEmpty ( ) )
        // DestinationManager destinationManager = messageProcessor . getDestinationManager ( ) ;
        // BaseDestinationHandler destinationHandler = protocolItemStream . getDestinationHandler ( ) ;
        // destinationManager . markDestinationAsCleanUpPending ( destinationHandler ) ;
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "forceFlush");
    }
}
public class UserSettingRepository {
    /**
     * Creates a new per-user setting of type STRING.
     *
     * @param user the user the setting belongs to
     * @param key the setting key
     * @param description a human-readable description of the setting
     * @param value the string value to store
     * @return the newly created setting
     */
    @Programmatic
    public UserSettingJdo newString(final String user, final String key, final String description, final String value) {
        // Delegate to the generic factory, fixing the type to STRING.
        return newSetting(user, key, description, SettingType.STRING, value);
    }
}
public class SagaMessageStream {
    /**
     * {@inheritDoc}
     */
    @Override
    public void handle(@Nonnull final Object message) throws InvocationTargetException, IllegalAccessException {
        checkNotNull(message, "Message to handle must not be null.");
        // Delegate to the three-argument overload with no additional context.
        handle(message, null, null);
    }
}
public class DataManager {
    /**
     * Log basic information about initialisation environment.
     *
     * @param log the logger to write to
     */
    private void logInfo(@NonNull final Logger log) {
        // NOTE(review): deviceDAO.device() is queried three times; if it is not
        // a cheap cached getter, consider hoisting it into a local — confirm.
        log.i("App ver. = " + deviceDAO.device().getAppVer());
        log.i("Comapi device ID = " + deviceDAO.device().getDeviceId());
        // The Firebase instance id is logged at debug level only.
        log.d("Firebase ID = " + deviceDAO.device().getInstanceId());
    }
}
public class SearchIO {
    /**
     * Guess factory class to be used using file extension.
     * It can be used both for read and for in write.
     * To be ResultFactory classes automatically available to this subsystem
     * they must be listed in the file org.biojava.nbio.core.search.io.ResultFactory
     * located in src/main/resources
     *
     * @param f : file. Its last extension (text after last dot) will be compared
     *          to default extensions of known ResultFactory implementing classes
     * @return the guessed factory
     */
    private ResultFactory guessFactory(File f) {
        // Lazily build the extension -> factory map from all ResultFactory
        // implementations registered via the ServiceLoader mechanism.
        // NOTE(review): this lazy init is not synchronized — confirm this
        // method is only called from a single thread.
        if (extensionFactoryAssociation == null) {
            extensionFactoryAssociation = new HashMap<String, ResultFactory>();
            ServiceLoader<ResultFactory> impl = ServiceLoader.load(ResultFactory.class);
            for (ResultFactory loadedImpl : impl) {
                List<String> fileExtensions = loadedImpl.getFileExtensions();
                for (String ext : fileExtensions)
                    extensionFactoryAssociation.put(ext, loadedImpl);
            }
        }
        // Compare the text after the last dot against the registered extensions.
        String filename = f.getAbsolutePath();
        int extensionPos = filename.lastIndexOf(".");
        String extension = filename.substring(extensionPos + 1);
        if (extensionFactoryAssociation.get(extension) == null)
            throw new UnsupportedOperationException(NOT_SUPPORTED_FILE_EXCEPTION + "\nExtension:" + extension);
        return extensionFactoryAssociation.get(extension);
    }

    /**
     * Returns the e-value threshold associated with this object.
     *
     * @return the e-value threshold
     */
    public double getEvalueThreshold() {
        return evalueThreshold;
    }

    /**
     * Returns a read-only iterator over the parsed results; remove() is not
     * supported.
     */
    @Override
    public Iterator<Result> iterator() {
        return new Iterator<Result>() {
            // Index of the next result to return.
            int currentResult = 0;

            @Override
            public boolean hasNext() {
                return currentResult < results.size();
            }

            @Override
            public Result next() {
                if (!hasNext()) {
                    throw new NoSuchElementException();
                }
                return results.get(currentResult++);
            }

            @Override
            public void remove() {
                // Removal is intentionally unsupported for this view.
                throw new UnsupportedOperationException("The remove operation is not supported by this iterator");
            }
        };
    }
}