signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DefaultTextBundleRegistry { /** * Load java properties bundle with iso - 8859-1 * @ param bundleName * @ param locale * @ return None or bundle corresponding bindleName . locale . properties */ protected Option < TextBundle > loadJavaBundle ( String bundleName , Locale locale ) { } }
Properties properties = new Properties ( ) ; String resource = toJavaResourceName ( bundleName , locale ) ; try { InputStream is = ClassLoaders . getResourceAsStream ( resource , getClass ( ) ) ; if ( null == is ) return Option . none ( ) ; properties . load ( is ) ; is . close ( ) ; } catch ( IOException e ) { return Option . none ( ) ; } finally { } return Option . < TextBundle > from ( new DefaultTextBundle ( locale , resource , properties ) ) ;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class ModifyTuple {

    /**
     * Modifies a tuple (represented as a List) by inserting a value into the list found at a
     * specific index, returning a fresh copy so the original "tuple" is left untouched —
     * mimicking Python tuple semantics.
     *
     * Example:
     * modifyTuple(Arrays.asList("HELLO", 5, new ArrayList<>(), true), 2, 50)  -> ["HELLO", 5, [50], true]
     * modifyTuple(Arrays.asList("HELLO", 5, new ArrayList<>(), true), 2, 100) -> ["HELLO", 5, [100], true]
     *
     * @param inputList list that represents a tuple
     * @param index position of the element to modify
     * @param value value to add to the list at the specified index
     * @return a new list that is a modified copy of the original list
     */
    public static List<Object> modifyTuple(List<Object> inputList, int index, int value) {
        // Shallow-copy the tuple, then deep-copy only the slot we are about to mutate.
        List<Object> result = new ArrayList<>(inputList);
        @SuppressWarnings("unchecked")
        List<Integer> updatedSlot = new ArrayList<>((List<Integer>) result.get(index));
        updatedSlot.add(value);
        result.set(index, updatedSlot);
        return result;
    }
}
public class EntityGroupImpl { /** * Adds the < code > IGroupMember < / code > key to the appropriate member key cache by copying the * cache , adding to the copy , and then replacing the original with the copy . At this point , * < code > gm < / code > does not yet have < code > this < / code > in its containing group cache . That cache * entry is not added until update ( ) , when changes are committed to the store . * @ param gm org . apereo . portal . groups . IGroupMember */ private void primAddMember ( IGroupMember gm ) throws GroupsException { } }
final EntityIdentifier cacheKey = getUnderlyingEntityIdentifier ( ) ; Element element = childrenCache . get ( cacheKey ) ; @ SuppressWarnings ( "unchecked" ) final Set < IGroupMember > set = element != null ? ( Set < IGroupMember > ) element . getObjectValue ( ) : buildChildrenSet ( ) ; final Set < IGroupMember > children = new HashSet < > ( set ) ; children . add ( gm ) ; childrenCache . put ( new Element ( cacheKey , children ) ) ;
public class PrintablePage { /** * Default configuration for Month view in the preview Pane . * @ return */ protected MonthView createMonthView ( ) { } }
MonthView newMonthView = new MonthView ( ) ; newMonthView . setShowToday ( false ) ; newMonthView . setShowCurrentWeek ( false ) ; newMonthView . weekFieldsProperty ( ) . bind ( weekFieldsProperty ( ) ) ; newMonthView . showFullDayEntriesProperty ( ) . bind ( showAllDayEntriesProperty ( ) ) ; newMonthView . showTimedEntriesProperty ( ) . bind ( showTimedEntriesProperty ( ) ) ; newMonthView . addEventFilter ( MouseEvent . ANY , weakMouseHandler ) ; newMonthView . dateProperty ( ) . bind ( pageStartDateProperty ( ) ) ; return newMonthView ;
public class LoggingUtils { /** * Prepends short session details ( result of getId ) for the session in square brackets to the message . * @ param message the message to be logged * @ param session an instance of IoSessionEx * @ return example : " [ wsn # 34 127.0.0.0.1:41234 ] this is the log message " */ public static String addSession ( String message , IoSession session ) { } }
return format ( "[%s] %s" , getId ( session ) , message ) ;
public class CommercePriceListUtil {

    /**
     * Returns the commerce price list where uuid = &#63; and groupId = &#63; or returns
     * <code>null</code> if it could not be found, optionally using the finder cache.
     *
     * @param uuid the uuid
     * @param groupId the group ID
     * @param retrieveFromCache whether to retrieve from the finder cache
     * @return the matching commerce price list, or <code>null</code> if a matching commerce
     *         price list could not be found
     */
    public static CommercePriceList fetchByUUID_G(String uuid, long groupId, boolean retrieveFromCache) {
        // Pure delegation to the underlying persistence implementation.
        return getPersistence().fetchByUUID_G(uuid, groupId, retrieveFromCache);
    }
}
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the {@code EClass} for IfcFeatureElementAddition, resolving it lazily from the
     * globally registered Ifc2x3tc1 package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcFeatureElementAddition() {
        if (ifcFeatureElementAdditionEClass == null) {
            // Lazy lookup: classifier index 232 in the registered package is this EClass
            // (index fixed by the generator).
            ifcFeatureElementAdditionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(232);
        }
        return ifcFeatureElementAdditionEClass;
    }
}
public class Rss2Parser {

    /**
     * Handles a node from the tag node and assigns it to the correct article value.
     *
     * @param tag The tag which to handle.
     * @param article Article object to assign the node value to.
     * @return True if a proper tag was given or handled. False if improper tag was given or
     *         if an exception is triggered.
     */
    private boolean handleNode(String tag, Article article) {
        try {
            // Advance the pull parser; only TEXT nodes carry a value we can consume.
            if (xmlParser.next() != XmlPullParser.TEXT) return false;
            if (tag.equalsIgnoreCase("link")) article.setSource(Uri.parse(xmlParser.getText()));
            else if (tag.equalsIgnoreCase("title")) article.setTitle(xmlParser.getText());
            else if (tag.equalsIgnoreCase("description")) {
                String encoded = xmlParser.getText();
                // Pull the first image link for the article thumbnail, then strip the
                // <img> tags from the HTML before storing the plain-text description.
                article.setImage(Uri.parse(pullImageLink(encoded)));
                article.setDescription(Html.fromHtml(encoded.replaceAll("<img.+?>", "")).toString());
            }
            // Full content: remove opening/closing <div> tags only.
            else if (tag.equalsIgnoreCase("content:encoded")) article.setContent(xmlParser.getText().replaceAll("[<](/)?div[^>]*[>]", ""));
            else if (tag.equalsIgnoreCase("wfw:commentRss")) article.setComments(xmlParser.getText());
            else if (tag.equalsIgnoreCase("category")) article.setNewTag(xmlParser.getText());
            else if (tag.equalsIgnoreCase("dc:creator")) article.setAuthor(xmlParser.getText());
            else if (tag.equalsIgnoreCase("pubDate")) {
                article.setDate(getParsedDate(xmlParser.getText()));
            }
            // Unrecognized tags fall through and still report success.
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        } catch (XmlPullParserException e) {
            e.printStackTrace();
            return false;
        }
    }
}
public class Searcher { /** * Searches the pattern starting from the given element . * @ param ele element to start from * @ param pattern pattern to search * @ return matching results */ public static List < Match > search ( BioPAXElement ele , Pattern pattern ) { } }
assert pattern . getStartingClass ( ) . isAssignableFrom ( ele . getModelInterface ( ) ) ; Match m = new Match ( pattern . size ( ) ) ; m . set ( ele , 0 ) ; return search ( m , pattern ) ;
public class BackupResourceStorageConfigsInner {

    /**
     * Updates vault storage model type.
     *
     * @param vaultName The name of the recovery services vault.
     * @param resourceGroupName The name of the resource group where the recovery services vault is present.
     * @param parameters Vault storage config request
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> updateAsync(String vaultName, String resourceGroupName, BackupResourceConfigResourceInner parameters, final ServiceCallback<Void> serviceCallback) {
        // Adapts the observable-based overload into a ServiceFuture that drives the callback.
        return ServiceFuture.fromResponse(updateWithServiceResponseAsync(vaultName, resourceGroupName, parameters), serviceCallback);
    }
}
public class ANXAdapters { /** * Adapts a ANX Wallet to a XChange Balance * @ param anxWallet * @ return */ public static Balance adaptBalance ( ANXWallet anxWallet ) { } }
if ( anxWallet == null ) { // use the presence of a currency String to indicate existing wallet at ANX return null ; // an account maybe doesn ' t contain a ANXWallet } else { return new Balance ( Currency . getInstance ( anxWallet . getBalance ( ) . getCurrency ( ) ) , anxWallet . getBalance ( ) . getValue ( ) , anxWallet . getAvailableBalance ( ) . getValue ( ) ) ; }
public class ParameterizedTypeName {

    /**
     * Returns a new {@link ParameterizedTypeName} instance for the specified {@code name} as nested
     * inside this class, with the specified {@code typeArguments}.
     */
    public ParameterizedTypeName nestedClass(String name, List<TypeName> typeArguments) {
        checkNotNull(name, "name == null");
        // `this` becomes the enclosing type of the new name. The trailing empty list is
        // presumably the annotation list of the constructor — confirm against the ctor signature.
        return new ParameterizedTypeName(this, rawType.nestedClass(name), typeArguments, new ArrayList<>());
    }
}
public class LogManager { /** * Sets the level of a logger * @ param logger The name of the logger to set the level for . * @ param level The level to set the logger at */ public void setLevel ( String logger , Level level ) { } }
Logger log = getLogger ( logger ) ; log . setLevel ( level ) ; for ( String loggerName : getLoggerNames ( ) ) { if ( loggerName . startsWith ( logger ) && ! loggerName . equals ( logger ) ) { getLogger ( loggerName ) . setLevel ( level ) ; } }
public class Format { /** * formats map using ' : ' as key - value separator instead of default ' = ' */ public static < V > String formatPairs ( Map < String , V > entries ) { } }
Iterator < Entry < String , V > > iterator = entries . entrySet ( ) . iterator ( ) ; switch ( entries . size ( ) ) { case 0 : return "{}" ; case 1 : { return String . format ( "{%s}" , keyValueString ( iterator . next ( ) ) ) ; } default : { StringBuilder builder = new StringBuilder ( ) ; builder . append ( "{" ) ; builder . append ( keyValueString ( iterator . next ( ) ) ) ; while ( iterator . hasNext ( ) ) { builder . append ( ',' ) ; builder . append ( ' ' ) ; builder . append ( keyValueString ( iterator . next ( ) ) ) ; } builder . append ( "}" ) ; return builder . toString ( ) ; } }
public class Type { /** * Creates a new instance of { @ code Package } when the given name is not empty other wise it returns * { @ link Package # UNDEFINED } . * @ param packageName * package name * @ return instance of { @ code Package } and never null */ @ Nonnull private static Package createPackage ( @ Nonnull final String packageName ) { } }
Check . notNull ( packageName , "packageName" ) ; return packageName . isEmpty ( ) ? Package . UNDEFINED : new Package ( packageName ) ;
public class InteractableLookupMap {

    /**
     * Avoid code duplication in above two methods: finds the next focusable interactable to the
     * left or right of the given one by scanning the lookup map column by column, fanning out
     * vertically from the starting row.
     *
     * @param interactable the component to search from
     * @param isRight true to search to the right, false to search to the left
     * @return the next interactable in that direction, or null if none was found (or the
     *         component is no longer attached to the base pane)
     */
    private Interactable findNextLeftOrRight(Interactable interactable, boolean isRight) {
        // +1 walks columns rightwards, -1 leftwards.
        int directionTerm = isRight ? 1 : -1;
        TerminalPosition startPosition = interactable.getCursorLocation();
        if (startPosition == null) {
            // If the currently active interactable component is not showing the cursor, use the
            // top-left position instead if we're going left, or the top-right position if we're
            // going right
            if (isRight) {
                startPosition = new TerminalPosition(interactable.getSize().getColumns() - 1, 0);
            } else {
                startPosition = TerminalPosition.TOP_LEFT_CORNER;
            }
        } else {
            // Adjust position so that it's on the left-most side if we're going left or
            // right-most side if we're going right. Otherwise the lookup might product odd
            // results in certain cases
            if (isRight) {
                startPosition = startPosition.withColumn(interactable.getSize().getColumns() - 1);
            } else {
                startPosition = startPosition.withColumn(0);
            }
        }
        // Convert to base-pane coordinates, since the lookup map is indexed in that space.
        startPosition = interactable.toBasePane(startPosition);
        if (startPosition == null) {
            // The structure has changed, our interactable is no longer inside the base pane!
            return null;
        }
        Set<Interactable> disqualified = getDisqualifiedInteractables(startPosition, false);
        TerminalSize size = getSize();
        // Clamp the vertical search band to the component's own row span within the pane.
        int maxShiftUp = interactable.toBasePane(TerminalPosition.TOP_LEFT_CORNER).getRow();
        maxShiftUp = Math.max(maxShiftUp, 0);
        int maxShiftDown = interactable.toBasePane(new TerminalPosition(0, interactable.getSize().getRows() - 1)).getRow();
        maxShiftDown = Math.min(maxShiftDown, size.getRows() - 1);
        int maxShift = Math.max(startPosition.getRow() - maxShiftUp, maxShiftDown - startPosition.getRow());
        // Walk column by column away from the start; in each column, probe rows at growing
        // vertical distance, alternating below (+) and above (-) the starting row.
        for (int searchColumn = startPosition.getColumn() + directionTerm; searchColumn >= 0 && searchColumn < size.getColumns(); searchColumn += directionTerm) {
            for (int yShift = 0; yShift <= maxShift; yShift++) {
                for (int modifier : new int[]{1, -1}) {
                    if (yShift == 0 && modifier == -1) {
                        // Row offset 0 only needs to be probed once.
                        break;
                    }
                    int searchRow = startPosition.getRow() + (yShift * modifier);
                    if (searchRow < maxShiftUp || searchRow > maxShiftDown) {
                        continue;
                    }
                    int index = lookupMap[searchRow][searchColumn];
                    if (index != -1 && !disqualified.contains(interactables.get(index))) {
                        return interactables.get(index);
                    }
                }
            }
        }
        return null;
    }
}
public class XPathFactoryFinder { /** * < p > Creates a new { @ link XPathFactory } object for the specified * schema language . < / p > * @ param uri * Identifies the underlying object model . * @ return < code > null < / code > if the callee fails to create one . * @ throws NullPointerException * If the parameter is null . */ public XPathFactory newFactory ( String uri ) { } }
if ( uri == null ) { throw new NullPointerException ( "uri == null" ) ; } XPathFactory f = _newFactory ( uri ) ; if ( debug ) { if ( f != null ) { debugPrintln ( "factory '" + f . getClass ( ) . getName ( ) + "' was found for " + uri ) ; } else { debugPrintln ( "unable to find a factory for " + uri ) ; } } return f ;
public class PersistenceBrokerThreadMapping {

    /**
     * Return the current open {@link org.apache.ojb.broker.PersistenceBroker} instance for the
     * given {@link org.apache.ojb.broker.PBKey}, if any.
     *
     * @param key the broker key to look up in the thread-local broker map
     * @return null if no open {@link org.apache.ojb.broker.PersistenceBroker} found.
     */
    public static PersistenceBrokerInternal currentPersistenceBroker(PBKey key) throws PBFactoryException, PersistenceBrokerException {
        // Thread-local map of PBKey -> WeakHashMap of brokers for that key.
        HashMap map = (HashMap) currentBrokerMap.get();
        WeakHashMap set;
        PersistenceBrokerInternal broker = null;
        if (map == null) {
            return null;
        }
        set = (WeakHashMap) map.get(key);
        if (set == null) {
            return null;
        }
        // seek for an open broker, preferably in transaction
        for (Iterator it = set.keySet().iterator(); it.hasNext();) {
            PersistenceBrokerInternal tmp = (PersistenceBrokerInternal) it.next();
            // Prune entries that were garbage-collected or already closed.
            if (tmp == null || tmp.isClosed()) {
                it.remove();
                continue;
            }
            // Remember the last open broker; stop early if one is in a transaction.
            broker = tmp;
            if (tmp.isInTransaction()) {
                break; // the best choice found
            }
        }
        return broker;
    }
}
public class DatamappingHelper {

    /**
     * Translate annotations (only when {@link Datamappingstype#isUseannotations()} is true) and
     * xml configuration into a {@link DataMapping} for use in a {@link DatamappingProcessor},
     * uses instance cache. XML config will override annotations on classes.
     *
     * @param dataClass the class to build a mapping for
     * @param id an optional id to find in xml configuration
     * @param datamappingstype may be null if no xml config is used
     * @see #fromXML(java.io.Reader)
     * @return the cached or newly built {@link DataMapping} for dataClass + id
     */
    public final DataMapping toDataConfig(Class dataClass, String id, Datamappingstype datamappingstype) throws ClassNotFoundException {
        // Instance cache keyed by class name + id; build only on first request.
        if (!dataMappings.containsKey(dataClass.getName() + id)) {
            // Annotations apply when there is no xml config, or when it explicitly enables them.
            boolean useAnnotations = datamappingstype == null || datamappingstype.isUseannotations();
            Datamappingtype jaxbMapping = (datamappingstype != null) ? findDataMapping(dataClass, id, datamappingstype) : null;
            DataMapping dm = new DataMapping();
            // Register before populating, so the cache key is claimed up front.
            dataMappings.put(dataClass.getName() + id, dm);
            // --- Start containers: xml first; annotations only fill in when xml left it empty. ---
            if (jaxbMapping != null && jaxbMapping.getStartcontainer() != null && jaxbMapping.getStartcontainer().size() > 0) {
                dm.setId(jaxbMapping.getId());
                for (Startcontainertype cs : jaxbMapping.getStartcontainer()) {
                    dm.addStartcontainer((StartContainerConfig) new StartContainerConfig()
                            .setAdddata(cs.isAdddata())
                            .setContainertype(CONTAINER_ELEMENT.valueOf(cs.getContainertype().name()))
                            .setContainertypemethod(cs.getContainertypemethod())
                            .setSectionlevel(cs.getSectionlevel().intValue())
                            .addStyleClasses(cs.getStyleclass().toArray(new String[cs.getStyleclass().size()]))
                            .setStyleclassesmethod(cs.getStyleclassesmethod())
                            .setValueasstringmethod(cs.getValueasstringmethod()));
                }
            }
            if (useAnnotations && dm.getStartcontainer().isEmpty()) {
                List<StartContainerConfig> scc = getContainers(dataClass);
                if (scc != null) {
                    for (StartContainerConfig s : scc) {
                        dm.addStartcontainer(s);
                    }
                }
            }
            // --- Elements: same xml-over-annotations precedence. ---
            if (jaxbMapping != null && jaxbMapping.getElement() != null && jaxbMapping.getElement().size() > 0) {
                for (Elementtype e : jaxbMapping.getElement()) {
                    dm.addElement(fromJaxb(e));
                }
            }
            if (useAnnotations && dm.getElement().isEmpty()) {
                List<ElementConfig> ec = getElements(dataClass);
                if (ec != null) {
                    for (ElementConfig e : ec) {
                        dm.addElement(e);
                    }
                }
            }
            // --- Elements-from-data: xml wins; annotation result is cached per class in cacheM. ---
            if (jaxbMapping != null && jaxbMapping.getElementsfromdata() != null) {
                dm.setElementsfromdata(new ElementsFromData()
                        .setElement(fromJaxb(jaxbMapping.getElementsfromdata().getElement()))
                        .setDatalistmethod(jaxbMapping.getElementsfromdata().getDatalistmethod()));
            }
            if (useAnnotations && dm.getElementsfromdata() == null) {
                if (cacheM.containsKey(dataClass)) {
                    dm.setElementsfromdata(cacheM.get(dataClass));
                } else {
                    MultipleFromData mfd = (MultipleFromData) dataClass.getAnnotation(MultipleFromData.class);
                    if (mfd != null) {
                        dm.setElementsfromdata(new ElementsFromData().setDatalistmethod(mfd.dataListMethod()).setElement(fromAnnotation(mfd.element())));
                        cacheM.put(dataClass, dm.getElementsfromdata());
                    }
                }
            }
            // --- End container: xml wins; annotation result is cached per class in cacheECC. ---
            if (jaxbMapping != null && jaxbMapping.getEndcontainer() != null) {
                if (logger.isLoggable(Level.FINE)) {
                    logger.fine(String.format("using xml configuration for end of container of %s ", dataClass.getName()));
                }
                dm.setEndcontainer(new EndContainerConfig()
                        .setContainertype(CONTAINER_ELEMENT.valueOf(jaxbMapping.getEndcontainer().getContainertype().name()))
                        .setDepthtoend(jaxbMapping.getEndcontainer().getDepthtoend().intValue()));
            }
            if (useAnnotations && dm.getEndcontainer() == null) {
                if (cacheECC.containsKey(dataClass)) {
                    dm.setEndcontainer(cacheECC.get(dataClass));
                } else {
                    ContainerEnd endAnnotation = (ContainerEnd) dataClass.getAnnotation(com.vectorprint.report.itext.annotations.ContainerEnd.class);
                    if (endAnnotation != null) {
                        if (logger.isLoggable(Level.FINE)) {
                            logger.fine(String.format("using annotation for end of container of %s ", dataClass.getName()));
                        }
                        dm.setEndcontainer(new EndContainerConfig().setContainertype(endAnnotation.containerType()).setDepthtoend(endAnnotation.depthToEnd()));
                        cacheECC.put(dataClass, dm.getEndcontainer());
                    }
                }
            }
        }
        return dataMappings.get(dataClass.getName() + id);
    }
}
public class ExportNamesCacheKeyGenerator { /** * / * ( non - Javadoc ) * @ see com . ibm . jaggr . core . cachekeygenerator . AbstractCacheKeyGenerator # generateKey ( javax . servlet . http . HttpServletRequest ) */ @ Override public String generateKey ( HttpServletRequest request ) { } }
boolean exportNames = TypeUtil . asBoolean ( request . getAttribute ( IHttpTransport . EXPORTMODULENAMES_REQATTRNAME ) ) ; return eyecatcher + ":" + ( exportNames ? "1" : "0" ) ; // $ NON - NLS - 1 $ / / $ NON - NLS - 2 $ / / $ NON - NLS - 3 $
public class GenericDraweeHierarchy {

    /**
     * Sets the actual image focus point.
     *
     * @param focusPoint the focus point to apply; must not be null
     */
    public void setActualImageFocusPoint(PointF focusPoint) {
        Preconditions.checkNotNull(focusPoint);
        // Apply the focus point to the scale-type drawable at the actual-image layer index.
        getScaleTypeDrawableAtIndex(ACTUAL_IMAGE_INDEX).setFocusPoint(focusPoint);
    }
}
public class DescribeImagesResult { /** * A list of < a > ImageDetail < / a > objects that contain data about the image . * @ param imageDetails * A list of < a > ImageDetail < / a > objects that contain data about the image . */ public void setImageDetails ( java . util . Collection < ImageDetail > imageDetails ) { } }
if ( imageDetails == null ) { this . imageDetails = null ; return ; } this . imageDetails = new java . util . ArrayList < ImageDetail > ( imageDetails ) ;
public class KeyManagerImpl { /** * { @ inheritDoc } */ @ Override public String getGeocodingKey ( ) { } }
if ( geocodingKey != null ) { return geocodingKey ; } try ( FileInputStream fileStream = new FileInputStream ( fileName ) ; InputStreamReader iStreamReader = new InputStreamReader ( fileStream , "UTF-8" ) ; BufferedReader br = new BufferedReader ( iStreamReader ) ; ) { final StringBuilder sb = new StringBuilder ( ) ; final String line = br . readLine ( ) ; if ( line != null ) { sb . append ( line . replace ( "\n" , "" ) ) ; } geocodingKey = sb . toString ( ) ; return geocodingKey ; } catch ( IOException e ) { throw new GeoCodeRuntimeException ( "Couldn't open key file: " + fileName , e ) ; }
public class ProxySettingsManager { /** * Find all proxy settings matching the provided parameters . * @ param sProtocol * Destination server protocol . * @ param sHostName * Destination host name * @ param nPort * Destination port * @ return A non - < code > null < / code > set with all matching proxy settings . A set * is used to avoid that the same settings are used more than once . */ @ Nonnull @ ReturnsMutableCopy public static ICommonsOrderedSet < IProxySettings > findAllProxySettings ( @ Nullable final String sProtocol , @ Nullable final String sHostName , @ CheckForSigned final int nPort ) { } }
final ICommonsOrderedSet < IProxySettings > ret = new CommonsLinkedHashSet < > ( ) ; for ( final IProxySettingsProvider aProvider : getAllProviders ( ) ) ret . addAll ( aProvider . getAllProxySettings ( sProtocol , sHostName , nPort ) ) ; return ret ;
public class TextUtilities {

    /**
     * Locates the end of the word at the specified position.
     *
     * @param line The text
     * @param pos The position
     * @param noWordSep Characters that are non-alphanumeric, but should be treated as word
     *        characters anyway
     * @param joinNonWordChars Treat consecutive non-alphanumeric characters as one word
     * @since jEdit 4.1pre2
     */
    public static int findWordEnd(String line, int pos, String noWordSep, boolean joinNonWordChars) {
        // Delegates to the five-argument overload with its extra flag defaulted to false
        // (that flag's meaning is defined by the overload).
        return findWordEnd(line, pos, noWordSep, joinNonWordChars, false);
    }
}
public class AttributeTypeImpl {

    /**
     * This method is overridden so that we can check that the regex of the new super type (if it
     * has a regex) can be applied to all the existing instances.
     *
     * @param superType the new super type to attach this attribute type under
     * @return the result of the superclass {@code sup} call
     */
    @Override
    public AttributeType<D> sup(AttributeType<D> superType) {
        // Validate every regex along the new super-type chain against existing instances
        // before committing the hierarchy change.
        ((AttributeTypeImpl<D>) superType).sups().forEach(st -> checkInstancesMatchRegex(st.regex()));
        return super.sup(superType);
    }
}
public class AmazonComprehendClient { /** * Starts an asynchronous dominant language detection job for a collection of documents . Use the operation to track * the status of a job . * @ param startDominantLanguageDetectionJobRequest * @ return Result of the StartDominantLanguageDetectionJob operation returned by the service . * @ throws InvalidRequestException * The request is invalid . * @ throws TooManyRequestsException * The number of requests exceeds the limit . Resubmit your request later . * @ throws KmsKeyValidationException * The KMS customer managed key ( CMK ) entered cannot be validated . Verify the key and re - enter it . * @ throws InternalServerException * An internal server error occurred . Retry your request . * @ sample AmazonComprehend . StartDominantLanguageDetectionJob * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / comprehend - 2017-11-27 / StartDominantLanguageDetectionJob " * target = " _ top " > AWS API Documentation < / a > */ @ Override public StartDominantLanguageDetectionJobResult startDominantLanguageDetectionJob ( StartDominantLanguageDetectionJobRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeStartDominantLanguageDetectionJob ( request ) ;
public class LocalFileBinary {

    /**
     * (non-Javadoc)
     * @see org.fcrepo.kernel.modeshape.FedoraBinaryImpl#setContent(java.io.InputStream,
     *      java.lang.String, java.util.Collection, java.lang.String,
     *      org.fcrepo.kernel.api.services.policy.StoragePolicyDecisionPoint)
     *
     * Local-file binaries are backed by external content, so writing a content stream is
     * unconditionally rejected.
     */
    @Override
    public void setContent(final InputStream content, final String contentType, final Collection<URI> checksums, final String originalFileName, final StoragePolicyDecisionPoint storagePolicyDecisionPoint) throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Cannot call setContent() on local file, call setExternalContent() instead");
    }
}
public class WbEditingAction {

    /**
     * Executes the API action "wbremoveclaims" for the given parameters.
     *
     * @param statementIds the statement ids to delete (1 to 50 ids)
     * @param bot if true, edits will be flagged as "bot edits" provided that the logged in user
     *        is in the bot group; for regular users, the flag will just be ignored
     * @param baserevid the revision of the data that the edit refers to or 0 if this should not
     *        be submitted; when used, the site will ensure that no edit has happened since this
     *        revision to detect edit conflicts
     * @param summary summary for the edit; will be prepended by an automatically generated
     *        comment; the length limit of the autocomment together with the summary is 260
     *        characters: everything above that limit will be cut off
     * @return the JSON response from the API
     * @throws IOException if there was an IO problem, such as missing network connection
     * @throws MediaWikiApiErrorException if the API returns an error
     */
    public JsonNode wbRemoveClaims(List<String> statementIds, boolean bot, long baserevid, String summary) throws IOException, MediaWikiApiErrorException {
        Validate.notNull(statementIds, "statementIds parameter cannot be null when deleting statements");
        Validate.notEmpty(statementIds, "statement ids to delete must be non-empty when deleting statements");
        Validate.isTrue(statementIds.size() <= 50, "At most 50 statements can be deleted at once");
        Map<String, String> parameters = new HashMap<String, String>();
        // The API expects the statement ids as a single pipe-separated "claim" value.
        parameters.put("claim", String.join("|", statementIds));
        return performAPIAction("wbremoveclaims", null, null, null, null, parameters, summary, baserevid, bot);
    }
}
public class DictionaryFeatureVectorGenerator { /** * Creates a { @ code DictionaryFeatureVectorGenerator } which contains * all of the features generated from { @ code data } that occur more * than { @ code occurrenceThreshold } times . * @ param data * @ param generator * @ param occurrenceThreshold * @ return */ public static < T , U > DictionaryFeatureVectorGenerator < T , U > createFromDataWithThreshold ( Collection < T > data , FeatureGenerator < T , U > generator , int occurrenceThreshold ) { } }
CountAccumulator < U > featureOccurrenceCounts = FeatureGenerators . getFeatureCounts ( generator , data ) ; IndexedList < U > features = new IndexedList < U > ( ) ; features . addAll ( featureOccurrenceCounts . getKeysAboveCountThreshold ( occurrenceThreshold ) ) ; return new DictionaryFeatureVectorGenerator < T , U > ( features , generator , true ) ;
public class CmsVfsSitemapService { /** * Reeds the site root entry . < p > * @ param rootPath the root path of the sitemap root * @ param targetPath the target path to open * @ return the site root entry * @ throws CmsSecurityException in case of insufficient permissions * @ throws CmsException if something goes wrong */ private CmsClientSitemapEntry getRootEntry ( String rootPath , String targetPath ) throws CmsSecurityException , CmsException { } }
String sitePath = "/" ; if ( ( rootPath != null ) ) { sitePath = getCmsObject ( ) . getRequestContext ( ) . removeSiteRoot ( rootPath ) ; } CmsJspNavElement navElement = getNavBuilder ( ) . getNavigationForResource ( sitePath , CmsResourceFilter . ONLY_VISIBLE_NO_DELETED ) ; CmsClientSitemapEntry result = toClientEntry ( navElement , true ) ; if ( result != null ) { result . setPosition ( 0 ) ; result . setChildrenLoadedInitially ( true ) ; result . setSubEntries ( getChildren ( sitePath , 2 , targetPath ) , null ) ; } return result ;
public class BoundedWriteBehindQueue {

    /**
     * Increments or decrements node-wide {@link WriteBehindQueue} capacity according to the given
     * value. Throws {@link ReachedMaxSizeException} when node-wide maximum capacity which is
     * stated by the variable {@link #maxCapacity} is exceeded.
     *
     * @param capacity capacity to be added or subtracted.
     * @throws ReachedMaxSizeException
     */
    private void addCapacity(int capacity) {
        // Read fields into locals once; the loop below re-reads only the counter.
        int maxCapacity = this.maxCapacity;
        AtomicInteger writeBehindQueueItemCounter = this.writeBehindQueueItemCounter;
        int currentCapacity = writeBehindQueueItemCounter.get();
        int newCapacity = currentCapacity + capacity;
        // A negative result (underflow or over-decrement) is silently ignored.
        if (newCapacity < 0) {
            return;
        }
        if (maxCapacity < newCapacity) {
            throwException(currentCapacity, maxCapacity, capacity);
        }
        // CAS retry loop: another thread may have changed the counter between the read and
        // the compareAndSet, so recompute and re-validate until the swap succeeds.
        while (!writeBehindQueueItemCounter.compareAndSet(currentCapacity, newCapacity)) {
            currentCapacity = writeBehindQueueItemCounter.get();
            newCapacity = currentCapacity + capacity;
            if (newCapacity < 0) {
                return;
            }
            if (maxCapacity < newCapacity) {
                throwException(currentCapacity, maxCapacity, capacity);
            }
        }
    }
}
public class PropertiesAdapter { /** * Returns { @ literal null } for a { @ literal " null " } { @ link String } value or simply returns the { @ link String } value . * The { @ link String } value is safely trimmed before evaluation . * @ param value { @ link String } value to evaluate . * @ return a { @ literal null } for a { @ literal " null " } { @ link String } value . * @ see java . lang . String */ @ NullSafe protected String valueOf ( String value ) { } }
return ( "null" . equalsIgnoreCase ( String . valueOf ( value ) . trim ( ) ) ? null : value ) ;
public class ResourcesInner {

    /**
     * Validates whether resources can be moved from one resource group to another resource group.
     * This operation checks whether the specified resources can be moved to the target. The
     * resources to move must be in the same source resource group. The target resource group may
     * be in a different subscription. If validation succeeds, the service returns HTTP 204
     * (no content). If validation fails, it returns HTTP 409 (Conflict) with an error message.
     * Retrieve the URL in the Location header value to check the result of the long-running
     * operation.
     *
     * @param sourceResourceGroupName The name of the resource group containing the resources to
     *                                validate for move.
     * @param parameters Parameters for moving resources.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginValidateMoveResources(String sourceResourceGroupName, ResourcesMoveInfo parameters) {
        // Synchronous facade over the async variant: block on the single expected response and
        // unwrap the (void) body so any service error surfaces as an exception right here.
        beginValidateMoveResourcesWithServiceResponseAsync(sourceResourceGroupName, parameters).toBlocking().single().body();
    }
}
public class PluginDeserializer { /** * Gson invokes this call - back method during deserialization when it encounters a field of the specified type . * @ param element The Json data being deserialized * @ param type The type of the Object to deserialize to * @ param context The JSON deserialization context * @ return The plugin */ @ Override public Plugin deserialize ( JsonElement element , Type type , JsonDeserializationContext context ) throws JsonParseException { } }
JsonObject obj = element . getAsJsonObject ( ) ; JsonElement plugin = obj . get ( "plugin" ) ; if ( plugin != null && plugin . isJsonObject ( ) ) return gson . fromJson ( plugin , Plugin . class ) ; return gson . fromJson ( element , Plugin . class ) ;
public class DomainPropertySource { /** * Initializes the property service . * @ return True if initialized . */ private boolean initPropertyService ( ) { } }
if ( propertyService == null && appContext . containsBean ( "propertyService" ) ) { propertyService = appContext . getBean ( "propertyService" , IPropertyService . class ) ; } return propertyService != null ;
public class Config {

    /**
     * Loads default entries that were not provided by a file or command line.
     * This should be called in the constructor. Defaults are staged in {@code default_map}
     * and copied into {@code properties} only for keys the user has not set.
     */
    protected void setDefaults() {
        // map.put("tsd.network.port", "");     // does not have a default, required
        // map.put("tsd.http.cachedir", "");    // does not have a default, required
        // map.put("tsd.http.staticroot", "");  // does not have a default, required
        // --- Server mode and network defaults ---
        default_map.put("tsd.mode", "rw");
        default_map.put("tsd.no_diediedie", "false");
        default_map.put("tsd.network.bind", "0.0.0.0");
        default_map.put("tsd.network.worker_threads", "");
        default_map.put("tsd.network.async_io", "true");
        default_map.put("tsd.network.tcp_no_delay", "true");
        default_map.put("tsd.network.keep_alive", "true");
        default_map.put("tsd.network.reuse_address", "true");
        // --- Core feature toggles ---
        default_map.put("tsd.core.authentication.enable", "false");
        default_map.put("tsd.core.authentication.plugin", "");
        default_map.put("tsd.core.auto_create_metrics", "false");
        default_map.put("tsd.core.auto_create_tagks", "true");
        default_map.put("tsd.core.auto_create_tagvs", "true");
        default_map.put("tsd.core.connections.limit", "0");
        default_map.put("tsd.core.enable_api", "true");
        default_map.put("tsd.core.enable_ui", "true");
        default_map.put("tsd.core.meta.enable_realtime_ts", "false");
        default_map.put("tsd.core.meta.enable_realtime_uid", "false");
        default_map.put("tsd.core.meta.enable_tsuid_incrementing", "false");
        default_map.put("tsd.core.meta.enable_tsuid_tracking", "false");
        default_map.put("tsd.core.meta.cache.enable", "false");
        default_map.put("tsd.core.plugin_path", "");
        default_map.put("tsd.core.socket.timeout", "0");
        default_map.put("tsd.core.tree.enable_processing", "false");
        default_map.put("tsd.core.preload_uid_cache", "false");
        default_map.put("tsd.core.preload_uid_cache.max_entries", "300000");
        default_map.put("tsd.core.storage_exception_handler.enable", "false");
        default_map.put("tsd.core.uid.random_metrics", "false");
        default_map.put("tsd.core.bulk.allow_out_of_order_timestamps", "false");
        // --- Query defaults and limits ---
        default_map.put("tsd.query.filter.expansion_limit", "4096");
        default_map.put("tsd.query.skip_unresolved_tagvs", "false");
        default_map.put("tsd.query.allow_simultaneous_duplicates", "true");
        default_map.put("tsd.query.enable_fuzzy_filter", "true");
        default_map.put("tsd.query.limits.bytes.default", "0");
        default_map.put("tsd.query.limits.bytes.allow_override", "false");
        default_map.put("tsd.query.limits.data_points.default", "0");
        default_map.put("tsd.query.limits.data_points.allow_override", "false");
        default_map.put("tsd.query.limits.overrides.interval", "60000");
        default_map.put("tsd.query.multi_get.enable", "false");
        default_map.put("tsd.query.multi_get.limit", "131072");
        default_map.put("tsd.query.multi_get.batch_size", "1024");
        default_map.put("tsd.query.multi_get.concurrent", "20");
        default_map.put("tsd.query.multi_get.get_all_salts", "false");
        default_map.put("tsd.rpc.telnet.return_errors", "true");
        // Rollup related settings
        default_map.put("tsd.rollups.enable", "false");
        default_map.put("tsd.rollups.tag_raw", "false");
        default_map.put("tsd.rollups.agg_tag_key", "_aggregate");
        default_map.put("tsd.rollups.raw_agg_tag_value", "RAW");
        default_map.put("tsd.rollups.block_derived", "true");
        // --- Plugins (publishers, search, startup) ---
        default_map.put("tsd.rtpublisher.enable", "false");
        default_map.put("tsd.rtpublisher.plugin", "");
        default_map.put("tsd.search.enable", "false");
        default_map.put("tsd.search.plugin", "");
        default_map.put("tsd.stats.canonical", "false");
        default_map.put("tsd.startup.enable", "false");
        default_map.put("tsd.startup.plugin", "");
        // --- Storage (HBase) defaults ---
        default_map.put("tsd.storage.hbase.scanner.maxNumRows", "128");
        default_map.put("tsd.storage.fix_duplicates", "false");
        default_map.put("tsd.storage.flush_interval", "1000");
        default_map.put("tsd.storage.hbase.data_table", "tsdb");
        default_map.put("tsd.storage.hbase.uid_table", "tsdb-uid");
        default_map.put("tsd.storage.hbase.tree_table", "tsdb-tree");
        default_map.put("tsd.storage.hbase.meta_table", "tsdb-meta");
        default_map.put("tsd.storage.hbase.zk_quorum", "localhost");
        default_map.put("tsd.storage.hbase.zk_basedir", "/hbase");
        default_map.put("tsd.storage.hbase.prefetch_meta", "false");
        default_map.put("tsd.storage.enable_appends", "false");
        default_map.put("tsd.storage.repair_appends", "false");
        default_map.put("tsd.storage.enable_compaction", "true");
        default_map.put("tsd.storage.compaction.flush_interval", "10");
        default_map.put("tsd.storage.compaction.min_flush_threshold", "100");
        default_map.put("tsd.storage.compaction.max_concurrent_flushes", "10000");
        default_map.put("tsd.storage.compaction.flush_speed", "2");
        // --- UID handling ---
        default_map.put("tsd.timeseriesfilter.enable", "false");
        default_map.put("tsd.uid.use_mode", "false");
        default_map.put("tsd.uid.lru.enable", "false");
        default_map.put("tsd.uid.lru.name.size", "5000000");
        default_map.put("tsd.uid.lru.id.size", "5000000");
        default_map.put("tsd.uidfilter.enable", "false");
        default_map.put("tsd.core.stats_with_port", "false");
        // --- HTTP interface ---
        default_map.put("tsd.http.show_stack_trace", "true");
        default_map.put("tsd.http.query.allow_delete", "false");
        default_map.put("tsd.http.header_tag", "");
        default_map.put("tsd.http.request.enable_chunked", "false");
        default_map.put("tsd.http.request.max_chunk", "4096");
        default_map.put("tsd.http.request.cors_domains", "");
        default_map.put("tsd.http.request.cors_headers", "Authorization, "
                + "Content-Type, Accept, Origin, User-Agent, DNT, Cache-Control, "
                + "X-Mx-ReqToken, Keep-Alive, X-Requested-With, If-Modified-Since");
        default_map.put("tsd.query.timeout", "0");
        default_map.put("tsd.storage.use_otsdb_timestamp", "false");
        default_map.put("tsd.storage.use_max_value", "true");
        default_map.put("tsd.storage.get_date_tiered_compaction_start", "0");
        // Only install a default when the user has not provided a value already.
        for (Map.Entry<String, String> entry : default_map.entrySet()) {
            if (!properties.containsKey(entry.getKey()))
                properties.put(entry.getKey(), entry.getValue());
        }
        // Re-parse cached static settings now that all defaults are in place.
        loadStaticVariables();
    }
}
public class Ifc2x3tc1PackageImpl {

    /**
     * Returns the EClass for IfcDayInMonthNumber, resolving it lazily from the globally
     * registered package on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcDayInMonthNumber() {
        if (ifcDayInMonthNumberEClass == null) {
            // Generated accessor: classifier index 663 is fixed by the EMF code generator for
            // this metamodel version and must stay in sync with the .ecore model.
            ifcDayInMonthNumberEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(663);
        }
        return ifcDayInMonthNumberEClass;
    }
}
public class Utils { /** * Get time zone of time zone id * @ param id timezone id * @ return timezone */ private static DateTimeZone getTimeZone ( String id ) { } }
DateTimeZone zone ; try { zone = DateTimeZone . forID ( id ) ; } catch ( IllegalArgumentException e ) { throw new IllegalArgumentException ( "TimeZone " + id + " not recognized" ) ; } return zone ;
public class Point { /** * Create a random point , uniformly distributed over the surface of the Earth . * @ param randomGenerator Random generator used to create a point . * @ return Random point with uniform distribution over the sphere . */ @ Nonnull public static Point fromUniformlyDistributedRandomPoints ( @ Nonnull final Random randomGenerator ) { } }
checkNonnull ( "randomGenerator" , randomGenerator ) ; // Calculate uniformly distributed 3D point on sphere ( radius = 1.0 ) : // http : / / mathproofs . blogspot . co . il / 2005/04 / uniform - random - distribution - on - sphere . html final double unitRand1 = randomGenerator . nextDouble ( ) ; final double unitRand2 = randomGenerator . nextDouble ( ) ; final double theta0 = ( 2.0 * Math . PI ) * unitRand1 ; final double theta1 = Math . acos ( 1.0 - ( 2.0 * unitRand2 ) ) ; final double x = Math . sin ( theta0 ) * Math . sin ( theta1 ) ; final double y = Math . cos ( theta0 ) * Math . sin ( theta1 ) ; final double z = Math . cos ( theta1 ) ; // Convert Carthesian 3D point into lat / lon ( radius = 1.0 ) : // http : / / stackoverflow . com / questions / 1185408 / converting - from - longitude - latitude - to - cartesian - coordinates final double latRad = Math . asin ( z ) ; final double lonRad = Math . atan2 ( y , x ) ; // Convert radians to degrees . assert ! Double . isNaN ( latRad ) ; assert ! Double . isNaN ( lonRad ) ; final double lat = latRad * ( 180.0 / Math . PI ) ; final double lon = lonRad * ( 180.0 / Math . PI ) ; return fromDeg ( lat , lon ) ;
public class AbstractTypeComputer { /** * / * @ NotNull */ protected LightweightTypeReference getTypeForName ( Class < ? > clazz , ITypeComputationState state ) { } }
JvmType type = findDeclaredType ( clazz , state ) ; ITypeReferenceOwner owner = state . getReferenceOwner ( ) ; if ( type == null ) { return owner . newUnknownTypeReference ( clazz . getName ( ) ) ; } return owner . toLightweightTypeReference ( type ) ;
public class MappingCache {

    /**
     * Return a new or cached mapper for the given class.
     *
     * @param clazz class for the mapper
     * @param includeParentFields whether fields declared in superclasses are included
     * @return mapper
     */
    public <T> Mapping<T> getMapping(Class<T> clazz, boolean includeParentFields) {
        // Convenience overload: delegates with a fresh DefaultAnnotationSet so callers that do
        // not supply their own annotation set get the default behavior.
        return getMapping(clazz, new DefaultAnnotationSet(), includeParentFields);
    }
}
public class AbstractMessageParser {

    /**
     * This method serializes this parser's fields of the Basic Header Segment (BHS) into its
     * byte representation.
     *
     * @param dst The destination <code>ByteBuffer</code> to write to.
     * @param offset The start offset in <code>dst</code>.
     * @throws InternetSCSIException If any violation of the iSCSI-Standard emerge.
     */
    final void serializeBasicHeaderSegment(final ByteBuffer dst, final int offset) throws InternetSCSIException {
        dst.position(offset);
        // Bytes 1-3 share the first 32-bit word with fields written elsewhere: read the current
        // word (relative get at 'offset'), OR in this parser's bits, write it back absolutely.
        dst.putInt(offset, dst.getInt() | serializeBytes1to3());
        // Bytes 4-7 are not touched here -- presumably filled by another serializer; confirm
        // against the BHS layout before changing this.
        dst.position(offset + BasicHeaderSegment.BYTES_8_11);
        dst.putInt(serializeBytes8to11());
        dst.putInt(serializeBytes12to15());
        // Bytes 16-19 are likewise skipped; writing resumes at bytes 20-23 and then continues
        // with relative puts, one 32-bit word at a time, through byte 47.
        dst.position(offset + BasicHeaderSegment.BYTES_20_23);
        dst.putInt(serializeBytes20to23());
        dst.putInt(serializeBytes24to27());
        dst.putInt(serializeBytes28to31());
        dst.putInt(serializeBytes32to35());
        dst.putInt(serializeBytes36to39());
        dst.putInt(serializeBytes40to43());
        dst.putInt(serializeBytes44to47());
    }
}
public class RestfulHandler { /** * 将request body的byte字节数组复制到内存中 * @ param httpServletRequest * @ return * @ throws IOException */ private byte [ ] copyBytesFromRequest ( HttpServletRequest httpServletRequest ) throws IOException { } }
InputStream inputStream = httpServletRequest . getInputStream ( ) ; ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream ( ) ; byte [ ] buffer = new byte [ 1024 ] ; int offset = - 1 ; while ( ( offset = inputStream . read ( buffer , 0 , 1024 ) ) > 0 ) { byteArrayOutputStream . write ( buffer , 0 , offset ) ; } return byteArrayOutputStream . toByteArray ( ) ;
public class mps_network_config { /** * < pre > * Performs generic data validation for the operation to be performed * < / pre > */ protected void validate ( String operationType ) throws Exception { } }
super . validate ( operationType ) ; MPSIPAddress ip_address_validator = new MPSIPAddress ( ) ; ip_address_validator . setConstraintIsReq ( MPSConstants . MODIFY_CONSTRAINT , true ) ; ip_address_validator . validate ( operationType , ip_address , "\"ip_address\"" ) ; MPSIPAddress netmask_validator = new MPSIPAddress ( ) ; netmask_validator . setConstraintIsReq ( MPSConstants . MODIFY_CONSTRAINT , true ) ; netmask_validator . validate ( operationType , netmask , "\"netmask\"" ) ; MPSIPAddress gateway_validator = new MPSIPAddress ( ) ; gateway_validator . setConstraintIsReq ( MPSConstants . MODIFY_CONSTRAINT , true ) ; gateway_validator . validate ( operationType , gateway , "\"gateway\"" ) ;
public class Util {

    /**
     * Make a given {@link XPath} object "xalan-extension aware", if Xalan is on the classpath.
     * <p>
     * The Xalan namespace context and function resolver are loaded reflectively exactly once
     * per JVM (double-checked locking on the class monitor); when Xalan is absent the failure
     * is swallowed deliberately and the XPath object is left unchanged.
     */
    @SuppressWarnings("deprecation")
    static final void xalanExtensionAware(XPath xpath) {
        // Load xalan extensions thread-safely for all of jOOX
        if (!xalanExtensionLoaded) {
            synchronized (Util.class) {
                if (!xalanExtensionLoaded) {
                    // Set the flag first so a failed load is never retried on later calls.
                    xalanExtensionLoaded = true;
                    try {
                        xalanNamespaceContext = (NamespaceContext) Class.forName("org.apache.xalan.extensions.ExtensionNamespaceContext").newInstance();
                        xalanFunctionResolver = (XPathFunctionResolver) Class.forName("org.apache.xalan.extensions.XPathFunctionResolverImpl").newInstance();
                    } catch (Exception ignore) {
                        // Xalan is not on the classpath: run without extensions.
                    }
                }
            }
        }
        // Apply the extensions only when both pieces loaded successfully.
        if (xalanNamespaceContext != null && xalanFunctionResolver != null) {
            xpath.setNamespaceContext(xalanNamespaceContext);
            xpath.setXPathFunctionResolver(xalanFunctionResolver);
        }
    }
}
public class Complex { /** * Initializes all subconstraints . * @ return always { @ code null } */ @ Override public String initValues ( final FieldCase ca ) { } }
if ( ca != null ) { /* * If a FieldCase is given all fields will be generated anew , independent of the case * combination . */ for ( Constraint c : constraints ) { c . resetValues ( ) ; } } for ( Constraint c : constraints ) { c . initValues ( ca ) ; } return null ;
public class GenderRatioProcessor { /** * Counts a single page of the specified gender . If this is the first page * of that gender on this site , a suitable key is added to the list of the * site ' s genders . * @ param gender * the gender to count * @ param siteRecord * the site record to count it for */ private void countGender ( EntityIdValue gender , SiteRecord siteRecord ) { } }
Integer curValue = siteRecord . genderCounts . get ( gender ) ; if ( curValue == null ) { siteRecord . genderCounts . put ( gender , 1 ) ; } else { siteRecord . genderCounts . put ( gender , curValue + 1 ) ; }
public class SessionManager { /** * Remove a session * @ param session session instance */ public void destorySession ( Session session ) { } }
session . attributes ( ) . clear ( ) ; sessionMap . remove ( session . id ( ) ) ; Event event = new Event ( ) ; event . attribute ( "session" , session ) ; eventManager . fireEvent ( EventType . SESSION_DESTROY , event ) ;
public class IntArrayUtils {

    /**
     * Inverses the values of the given array with their indexes, in place.
     * For example, the result for [2, 0, 1] is [1, 2, 0] because
     * a[0]:2 => a[2]:0
     * a[1]:0 => a[0]:1
     * a[2]:1 => a[1]:2
     */
    public static void inverse(int[] a) {
        // First pass: process each permutation cycle starting from an unvisited element.
        // NOTE(review): inverseLoop is not visible here; the second pass implies it marks the
        // elements it has placed by storing their bitwise complement (always negative), so
        // that a[i] >= 0 identifies an unvisited cycle start -- confirm against its source.
        for (int i = 0; i < a.length; i++) {
            if (a[i] >= 0) {
                inverseLoop(a, i);
            }
        }
        // Second pass: undo the complement marking to recover the final inverse values.
        for (int i = 0; i < a.length; i++) {
            a[i] = ~a[i];
        }
    }
}
public class ServiceDiscoveryTransportFactory {

    /**
     * Find and validate the discoveryId given in a connection string.
     *
     * @param params connection-string parameters.
     * @return the parsed discovery id.
     * @throws IOException if the parameter is absent.
     */
    private UUID getDiscoveryId(final Map<String, String> params) throws IOException {
        final String rawId = params.get("discoveryId");
        if (rawId != null) {
            // A malformed id still fails fast here via UUID.fromString's own validation.
            return UUID.fromString(rawId);
        }
        throw new IOException("srvc transport did not get a discoveryId parameter. Refusing to create.");
    }
}
public class SearchableInterceptor {

    /**
     * Installs a <code>Searchable</code> into the given text component.
     *
     * @param <T> the type of the text component.
     * @param textComponent the target text component.
     * @return the same text component, returned for call chaining.
     */
    private <T extends JTextComponent> T installSearchable(T textComponent) {
        // Delegates the actual installation to SearchableUtils; the component itself is
        // returned unchanged so callers can chain the call.
        SearchableUtils.installSearchable(textComponent);
        return textComponent;
    }
}
public class URLCanonicalizer { /** * Tells whether or not the given port is the default for the given scheme . * < strong > Note : < / strong > Only HTTP and HTTPS schemes are taken into account . * @ param scheme the scheme * @ param port the port * @ return { @ code true } if given the port is the default port for the given scheme , { @ code false } otherwise . */ private static boolean isDefaultPort ( String scheme , int port ) { } }
return HTTP_SCHEME . equalsIgnoreCase ( scheme ) && port == HTTP_DEFAULT_PORT || HTTPS_SCHEME . equalsIgnoreCase ( scheme ) && port == HTTPS_DEFAULT_PORT ;
public class Expression { /** * Check the document field ' s type * and object * @ param lhs The field to check * @ param rhs The type * @ return Expression : lhs $ type rhs */ public static Expression type ( String lhs , Type rhs ) { } }
return new Expression ( lhs , "$type" , rhs . toString ( ) ) ;
public class MarkedElement { /** * Markup a rendering element with the specified classes . * @ param elem rendering element * @ param classes classes * @ return the marked element */ public static MarkedElement markup ( IRenderingElement elem , String ... classes ) { } }
assert elem != null ; MarkedElement tagElem = new MarkedElement ( elem ) ; for ( String cls : classes ) tagElem . aggClass ( cls ) ; return tagElem ;
public class RpcWrapper {

    /**
     * Make the wrapped call and unmarshall the returned Xdr to a response, getting the IP key
     * from the request. If an RPC Exception is being thrown, and retries remain, then log the
     * exception and retry.
     *
     * @param request The request to send.
     * @param responseHandler A response handler.
     * @throws IOException if the underlying call fails unrecoverably.
     */
    public void callRpcWrapped(S request, RpcResponseHandler<? extends T> responseHandler) throws IOException {
        // Retry loop: a successful call returns immediately. Each RpcException is delegated to
        // handleRpcException with the attempt index -- presumably that helper rethrows once
        // the final attempt is reached, otherwise this method would fall through silently
        // after _maximumRetries failures; confirm against handleRpcException.
        // NOTE(review): with _maximumRetries == 0 the request is never sent at all.
        for (int i = 0; i < _maximumRetries; ++i) {
            try {
                callRpcChecked(request, responseHandler);
                return;
            } catch (RpcException e) {
                handleRpcException(e, i);
            }
        }
    }
}
public class SeekableByteChannelPrefetcher {

    /**
     * Returns the buffer that contains the given position, fetching it if necessary, and starts
     * a background read of the buffer after this one (if there isn't one already).
     *
     * @param position absolute byte position in the channel.
     * @return the buffer covering the block that contains {@code position}.
     * @throws InterruptedException if interrupted while waiting for a background fetch.
     * @throws ExecutionException if the background fetch itself failed.
     */
    public ByteBuffer fetch(long position) throws InterruptedException, ExecutionException {
        long blockIndex = position / bufSize;
        boolean goingBack = false;
        // 1) Cache hit: the block has already been fully fetched. Kick off a prefetch of the
        //    next block and return the cached buffer immediately.
        for (WorkUnit w : full) {
            if (w.blockIndex == blockIndex) {
                ensureFetching(blockIndex + 1);
                nbHit++;
                return w.buf;
            } else if (w.blockIndex > blockIndex) {
                goingBack = true;
            }
        }
        if (goingBack) {
            // user is asking for a block with a lower index than we've already fetched -
            // in other words they are not following the expected pattern of increasing indexes.
            nbGoingBack++;
        }
        // 2) Nothing in flight: start fetching the requested block now.
        if (null == fetching) {
            ensureFetching(blockIndex);
        }
        WorkUnit candidate = fetching;
        // block until we have the buffer
        ByteBuffer buf = candidate.getBuf();
        full.add(candidate);
        fetching = null;
        if (candidate.blockIndex == blockIndex) {
            // this is who we were waiting for -- a "near hit": the block was already in
            // flight, we just had to wait for it. Prefetch the next block before returning.
            nbNearHit++;
            ensureFetching(blockIndex + 1);
            return buf;
        } else {
            // wrong block. Let's fetch the right one now, wait for it to finish, record it in
            // the cache, then start prefetching the block after it.
            nbMiss++;
            ensureFetching(blockIndex);
            candidate = fetching;
            buf = candidate.getBuf();
            full.add(candidate);
            fetching = null;
            ensureFetching(blockIndex + 1);
            return buf;
        }
    }
}
public class ContinuousDistribution { /** * Computes the log of the Probability Density Function . Note , that then the * probability is zero , { @ link Double # NEGATIVE _ INFINITY } would be the true * value . Instead , this method will always return the negative of * { @ link Double # MAX _ VALUE } . This is to avoid propagating bad values through * computation . * @ param x the value to get the log ( PDF ) of * @ return the value of log ( PDF ( x ) ) */ public double logPdf ( double x ) { } }
double pdf = pdf ( x ) ; if ( pdf <= 0 ) return - Double . MAX_VALUE ; return Math . log ( pdf ) ;
public class SoftDictionary {

    /**
     * Insert a prepared string into the dictionary.
     * <p>Id is a special tag used to handle 'leave one out' lookups. If you do a lookup on a
     * string with a non-null id, you get the closest matches that do not have the same id.
     *
     * @param id optional leave-one-out tag; may be null.
     * @param toInsert the string to index.
     * @param value the value associated with the inserted string.
     */
    public void put(String id, StringWrapper toInsert, Object value) {
        MyWrapper wrapper = asMyWrapper(toInsert);
        Token[] tokens = wrapper.getTokens();
        // Maintain an inverted index: for every token of the inserted string, append the
        // wrapper to that token's posting list (creating the list on first use).
        for (int i = 0; i < tokens.length; i++) {
            ArrayList stringsWithToken = (ArrayList) index.get(tokens[i]);
            if (stringsWithToken == null) index.put(tokens[i], (stringsWithToken = new ArrayList()));
            stringsWithToken.add(wrapper);
        }
        // Record the value and (optionally) the leave-one-out id for this wrapper.
        map.put(wrapper, value);
        if (id != null) idMap.put(wrapper, id);
        // Any cached distance statistics are invalidated by the insertion.
        distance = null; // mark distance as "out of date"
        totalEntries++;
    }
}
public class AbstractSegment3F {

    /**
     * Tests if the axis-aligned box is intersecting a segment, via the separating-axis test:
     * the segment is treated as a centered direction (half-extent d), the box as a centered
     * half-extent (e), and the shapes are disjoint iff some axis among the box's three face
     * normals and the three cross products of those normals with the segment separates them.
     *
     * @param sx1 x coordinate of the first point of the segment.
     * @param sy1 y coordinate of the first point of the segment.
     * @param sz1 z coordinate of the first point of the segment.
     * @param sx2 x coordinate of the second point of the segment.
     * @param sy2 y coordinate of the second point of the segment.
     * @param sz2 z coordinate of the second point of the segment.
     * @param minx coordinates of the lowest point of the box.
     * @param miny coordinates of the lowest point of the box.
     * @param minz coordinates of the lowest point of the box.
     * @param maxx coordinates of the uppermost point of the box.
     * @param maxy coordinates of the uppermost point of the box.
     * @param maxz coordinates of the uppermost point of the box.
     * @return <code>true</code> if the two shapes intersect each other;
     *         <code>false</code> otherwise.
     * @see "http://books.google.ca/books?id=fvA7zLEFWZgC"
     */
    @Pure
    public static boolean intersectsSegmentAlignedBox(double sx1, double sy1, double sz1, double sx2, double sy2, double sz2, double minx, double miny, double minz, double maxx, double maxy, double maxz) {
        assert (minx <= maxx);
        assert (miny <= maxy);
        assert (minz <= maxz);
        // Segment half-direction (d) and box half-extents (e).
        double dx = (sx2 - sx1) * .5;
        double dy = (sy2 - sy1) * .5;
        double dz = (sz2 - sz1) * .5;
        double ex = (maxx - minx) * .5;
        double ey = (maxy - miny) * .5;
        double ez = (maxz - minz) * .5;
        // Vector (c) from the box center to the segment midpoint.
        double cx = sx1 + dx - (minx + maxx) * .5;
        double cy = sy1 + dy - (miny + maxy) * .5;
        double cz = sz1 + dz - (minz + maxz) * .5;
        double adx = Math.abs(dx);
        double ady = Math.abs(dy);
        double adz = Math.abs(dz);
        // Axes 1-3: the box's face normals (world X, Y, Z).
        if (Math.abs(cx) > (ex + adx)) {
            return false;
        }
        if (Math.abs(cy) > (ey + ady)) {
            return false;
        }
        if (Math.abs(cz) > (ez + adz)) {
            return false;
        }
        // Axes 4-6: cross products of the segment direction with each face normal.
        if (Math.abs(dy * cz - dz * cy) > (ey * adz + ez * ady)) {
            return false;
        }
        if (Math.abs(dz * cx - dx * cz) > (ez * adx + ex * adz)) {
            return false;
        }
        if (Math.abs(dx * cy - dy * cx) > (ex * ady + ey * adx)) {
            return false;
        }
        // No separating axis was found: the shapes intersect.
        return true;
    }
}
public class JsonBasedPropertiesProvider { /** * Convert given Json document to a multi - level map . */ @ SuppressWarnings ( "unchecked" ) private Map < String , Object > convertToMap ( Object jsonDocument ) { } }
Map < String , Object > jsonMap = new LinkedHashMap < > ( ) ; // Document is a text block if ( ! ( jsonDocument instanceof JSONObject ) ) { jsonMap . put ( "content" , jsonDocument ) ; return jsonMap ; } JSONObject obj = ( JSONObject ) jsonDocument ; for ( String key : obj . keySet ( ) ) { Object value = obj . get ( key ) ; if ( value instanceof JSONObject ) { value = convertToMap ( value ) ; } else if ( value instanceof JSONArray ) { ArrayList < Map < String , Object > > collection = new ArrayList < > ( ) ; for ( Object element : ( ( JSONArray ) value ) ) { collection . add ( convertToMap ( element ) ) ; } value = collection ; } jsonMap . put ( key , value ) ; } return jsonMap ;
public class MerkleTree { /** * For testing purposes . * Gets the smallest range containing the token . */ public TreeRange get ( Token t ) { } }
return getHelper ( root , fullRange . left , fullRange . right , ( byte ) 0 , t ) ;
public class EMailField { /** * Get the HTML mailto Hyperlink . * @ return The hyperlink . */ public String getHyperlink ( ) { } }
String strMailTo = this . getString ( ) ; if ( strMailTo != null ) if ( strMailTo . length ( ) > 0 ) strMailTo = BaseApplication . MAIL_TO + ":" + strMailTo ; return strMailTo ;
public class PickerUtilities {

    /**
     * localTimeToString, This will return the supplied time as a string. If the time is null,
     * this will return the value of emptyTimeString.
     * Time values will be output in one of the following ISO-8601 formats: "HH:mm",
     * "HH:mm:ss", "HH:mm:ss.SSS", "HH:mm:ss.SSSSSS", "HH:mm:ss.SSSSSSSSS".
     * The format used will be the shortest that outputs the full value of the time where the
     * omitted parts are implied to be zero.
     */
    public static String localTimeToString(LocalTime time, String emptyTimeString) {
        if (time == null) {
            return emptyTimeString;
        }
        // LocalTime.toString() already emits the shortest ISO-8601 form described above.
        return time.toString();
    }
}
public class Client {

    /**
     * Delete an existing application from the current Doradus tenant, including all of its
     * tables and data. Because updates are idempotent, deleting an already-deleted application
     * is acceptable. Hence, if no error is thrown, the result is always true. An exception is
     * thrown if an error occurs.
     *
     * @param appName Name of existing application to delete.
     * @param key Key of application to delete. Can be null if the application has no key.
     * @return True if the application was deleted or already deleted.
     */
    public boolean deleteApplication(String appName, String key) {
        Utils.require(!m_restClient.isClosed(), "Client has been closed");
        Utils.require(appName != null && appName.length() > 0, "appName");
        try {
            // Send a DELETE request to "/_applications/{application}/{key}", prepending the
            // optional API prefix when one is configured.
            StringBuilder uri = new StringBuilder(Utils.isEmpty(m_apiPrefix) ? "" : "/" + m_apiPrefix);
            uri.append("/_applications/");
            uri.append(Utils.urlEncode(appName));
            if (!Utils.isEmpty(key)) {
                uri.append("/");
                uri.append(Utils.urlEncode(key));
            }
            RESTResponse response = m_restClient.sendRequest(HttpMethod.DELETE, uri.toString());
            m_logger.debug("deleteApplication() response: {}", response.toString());
            // A 404 means the application was already gone, which is acceptable for an
            // idempotent delete; every other error status raises.
            if (response.getCode() != HttpCode.NOT_FOUND) { // Notfound is acceptable
                throwIfErrorResponse(response);
            }
            return true;
        } catch (Exception e) {
            // Wrap checked exceptions for callers; the original cause is preserved.
            throw new RuntimeException(e);
        }
    }
}
public class MessageBatch {

    /**
     * Write a TaskMessage into a stream.
     * Each TaskMessage is encoded as:
     * type ... short(2) | task ... short(2) | len ... int(4) | payload ... byte[]
     *
     * @param bout destination stream.
     * @param message the message to encode.
     * @throws Exception if the task id does not fit into a short or the write fails.
     */
    private void writeTaskMessage(ChannelBufferOutputStream bout, TaskMessage message) throws Exception {
        // A null payload is encoded as length 0 with no payload bytes.
        int payload_len = 0;
        if (message.message() != null) payload_len = message.message().length;
        short type = message.get_type();
        bout.writeShort(type);
        // Task ids occupy only 2 bytes on the wire, so reject anything wider.
        int task_id = message.task();
        if (task_id > Short.MAX_VALUE) throw new RuntimeException("Task ID should not exceed " + Short.MAX_VALUE);
        bout.writeShort((short) task_id);
        bout.writeInt(payload_len);
        if (payload_len > 0) bout.write(message.message());
        // LOG.info("Write one message taskid:{}, len:{}, data:{}", taskId
        //          , payload_len, JStormUtils.toPrintableString(message.message()));
    }
}
public class JmsRunnableFactory { /** * Creates a new { @ link TopicConsumer } . For every message received ( or when the timeout waiting for messages is hit ) , the callback * is invoked with the message received . */ public TopicConsumer createTopicListener ( final String topic , final ConsumerCallback < Message > messageCallback ) { } }
Preconditions . checkState ( connectionFactory != null , "connection factory was never injected!" ) ; return new TopicConsumer ( connectionFactory , jmsConfig , topic , messageCallback ) ;
public class KerasInitilizationUtils {

    /**
     * Get weight initialization from Keras layer configuration.
     *
     * @param layerConfig dictionary containing Keras layer configuration
     * @param initField name of the initializer field to read from the inner config
     * @param enforceTrainingConfig whether to enforce loading configuration for further training
     * @param conf Keras layer configuration constants
     * @param kerasMajorVersion major Keras version; version 1 and 2 store the initializer in
     *                          different shapes (see below)
     * @return Pair of DL4J weight initialization and distribution
     * @throws InvalidKerasConfigurationException Invalid Keras config
     * @throws UnsupportedKerasConfigurationException Unsupported Keras config
     */
    public static Pair<WeightInit, Distribution> getWeightInitFromConfig(Map<String, Object> layerConfig, String initField, boolean enforceTrainingConfig, KerasLayerConfiguration conf, int kerasMajorVersion) throws InvalidKerasConfigurationException, UnsupportedKerasConfigurationException {
        Map<String, Object> innerConfig = KerasLayerUtils.getInnerLayerConfigFromConfig(layerConfig, conf);
        if (!innerConfig.containsKey(initField))
            throw new InvalidKerasConfigurationException("Keras layer is missing " + initField + " field");
        String kerasInit;
        Map<String, Object> initMap;
        if (kerasMajorVersion != 2) {
            // Keras 1: the field is a plain string naming the initializer; the surrounding
            // inner config doubles as the initializer's parameter map.
            kerasInit = (String) innerConfig.get(initField);
            initMap = innerConfig;
        } else {
            // Keras 2: the field is a nested dict {"class_name": ..., "config": {...}}.
            @SuppressWarnings("unchecked")
            Map<String, Object> fullInitMap = (HashMap) innerConfig.get(initField);
            initMap = (HashMap) fullInitMap.get("config");
            if (fullInitMap.containsKey("class_name")) {
                kerasInit = (String) fullInitMap.get("class_name");
            } else {
                throw new UnsupportedKerasConfigurationException("Incomplete initialization class");
            }
        }
        Pair<WeightInit, Distribution> init;
        try {
            init = mapWeightInitialization(kerasInit, conf, initMap, kerasMajorVersion);
        } catch (UnsupportedKerasConfigurationException e) {
            // An unknown initializer is fatal only when an exact training config is required;
            // otherwise fall back to XAVIER and warn.
            if (enforceTrainingConfig)
                throw e;
            else {
                init = new Pair<>(WeightInit.XAVIER, null);
                log.warn("Unknown weight initializer " + kerasInit + " (Using XAVIER instead).");
            }
        }
        return init;
    }
}
public class Node { /** * Get an absolute URL from a URL attribute that may be relative ( i . e . an < code > & lt ; a href & gt ; < / code > or * < code > & lt ; img src & gt ; < / code > ) . * E . g . : < code > String absUrl = linkEl . absUrl ( " href " ) ; < / code > * If the attribute value is already absolute ( i . e . it starts with a protocol , like * < code > http : / / < / code > or < code > https : / / < / code > etc ) , and it successfully parses as a URL , the attribute is * returned directly . Otherwise , it is treated as a URL relative to the element ' s { @ link # baseUri } , and made * absolute using that . * As an alternate , you can use the { @ link # attr } method with the < code > abs : < / code > prefix , e . g . : * < code > String absUrl = linkEl . attr ( " abs : href " ) ; < / code > * @ param attributeKey The attribute key * @ return An absolute URL if one could be made , or an empty string ( not null ) if the attribute was missing or * could not be made successfully into a URL . * @ see # attr * @ see java . net . URL # URL ( java . net . URL , String ) */ public String absUrl ( String attributeKey ) { } }
Validate . notEmpty ( attributeKey ) ; if ( ! hasAttr ( attributeKey ) ) { return "" ; // nothing to make absolute with } else { return StringUtil . resolve ( baseUri ( ) , attr ( attributeKey ) ) ; }
public class ServerMonitor {

    /**
     * Returns an integer that indicates location type of database server
     * linked to the running Doradus-server: {@code LOCAL} if the Doradus-server
     * and database-server are hosted on the same machine, {@code REMOTE} if
     * they are on the different machines, {@code UNKNOWN} if no IP address for
     * the configured hostname of database-server could be found.
     *
     * @return {@code LOCAL}, {@code REMOTE}, or {@code UNKNOWN}
     */
    public int getDatabaseLink() {
        // Lazily resolved and cached; a negative value means "not yet determined".
        if (databaseLink < 0) {
            ServerConfig c = ServerConfig.getInstance();
            try {
                // extractValidHostname reports via this out-parameter whether
                // the configured host resolves to a local address.
                boolean[] local = new boolean[1];
                String dbhost = extractValidHostname(c.dbhost, local);
                databaseLink = local[0] ? LOCAL : REMOTE;
                logger.info("Database hostname: " + dbhost + " (local=" + local[0] + ").");
            } catch (UnknownHostException ex) {
                // Hostname could not be resolved to any IP address.
                logger.warn(ex.getMessage());
                databaseLink = UNKNOWN;
            }
        }
        return databaseLink;
    }
}
public class CryptoPathMapper {

    /**
     * Verifies that no node exists for the given path. Otherwise a {@link FileAlreadyExistsException} will be thrown.
     *
     * @param cleartextPath A path
     * @throws FileAlreadyExistsException If the node exists
     * @throws IOException If any I/O error occurs while attempting to resolve the ciphertext path
     */
    public void assertNonExisting(CryptoPath cleartextPath) throws FileAlreadyExistsException, IOException {
        try {
            // Resolving the ciphertext file type only succeeds when a node already
            // exists at this path, which is exactly the failure case here.
            CiphertextFileType type = getCiphertextFileType(cleartextPath);
            throw new FileAlreadyExistsException(cleartextPath.toString(), null, "For this path there is already a " + type.name());
        } catch (NoSuchFileException e) {
            // good! NoSuchFileException is the expected outcome: nothing exists at the path.
        }
    }
}
public class Util { /** * Set the message data as a XML String . * @ return */ public static Document convertXMLToDOM ( String strXML ) { } }
DocumentBuilder db = Util . getDocumentBuilder ( ) ; // Parse the input file Document doc = null ; try { synchronized ( db ) { // db is not thread safe doc = db . parse ( new InputSource ( new StringReader ( strXML ) ) ) ; } } catch ( SAXException se ) { System . out . println ( se . getMessage ( ) ) ; return null ; } catch ( IOException ioe ) { System . out . println ( ioe ) ; return null ; } catch ( Exception ioe ) { System . out . println ( ioe ) ; return null ; } return doc ;
public class LineageInfo {

    /**
     * Load all lineage info from a {@link State}.
     *
     * @return A map from branch to its lineage info. If there is no destination info, return an empty map
     */
    static Map<String, Set<LineageEventBuilder>> load(State state) {
        String name = state.getProp(getKey(NAME_KEY));
        Descriptor source = Descriptor.fromJson(state.getProp(getKey(LineageEventBuilder.SOURCE)));
        String branchedPrefix = getKey(BRANCH, "");
        Map<String, Set<LineageEventBuilder>> events = Maps.newHashMap();
        // No source descriptor means no lineage events can be built at all.
        if (source == null) {
            return events;
        }
        for (Map.Entry<Object, Object> entry : state.getProperties().entrySet()) {
            String key = entry.getKey().toString();
            if (!key.startsWith(branchedPrefix)) {
                continue;
            }
            // Remainder of the key is expected to be "<branchId>.<eventType>"
            // — assumes exactly one dot; TODO confirm against the code that writes these props.
            String[] parts = key.substring(branchedPrefix.length()).split("\\.");
            assert parts.length == 2;
            String branchId = parts[0];
            Set<LineageEventBuilder> branchEvents = events.computeIfAbsent(branchId, k -> new HashSet<>());
            switch (parts[1]) {
                case LineageEventBuilder.DESTINATION:
                    // One event per destination descriptor, all sharing the same source.
                    List<Descriptor> descriptors = Descriptor.fromJsonList(entry.getValue().toString());
                    for (Descriptor descriptor : descriptors) {
                        LineageEventBuilder event = new LineageEventBuilder(name);
                        event.setSource(source);
                        event.setDestination(descriptor);
                        branchEvents.add(event);
                    }
                    break;
                default:
                    throw new RuntimeException("Unsupported lineage key: " + key);
            }
        }
        return events;
    }
}
public class CensoredDescriptives {

    /**
     * Calculates the Variance of Mean.
     *
     * @param survivalFunction table keyed by time; each row carries "mi" (count), "r" (rank)
     *     and "Sti" (survival value; null rows are treated as censored by this code)
     * @return the estimated variance of the mean
     */
    public static double meanVariance(AssociativeArray2D survivalFunction) {
        double meanVariance = 0;
        int m = 0; // total count of rows with null "Sti" (censored records)
        int n = 0; // total count of all records
        // First pass: accumulate the record counts n and m.
        for (Map.Entry<Object, AssociativeArray> entry : survivalFunction.entrySet()) {
            //Object ti = entry.getKey();
            AssociativeArray row = entry.getValue();
            Number mi = (Number) row.get("mi");
            n += mi.intValue();
            if (row.get("Sti") == null) { //if censored internalData
                m += mi.intValue();
            }
        }
        // Second pass: sum the variance contributions of the non-censored rows.
        for (Map.Entry<Object, AssociativeArray> entry : survivalFunction.entrySet()) {
            //Object ti = entry.getKey();
            AssociativeArray row = entry.getValue();
            if (row.get("Sti") == null) {
                continue; //skip censored internalData
            }
            Number mi = (Number) row.get("mi");
            Number r = (Number) row.get("r");
            double Ar = ar(survivalFunction, r.intValue());
            // Guard against a zero/negative denominator when the rank reaches n.
            if (n - r.intValue() > 0) {
                meanVariance += mi.intValue() * (Ar * Ar) / ((n - r.intValue()) * (n - r.intValue() + 1.0));
            }
        }
        // NOTE(review): when m <= 1 this factor divides by zero (or negates the result);
        // confirm callers guarantee at least two censored records.
        meanVariance *= m / (m - 1.0);
        return meanVariance;
    }
}
public class MultipartContent { /** * Sets the HTTP content parts of the HTTP multipart request , where each part is assumed to have * no HTTP headers and no encoding . * < p > Overriding is only supported for the purpose of calling the super implementation and * changing the return type , but nothing else . */ public MultipartContent setContentParts ( Collection < ? extends HttpContent > contentParts ) { } }
this . parts = new ArrayList < Part > ( contentParts . size ( ) ) ; for ( HttpContent contentPart : contentParts ) { addPart ( new Part ( contentPart ) ) ; } return this ;
public class CPDefinitionOptionValueRelPersistenceImpl { /** * Returns the last cp definition option value rel in the ordered set where CPDefinitionOptionRelId = & # 63 ; . * @ param CPDefinitionOptionRelId the cp definition option rel ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching cp definition option value rel * @ throws NoSuchCPDefinitionOptionValueRelException if a matching cp definition option value rel could not be found */ @ Override public CPDefinitionOptionValueRel findByCPDefinitionOptionRelId_Last ( long CPDefinitionOptionRelId , OrderByComparator < CPDefinitionOptionValueRel > orderByComparator ) throws NoSuchCPDefinitionOptionValueRelException { } }
CPDefinitionOptionValueRel cpDefinitionOptionValueRel = fetchByCPDefinitionOptionRelId_Last ( CPDefinitionOptionRelId , orderByComparator ) ; if ( cpDefinitionOptionValueRel != null ) { return cpDefinitionOptionValueRel ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPDefinitionOptionRelId=" ) ; msg . append ( CPDefinitionOptionRelId ) ; msg . append ( "}" ) ; throw new NoSuchCPDefinitionOptionValueRelException ( msg . toString ( ) ) ;
public class Ifc2x3tc1FactoryImpl {

    /**
     * Converts the given literal into an {@link IfcWallTypeEnum} value.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public IfcWallTypeEnum createIfcWallTypeEnumFromString(EDataType eDataType, String initialValue) {
        IfcWallTypeEnum result = IfcWallTypeEnum.get(initialValue);
        // get() returns null for unknown literals; surface that as an IllegalArgumentException.
        if (result == null)
            throw new IllegalArgumentException("The value '" + initialValue + "' is not a valid enumerator of '" + eDataType.getName() + "'");
        return result;
    }
}
public class WebStatFilter {

    /**
     * {@inheritDoc}
     *
     * Response-side JAX-RS filter: closes out the per-request statistics that were
     * opened on the request side (via {@code WebRequestStat.current()}).
     */
    @Override
    public void filter(ContainerRequestContext requestContext, ContainerResponseContext responseContext) throws IOException {
        String requestURI = getRequestURI(requestContext);
        // Skip excluded URIs and requests for which no stat was started on the way in.
        if (isExclusion(requestURI) || WebRequestStat.current() == null) {
            return;
        }
        long endNano = System.nanoTime();
        WebRequestStat.current().setEndNano(endNano);
        long nanos = endNano - WebRequestStat.current().getStartNano();
        Throwable error = null;
        if (responseContext.getStatus() == Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()) {
            // A 500 whose entity is a Throwable carries the failure cause.
            if (Throwable.class.isInstance(responseContext.getEntity()))
                error = (Throwable) responseContext.getEntity();
        }
        webAppStat.afterInvoke(error, nanos);
        WebURIStat uriStat = getUriStat(requestURI);
        if (uriStat == null) {
            int status = responseContext.getStatus();
            if (status == Response.Status.NOT_FOUND.getStatusCode()) {
                // 404s are aggregated under a synthetic "error_<status>" URI instead of the raw path.
                String errorUrl = contextPath + "error_" + status;
                uriStat = webAppStat.getURIStat(errorUrl, true);
            } else {
                uriStat = webAppStat.getURIStat(requestURI, true);
            }
            if (uriStat != null) {
                uriStat.beforeInvoke(); // compensating call: beforeInvoke never ran for this URI on the request side
            }
        }
        if (uriStat != null) {
            uriStat.afterInvoke(error, nanos);
        }
        // Clear the thread-local so it cannot leak into the next request on this thread.
        WebRequestStat.set(null);
        if (isProfileEnable()) {
            Profiler.release(nanos);
            Map<ProfileEntryKey, ProfileEntryReqStat> requestStatsMap = Profiler.getStatsMap();
            if (uriStat != null) {
                uriStat.getProfiletat().record(requestStatsMap);
            }
            Profiler.removeLocal();
        }
    }
}
public class AbstractSessionQuery {

    /*
     * (non-Javadoc)
     * @see net.timewalker.ffmq4.network.packet.AbstractPacket#serializeTo(net.timewalker.ffmq4.utils.RawDataOutputStream)
     */
    @Override
    protected void serializeTo(RawDataBuffer out) {
        // Write the common packet fields first, then append this query's session id.
        super.serializeTo(out);
        out.writeInt(sessionId.asInt());
    }
}
public class MutableTimecodeDuration {

    /**
     * Returns a MutableTimecodeDuration instance for the given TimecodeDuration storage string.
     * Will return null in case the storage string represents a null TimecodeDuration.
     *
     * @param timecode the storage string to parse
     * @return the parsed MutableTimecodeDuration, or null for a null representation
     * @throws IllegalArgumentException if the storage string is not a valid timecode
     */
    public static MutableTimecodeDuration valueOf(String timecode) throws IllegalArgumentException {
        MutableTimecodeDuration td = new MutableTimecodeDuration();
        // parse() is presumably declared on a supertype with a broader return
        // type, hence the downcast — TODO confirm.
        return (MutableTimecodeDuration) td.parse(timecode);
    }
}
public class MediaType { /** * Get he extension by url . * @ param url url . * @ return extension name . */ public static String getUrlExtension ( String url ) { } }
String extension = MimeTypeMap . getFileExtensionFromUrl ( url ) ; return TextUtils . isEmpty ( extension ) ? "" : extension ;
public class JvmTypeReferenceImpl {

    /**
     * Accepts the given type-reference visitor. Generated stub — not yet implemented.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public <Parameter, Result> Result accept(ITypeReferenceVisitorWithParameter<Parameter, Result> visitor, Parameter parameter) {
        // TODO: implement this method
        // Ensure that you remove @generated or mark it @generated NOT
        throw new UnsupportedOperationException();
    }
}
public class QueryService { /** * Generic method , which based on a Root & lt ; ENTITY & gt ; returns an Expression which type is the same as the given ' value ' type . * @ param metaclassFunction function which returns the column which is used for filtering . * @ param value the actual value to filter for . * @ param < X > The type of the attribute which is filtered . * @ return a Specification . */ protected < X > Specification < ENTITY > equalsSpecification ( Function < Root < ENTITY > , Expression < X > > metaclassFunction , final X value ) { } }
return ( root , query , builder ) -> builder . equal ( metaclassFunction . apply ( root ) , value ) ;
public class MessageAdapterImpl { /** * { @ inheritDoc } */ public Messageadapter merge ( MergeableMetadata < ? > jmd ) throws Exception { } }
if ( jmd instanceof MessageAdapterImpl ) { MessageAdapterImpl input = ( MessageAdapterImpl ) jmd ; String newId = this . id == null ? input . id : this . id ; List < MessageListener > newMessagelistener = MergeUtil . mergeList ( this . messagelisteners , input . messagelisteners ) ; return new MessageAdapterImpl ( newMessagelistener , newId ) ; } else { return this ; }
public class FactoryWaveletDaub { /** * DaubJ wavelets have the following properties : < br > * < ul > * < li > Conserve the signal ' s energy < / li > * < li > If the signal is approximately polynomial of degree J / 2-1 or less within the support then fluctuations are approximately zero . < / li > * < li > The sum of the scaling numbers is sqrt ( 2 ) < / li > * < li > The sum of the wavelet numbers is 0 < / li > * < / ul > * @ param J The wavelet ' s degree . * @ return Description of the DaubJ wavelet . */ public static WaveletDescription < WlCoef_F32 > daubJ_F32 ( int J ) { } }
if ( J != 4 ) { throw new IllegalArgumentException ( "Only 4 is currently supported" ) ; } WlCoef_F32 coef = new WlCoef_F32 ( ) ; coef . offsetScaling = 0 ; coef . offsetWavelet = 0 ; coef . scaling = new float [ 4 ] ; coef . wavelet = new float [ 4 ] ; double sqrt3 = Math . sqrt ( 3 ) ; double div = 4.0 * Math . sqrt ( 2 ) ; coef . scaling [ 0 ] = ( float ) ( ( 1 + sqrt3 ) / div ) ; coef . scaling [ 1 ] = ( float ) ( ( 3 + sqrt3 ) / div ) ; coef . scaling [ 2 ] = ( float ) ( ( 3 - sqrt3 ) / div ) ; coef . scaling [ 3 ] = ( float ) ( ( 1 - sqrt3 ) / div ) ; coef . wavelet [ 0 ] = coef . scaling [ 3 ] ; coef . wavelet [ 1 ] = - coef . scaling [ 2 ] ; coef . wavelet [ 2 ] = coef . scaling [ 1 ] ; coef . wavelet [ 3 ] = - coef . scaling [ 0 ] ; WlBorderCoefStandard < WlCoef_F32 > inverse = new WlBorderCoefStandard < > ( coef ) ; return new WaveletDescription < > ( new BorderIndex1D_Wrap ( ) , coef , inverse ) ;
public class DirContextAdapter {

    /**
     * {@inheritDoc}
     *
     * <p>Always fails: subcontext creation is not supported by this adapter.
     */
    @Override
    public DirContext createSubcontext(Name name, Attributes attrs) throws NamingException {
        throw new UnsupportedOperationException(NOT_IMPLEMENTED);
    }
}
public class CmsFileUtil { /** * Reads a file with the given name from the class loader and returns the file content . < p > * @ param filename the file to read * @ return the read file content * @ throws IOException in case of file access errors */ @ SuppressWarnings ( "resource" ) public static byte [ ] readFile ( String filename ) throws IOException { } }
// create input and output stream InputStream in = CmsFileUtil . class . getClassLoader ( ) . getResourceAsStream ( filename ) ; if ( in == null ) { throw new FileNotFoundException ( filename ) ; } return readFully ( in ) ;
public class YPipe {

    /**
     * Rolls back the most recently written item, returning it if such an
     * item exists, null otherwise.
     * NOTE(review): an item appears to be un-writable only while the flush
     * point {@code f} has not yet advanced past it — confirm against the
     * pipe's flush semantics.
     */
    @Override
    public T unwrite() {
        // If the flush point already covers the back of the queue, the last
        // written item is no longer retractable.
        if (f == queue.backPos()) {
            return null;
        }
        queue.unpush();
        return queue.back();
    }
}
public class ValueStack { /** * popStringOrByteArray . * @ return a { @ link java . lang . Object } object . * @ throws java . text . ParseException if any . */ public Object popStringOrByteArray ( ) throws ParseException { } }
final Object popped = super . pop ( ) ; if ( popped instanceof String ) return popped ; /* * This is probably an unquoted single word literal . */ if ( popped instanceof TokVariable ) return ( ( TokVariable ) popped ) . getName ( ) ; if ( popped instanceof byte [ ] ) return popped ; throw new ParseException ( "Literal or byte[] required, found " + popped . getClass ( ) . getSimpleName ( ) , 0 ) ;
public class ExtendedAggregateExtractProjectRule {

    /**
     * Compute which input fields are used by the aggregate.
     *
     * @param aggregate the aggregate whose field usage is computed
     * @param input the aggregate's input relation
     * @return a bit-set builder with one bit set per used input field
     */
    private ImmutableBitSet.Builder getInputFieldUsed(Aggregate aggregate, RelNode input) {
        // 1. group fields are always used
        final ImmutableBitSet.Builder inputFieldsUsed = aggregate.getGroupSet().rebuild();
        // 2. agg functions
        for (AggregateCall aggCall : aggregate.getAggCallList()) {
            for (int i : aggCall.getArgList()) {
                inputFieldsUsed.set(i);
            }
            // filterArg < 0 means the call has no FILTER clause.
            if (aggCall.filterArg >= 0) {
                inputFieldsUsed.set(aggCall.filterArg);
            }
        }
        // 3. window time field if the aggregate is a group window aggregate.
        if (aggregate instanceof LogicalWindowAggregate) {
            inputFieldsUsed.set(getWindowTimeFieldIndex((LogicalWindowAggregate) aggregate, input));
        }
        return inputFieldsUsed;
    }
}
public class Context { /** * Pop the current context from thread local , and restore parent context to thread local . */ public static void popContext ( ) { } }
Context context = LOCAL . get ( ) ; if ( context != null ) { Context parent = context . getParent ( ) ; if ( parent != null ) { LOCAL . set ( parent ) ; } else { LOCAL . remove ( ) ; } }
public class SQLiteConnectionPool {

    /**
     * Can't throw.
     *
     * Logs a warning describing why the pool has been unable to grant a
     * connection to the current thread, including a per-connection summary
     * (active / idle / available) and any operations in progress.
     */
    private void logConnectionPoolBusyLocked(long waitMillis, int connectionFlags) {
        final Thread thread = Thread.currentThread();
        StringBuilder msg = new StringBuilder();
        msg.append("The connection pool for database '").append(mConfiguration.label);
        msg.append("' has been unable to grant a connection to thread ");
        msg.append(thread.getId()).append(" (").append(thread.getName()).append(") ");
        msg.append("with flags 0x").append(Integer.toHexString(connectionFlags));
        // waitMillis * 0.001f converts the wait from milliseconds to seconds.
        msg.append(" for ").append(waitMillis * 0.001f).append(" seconds.\n");
        ArrayList<String> requests = new ArrayList<String>();
        int activeConnections = 0;
        int idleConnections = 0;
        if (!mAcquiredConnections.isEmpty()) {
            for (SQLiteConnection connection : mAcquiredConnections.keySet()) {
                // A non-null description means the connection is actively executing something.
                String description = connection.describeCurrentOperationUnsafe();
                if (description != null) {
                    requests.add(description);
                    activeConnections += 1;
                } else {
                    idleConnections += 1;
                }
            }
        }
        int availableConnections = mAvailableNonPrimaryConnections.size();
        if (mAvailablePrimaryConnection != null) {
            availableConnections += 1;
        }
        msg.append("Connections: ").append(activeConnections).append(" active, ");
        msg.append(idleConnections).append(" idle, ");
        msg.append(availableConnections).append(" available.\n");
        if (!requests.isEmpty()) {
            msg.append("\nRequests in progress:\n");
            for (String request : requests) {
                msg.append("  ").append(request).append("\n");
            }
        }
        Log.w(TAG, msg.toString());
    }
}
public class Environment {

    /**
     * Retrieves the environment in which Jogger is working.
     * The JVM system property {@code JOGGER_ENV} takes precedence over the
     * OS environment variable of the same name; the default is {@code "dev"}.
     *
     * @return a String object representing the environment.
     */
    public static String get() {
        String env = System.getProperty("JOGGER_ENV");
        if (env == null) {
            env = System.getenv("JOGGER_ENV");
        }
        return env != null ? env : "dev";
    }
}
public class Catalog {

    /**
     * Reads the {@code Catalog} entry from the given input stream.
     * The data is assumed to be in little endian byte order.
     *
     * @param pDataInput the input stream
     * @return a new {@code Catalog}
     * @throws java.io.IOException if an I/O exception occurs during read
     */
    public static Catalog read(final DataInput pDataInput) throws IOException {
        CatalogHeader header = CatalogHeader.read(pDataInput);
        CatalogItem[] items = new CatalogItem[header.getThumbnailCount()];
        for (int i = 0; i < header.getThumbnailCount(); i++) {
            CatalogItem item = CatalogItem.read(pDataInput);
            // Items are stored by their 1-based id — assumes ids are contiguous
            // in [1, thumbnailCount]; TODO confirm with the file-format spec.
            items[item.getItemId() - 1] = item;
        }
        return new Catalog(header, items);
    }
}
public class FormalParameterSourceAppender {

    /**
     * Initialize the formal parameter.
     *
     * @param context the context of the formal parameter.
     * @param name the name of the formal parameter.
     * @param typeContext the type provider used when resolving the parameter's type.
     */
    public void eInit(XtendExecutable context, String name, IJvmTypeProvider typeContext) {
        // Pure delegation to the wrapped builder.
        this.builder.eInit(context, name, typeContext);
    }
}
public class StreamingJsonBuilder {

    /**
     * Delegates to {@link #call(String, Iterable, Closure)}.
     *
     * @param name the attribute name
     * @param coll the collection rendered as a JSON array
     * @param c the closure applied while rendering each element
     * @throws IOException if writing the JSON fails
     */
    public void call(String name, Collection coll, @DelegatesTo(StreamingJsonDelegate.class) Closure c) throws IOException {
        call(name, (Iterable) coll, c);
    }
}
public class Buffers {

    /**
     * Reads length and then length bytes into the data buffer, which is grown if needed.
     * Partial reads leave their progress in {@code bufs[0]}/{@code bufs[1]} and the method
     * returns null, so it can be called again when the (non-blocking) channel has more data.
     *
     * @param ch The channel to read data from
     * @return The data buffer (position is 0 and limit is length), or null if not all data could be read.
     * @throws EOFException if the channel reaches end-of-stream mid-frame
     */
    public ByteBuffer readLengthAndData(SocketChannel ch) throws Exception {
        // bufs[0] accumulates the 4-byte length prefix.
        if (bufs[0].hasRemaining() && ch.read(bufs[0]) < 0)
            throw new EOFException();
        if (bufs[0].hasRemaining())
            return null; // length prefix not complete yet
        int len = bufs[0].getInt(0);
        // Create or grow the data buffer when the frame exceeds its capacity.
        if (bufs[1] == null || len > bufs[1].capacity())
            bufs[1] = ByteBuffer.allocate(len);
        bufs[1].limit(len);
        if (bufs[1].hasRemaining() && ch.read(bufs[1]) < 0)
            throw new EOFException();
        if (bufs[1].hasRemaining())
            return null; // frame body not complete yet
        try {
            // NOTE(review): the flipped duplicate shares its backing array with
            // bufs[1], which is reused on the next call — callers must consume
            // it before reading again; confirm this contract.
            return (ByteBuffer) bufs[1].duplicate().flip();
        } finally {
            // Reset both buffers so the next call starts a fresh frame.
            bufs[0].clear();
            bufs[1].clear();
        }
    }
}
public class LombokPropertyDescriptor { /** * Determine if the current { @ link # getField ( ) field } defines a public accessor using * lombok annotations . * @ param env the { @ link MetadataGenerationEnvironment } * @ param getter { @ code true } to look for the read accessor , { @ code false } for the * write accessor * @ return { @ code true } if this field has a public accessor of the specified type */ private boolean hasLombokPublicAccessor ( MetadataGenerationEnvironment env , boolean getter ) { } }
String annotation = ( getter ? LOMBOK_GETTER_ANNOTATION : LOMBOK_SETTER_ANNOTATION ) ; AnnotationMirror lombokMethodAnnotationOnField = env . getAnnotation ( getField ( ) , annotation ) ; if ( lombokMethodAnnotationOnField != null ) { return isAccessLevelPublic ( env , lombokMethodAnnotationOnField ) ; } AnnotationMirror lombokMethodAnnotationOnElement = env . getAnnotation ( getOwnerElement ( ) , annotation ) ; if ( lombokMethodAnnotationOnElement != null ) { return isAccessLevelPublic ( env , lombokMethodAnnotationOnElement ) ; } return ( env . getAnnotation ( getOwnerElement ( ) , LOMBOK_DATA_ANNOTATION ) != null ) ;
public class ST_Reverse { /** * Returns the geometry with vertex order reversed . * @ param geometry Geometry * @ return geometry with vertex order reversed */ public static Geometry reverse ( Geometry geometry ) { } }
if ( geometry == null ) { return null ; } if ( geometry instanceof MultiPoint ) { return reverseMultiPoint ( ( MultiPoint ) geometry ) ; } return geometry . reverse ( ) ;
public class FilePolicyIndex {

    /*
     * (non-Javadoc)
     * @see org.fcrepo.server.security.xacml.pdp.data.PolicyIndex#addPolicy(java.lang.String,
     * java.lang.String)
     */
    @Override
    public String addPolicy(String name, String document) throws PolicyIndexException {
        // Hold the write lock for the entire add so concurrent readers/writers
        // never observe a partially added policy.
        writeLock.lock();
        try {
            logger.debug("Adding policy named: " + name);
            return doAdd(name, document);
        } finally {
            writeLock.unlock();
        }
    }
}