signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class BouncyCastleCertProcessingFactory { /** * Creates a new proxy credential from the specified certificate chain and a private key . * @ see # createCredential ( X509Certificate [ ] , PrivateKey , int , int , GSIConstants . CertificateType , X509ExtensionSet , String ) * createCredential */ public X509Credential createCredential ( X509Certificate [ ] certs , PrivateKey privateKey , int bits , int lifetime , GSIConstants . CertificateType certType ) throws GeneralSecurityException { } }
return createCredential ( certs , privateKey , bits , lifetime , certType , ( X509ExtensionSet ) null , null ) ;
public class LogManager { /** * Logs message using defined appender . * @ param tag Tag with SDKs and versions info . * @ param logLevel Level of the log message . * @ param msg Message to be logged . * @ param exception Optional exception to extract the stacktrace . */ public void log ( final String tag , final int logLevel , final String msg , final Throwable exception ) { } }
if ( aConsole != null ) { aConsole . appendLog ( tag , logLevel , msg , exception ) ; } if ( aFile != null ) { aFile . appendLog ( tag , logLevel , msg , exception ) ; }
public class H2StreamProcessor { /** * Get the number of bytes in this list of byte arrays * @ param listOfByteArrays * @ return the total byte count for this list of byte arrays */ private int getByteCount ( ArrayList < byte [ ] > listOfByteArrays ) { } }
int count = 0 ; for ( byte [ ] byteArray : listOfByteArrays ) { if ( byteArray != null ) { count += byteArray . length ; } } return count ;
public class DefaultVOMSProxyInfoBehaviour { /** * Groups of options for checking the proxy validity */ private void checkValidityOptions ( ProxyInfoParams params , X509Certificate [ ] proxyChain ) { } }
if ( params . containsOption ( PrintOption . PROXY_STRENGTH_VALIDITY ) && ! params . containsOption ( PrintOption . ALL_OPTIONS ) ) { if ( ! getKeySize ( proxyChain [ 0 ] ) . equals ( params . getKeyLength ( ) ) ) throw new VOMSError ( "Proxy key size is not valid" ) ; } if ( params . containsOption ( PrintOption . PROXY_EXISTS ) ) { try { try { proxyChain [ 0 ] . checkValidity ( ) ; } catch ( CertificateNotYetValidException e ) { throw new VOMSError ( "Proxy not found: " + e . getMessage ( ) , e ) ; } } catch ( CertificateExpiredException e ) { throw new VOMSError ( "The current proxy is not valid: " + e . getMessage ( ) , e ) ; } } if ( params . containsOption ( PrintOption . PROXY_TIME_VALIDITY ) ) { int period = 0 ; try { period = TimeUtils . parseLifetimeInHoursAndMinutes ( params . getValidTime ( ) ) ; } catch ( ParseException e ) { throw new VOMSError ( "Wrong validity format, required 'hh:mm': " + e . getMessage ( ) , e ) ; } if ( ! checkTimeValidity ( TimeUtils . getTimeLeft ( proxyChain [ 0 ] . getNotAfter ( ) ) , period ) ) throw new VOMSError ( "Proxy not valid for the specified period" ) ; } if ( params . containsOption ( PrintOption . PROXY_HOURS_VALIDITY ) ) { int period = 0 ; try { period = TimeUtils . parseLifetimeInHours ( params . getValidHours ( ) ) ; } catch ( ParseException e ) { throw new VOMSError ( "Wrong validity format, required 'hh': " + e . getMessage ( ) , e ) ; } if ( ! checkTimeValidity ( TimeUtils . getTimeLeft ( proxyChain [ 0 ] . getNotAfter ( ) ) , period ) ) throw new VOMSError ( "Proxy not valid for the specified period" ) ; }
public class SpiderThread {

    /**
     * Adds the given node as a seed and, when {@link #scanChildren} is
     * {@code true}, recursively adds all of its child nodes as seeds too.
     *
     * @param node the node that will be added as seed and possibly its children
     */
    private void addSeeds(SiteNode node) {
        // Seed the current node first.
        addSeed(node);

        // Recurse into children only when the "scanChildren" option is enabled.
        if (scanChildren) {
            @SuppressWarnings("unchecked")
            Enumeration<TreeNode> children = node.children();
            while (children.hasMoreElements()) {
                addSeeds((SiteNode) children.nextElement());
            }
        }
    }
}
public class DefaultJobProgress { /** * Close current step . */ private void onEndStepProgress ( Object source ) { } }
// Try to find the right step based on the source DefaultJobProgressStep step = findStep ( this . currentStep , source ) ; if ( step == null ) { LOGGER . warn ( "Could not find any matching step for source [{}]. Ignoring EndStepProgress." , source . toString ( ) ) ; return ; } this . currentStep = step ; this . currentStep . finish ( ) ;
public class ComponentNameSpaceConfiguration { /** * Returns a list of EJB Local References ( & lt ; ejb - local - ref > ) configured * for the component . */ public List < ? extends EJBRef > getEJBLocalRefs ( ) { } }
if ( ivJNDIEnvironmentRefs != null && ivJNDIEnvironmentRefs . containsKey ( EJBRef . class ) ) { throw new IllegalStateException ( ) ; } return ivEJBLocalRefs ;
public class Query { /** * Criteria */ @ Override public QueryBuilderWith withProperty ( String property , Object searchValue ) { } }
return withProperty ( property , QueryPropertyComparisonType . EQUALS , searchValue ) ;
public class FileUtils { /** * Delete first n files sorted by property . * @ param files list of files to delete . * @ param sortedBy sorting comparator . * @ param numFiles number of files from list to delete . */ public static void deleteFirst ( @ NonNull File [ ] files , @ NonNull Comparator < File > sortedBy , int numFiles ) { } }
Arrays . sort ( files , sortedBy ) ; int size = Math . min ( files . length , numFiles ) ; for ( int i = 0 ; i < size ; i ++ ) { if ( ! files [ i ] . delete ( ) ) { Log . w ( LOG_TAG , "Failed to delete file: " + files [ i ] ) ; } }
public class LuceneVirtualTableResolver { /** * { @ inheritDoc } */ public Query resolve ( final InternalQName tableName , final boolean includeInheritedTables ) throws InvalidQueryException , RepositoryException { } }
final List < Term > terms = new ArrayList < Term > ( ) ; Query query = null ; try { final String nodeTypeStringName = locationFactory . createJCRName ( tableName ) . getAsString ( ) ; if ( isMixin ( tableName ) ) { // search for nodes where jcr : mixinTypes is set to this mixin Term t = new Term ( FieldNames . PROPERTIES , FieldNames . createNamedValue ( mixinTypesField , nodeTypeStringName ) ) ; terms . add ( t ) ; } else { // search for nodes where jcr : primaryType is set to this type Term t = new Term ( FieldNames . PROPERTIES , FieldNames . createNamedValue ( primaryTypeField , nodeTypeStringName ) ) ; terms . add ( t ) ; } if ( includeInheritedTables ) { // now search for all node types that are derived from base final Set < InternalQName > allTypes = getSubTypes ( tableName ) ; for ( final InternalQName descendantNt : allTypes ) { final String ntName = locationFactory . createJCRName ( descendantNt ) . getAsString ( ) ; Term t ; if ( isMixin ( descendantNt ) ) { // search on jcr : mixinTypes t = new Term ( FieldNames . PROPERTIES , FieldNames . createNamedValue ( mixinTypesField , ntName ) ) ; } else { // search on jcr : primaryType t = new Term ( FieldNames . PROPERTIES , FieldNames . createNamedValue ( primaryTypeField , ntName ) ) ; } terms . add ( t ) ; } } } catch ( final NoSuchNodeTypeException e ) { throw new InvalidQueryException ( e . getMessage ( ) , e ) ; } if ( terms . size ( ) == 0 ) { // exception occured query = new BooleanQuery ( ) ; } else if ( terms . size ( ) == 1 ) { query = new JcrTermQuery ( terms . get ( 0 ) ) ; } else { final BooleanQuery b = new BooleanQuery ( ) ; for ( final Object element : terms ) { // b . add ( new TermQuery ( ( Term ) element ) , Occur . SHOULD ) ; b . add ( new JcrTermQuery ( ( Term ) element ) , Occur . SHOULD ) ; } query = b ; } return query ;
public class RemoteQPConsumerKeyGroup { /** * overriding superclass method */ public void addMember ( JSConsumerKey key ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "addMember" , key ) ; super . addMember ( key ) ; // superclass method does most of the work synchronized ( criteriaLock ) { if ( allCriterias != null ) { SelectionCriteria [ ] newCriterias = new SelectionCriteria [ allCriterias . length + 1 ] ; System . arraycopy ( allCriterias , 0 , newCriterias , 0 , allCriterias . length ) ; allCriterias = newCriterias ; } else { allCriterias = new SelectionCriteria [ 1 ] ; } allCriterias [ allCriterias . length - 1 ] = ( ( RemoteQPConsumerKey ) key ) . getSelectionCriteria ( ) [ 0 ] ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "addMember" ) ;
public class GetBlueprintsResult { /** * An array of key - value pairs that contains information about the available blueprints . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setBlueprints ( java . util . Collection ) } or { @ link # withBlueprints ( java . util . Collection ) } if you want to * override the existing values . * @ param blueprints * An array of key - value pairs that contains information about the available blueprints . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetBlueprintsResult withBlueprints ( Blueprint ... blueprints ) { } }
if ( this . blueprints == null ) { setBlueprints ( new java . util . ArrayList < Blueprint > ( blueprints . length ) ) ; } for ( Blueprint ele : blueprints ) { this . blueprints . add ( ele ) ; } return this ;
public class VirtualNetworkGatewaysInner { /** * The Set VpnclientIpsecParameters operation sets the vpnclient ipsec policy for P2S client of virtual network gateway in the specified resource group through Network resource provider . * @ param resourceGroupName The name of the resource group . * @ param virtualNetworkGatewayName The name of the virtual network gateway . * @ param vpnclientIpsecParams Parameters supplied to the Begin Set vpnclient ipsec parameters of Virtual Network Gateway P2S client operation through Network resource provider . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the VpnClientIPsecParametersInner object */ public Observable < ServiceResponse < VpnClientIPsecParametersInner > > beginSetVpnclientIpsecParametersWithServiceResponseAsync ( String resourceGroupName , String virtualNetworkGatewayName , VpnClientIPsecParametersInner vpnclientIpsecParams ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( virtualNetworkGatewayName == null ) { throw new IllegalArgumentException ( "Parameter virtualNetworkGatewayName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( vpnclientIpsecParams == null ) { throw new IllegalArgumentException ( "Parameter vpnclientIpsecParams is required and cannot be null." ) ; } Validator . validate ( vpnclientIpsecParams ) ; final String apiVersion = "2018-08-01" ; return service . beginSetVpnclientIpsecParameters ( resourceGroupName , virtualNetworkGatewayName , this . client . subscriptionId ( ) , vpnclientIpsecParams , apiVersion , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < VpnClientIPsecParametersInner > > > ( ) { @ Override public Observable < ServiceResponse < VpnClientIPsecParametersInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < VpnClientIPsecParametersInner > clientResponse = beginSetVpnclientIpsecParametersDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class BaseAJAXMoskitoUIAction { /** * Writes specified text to response and flushes the stream . * @ param res * { @ link HttpServletRequest } * @ param text * { @ link String } * @ throws java . io . IOException * if an input or output exception occurred */ private static void writeTextToResponse ( final HttpServletResponse res , final String text ) throws IOException { } }
res . setCharacterEncoding ( UTF_8 ) ; res . setContentType ( TEXT_X_JSON ) ; PrintWriter writer = res . getWriter ( ) ; writer . write ( text ) ; writer . flush ( ) ;
public class DocTrees { /** * Returns a DocTrees object for a given ProcessingEnvironment . * @ param env the processing environment for which to get the Trees object * @ return the DocTrees object * @ throws IllegalArgumentException if the env does not support the Trees API . */ public static DocTrees instance ( ProcessingEnvironment env ) { } }
if ( ! env . getClass ( ) . getName ( ) . equals ( "com.sun.tools.javac.processing.JavacProcessingEnvironment" ) ) throw new IllegalArgumentException ( ) ; return ( DocTrees ) getJavacTrees ( ProcessingEnvironment . class , env ) ;
public class LoggingSubsystemParser { /** * Reads the single { @ code value } attribute from an element . * @ param reader the reader to use * @ return the value of the { @ code value } attribute * @ throws XMLStreamException if the { @ code value } attribute is not present , there is more than one attribute on the * element or there is content within the element . */ static String readValueAttribute ( final XMLExtendedStreamReader reader ) throws XMLStreamException { } }
return readStringAttributeElement ( reader , Attribute . VALUE . getLocalName ( ) ) ;
public class BitFieldArgs { /** * Adds a new { @ link SubCommand } to the { @ code BITFIELD } execution . * @ param subCommand must not be { @ literal null } . */ private BitFieldArgs addSubCommand ( SubCommand subCommand ) { } }
LettuceAssert . notNull ( subCommand , "SubCommand must not be null" ) ; commands . add ( subCommand ) ; return this ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcColumnTypeEnum createIfcColumnTypeEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcColumnTypeEnum result = IfcColumnTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class OrderedChildTypesAttachment { /** * If the resource has ordered child types , those child types will be stored in the attachment . If there are no * ordered child types , this method is a no - op . * @ param resourceAddress the address of the resource * @ param resource the resource which may or may not have ordered children . */ public void addOrderedChildResourceTypes ( PathAddress resourceAddress , Resource resource ) { } }
Set < String > orderedChildTypes = resource . getOrderedChildTypes ( ) ; if ( orderedChildTypes . size ( ) > 0 ) { orderedChildren . put ( resourceAddress , resource . getOrderedChildTypes ( ) ) ; }
public class KeyVaultClientBaseImpl { /** * List all versions of the specified secret . * The full secret identifier and attributes are provided in the response . No values are returned for the secrets . This operations requires the secrets / list permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param secretName The name of the secret . * @ param maxresults Maximum number of results to return in a page . If not specified , the service will return up to 25 results . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < SecretItem > > getSecretVersionsAsync ( final String vaultBaseUrl , final String secretName , final Integer maxresults , final ListOperationCallback < SecretItem > serviceCallback ) { } }
return AzureServiceFuture . fromPageResponse ( getSecretVersionsSinglePageAsync ( vaultBaseUrl , secretName , maxresults ) , new Func1 < String , Observable < ServiceResponse < Page < SecretItem > > > > ( ) { @ Override public Observable < ServiceResponse < Page < SecretItem > > > call ( String nextPageLink ) { return getSecretVersionsNextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ;
public class JFunkWebDriverEventListener { /** * Saves the currently displayed browser window . The page title is used for the filename - * preceded by some identifying information ( thread , counter ) . Pages of the same type are * collected inside the same subdirectory . The subdirectory uses * { @ link SaveOutput # getIdentifier ( ) } for its name . If an alert is present , saving is not * supported and thus skipped . * @ param action * the event which triggered to save the page . Will be included in the filename . * @ param triggeredBy * the object which triggered the event ( e . g . a button or a link ) */ protected void savePage ( final WebDriver driver , final String action , final String triggeredBy ) { } }
try { // this updates the driver ' s window handles , so a subsequent call to // getWindowHandle ( ) fails if the window no longer exists driver . getWindowHandles ( ) ; driver . getWindowHandle ( ) ; } catch ( NoSuchWindowException ex ) { // Window is already closed . Saving the page could cause problems , e . g . // ChromeDriver ould hang . return ; } File moduleArchiveDir = moduleArchiveDirProvider . get ( ) ; if ( moduleArchiveDir == null ) { return ; } if ( config . getBoolean ( JFunkConstants . ARCHIVE_DO_NOT_SAVE_WHEN_ALERT , false ) ) { try { // Saving the page does not work if an alert is present driver . switchTo ( ) . alert ( ) ; log . trace ( "Cannot save page. Alert is present." ) ; return ; } catch ( NoAlertPresentException ex ) { // ignore } catch ( UnsupportedOperationException ex ) { // ignore // HtmlUnit does not support alerts } catch ( Exception ex ) { // ignore } } for ( SaveOutput saveOutput : SaveOutput . values ( ) ) { boolean saveSwitch = saveOutputMap . get ( saveOutput ) ; if ( ! saveSwitch ) { // Saving is disabled by property continue ; } File f = null ; try { f = dumpFileCreatorProvider . get ( ) . createDumpFile ( new File ( moduleArchiveDir , saveOutput . getIdentifier ( ) ) , saveOutput . getExtension ( ) , driver . getCurrentUrl ( ) , action ) ; if ( f == null ) { return ; } switch ( saveOutput ) { case HTML : StringBuilder html = new StringBuilder ( ) ; html . append ( "<!-- Requested URL: " ) ; html . append ( driver . getCurrentUrl ( ) ) ; html . append ( " -->" ) ; html . append ( IOUtils . LINE_SEPARATOR ) ; html . append ( driver . getPageSource ( ) ) ; writeStringToFile ( f , html . toString ( ) , "UTF-8" ) ; copyFile ( f , new File ( moduleArchiveDir , JFunkConstants . LASTPAGE_HTML ) ) ; log . trace ( "Saving page: filename={}, action={}, trigger={}, response={}" , f . getName ( ) , action , triggeredBy , driver . 
getCurrentUrl ( ) ) ; break ; case PNG : if ( driver instanceof TakesScreenshot ) { File tmpFile = ( ( TakesScreenshot ) driver ) . getScreenshotAs ( OutputType . FILE ) ; if ( tmpFile != null ) { copyFile ( tmpFile , f ) ; log . trace ( "Saving page: filename={}, action={}, trigger={}, response={}" , f . getName ( ) , action , triggeredBy , driver . getCurrentUrl ( ) ) ; deleteQuietly ( tmpFile ) ; } } break ; case HTML_VALIDATION : /* * JFunkWebDriver . getPageSource ( ) doesn ' t return the complete page source * e . g . DOCTYPE is missing . Therefore we are using a more complicated way to * retrieve the " real " page source . However , this only works when using * HtmlUnitDriver . */ if ( WebDriverUtils . isHtmlUnitDriver ( driver ) ) { String content = ( ( HtmlPage ) WebDriverUtils . getHtmlUnitDriverWebClient ( driver ) . getCurrentWindow ( ) . getEnclosedPage ( ) ) . getWebResponse ( ) . getContentAsString ( ) ; writeStringToFile ( f , content , "UTF-8" ) ; HtmlValidatorUtil . validateHtml ( f . getParentFile ( ) , config , f ) ; } break ; default : throw new IllegalStateException ( "unknown enum constant" ) ; } } catch ( Exception ex ) { log . error ( "Could not save file: {}. {}" , f , ex . getMessage ( ) ) ; return ; } }
public class NamedArgumentDefinition { /** * surrogate , each instance added to the collection must be populated with any tags and attributes . */ @ SuppressWarnings ( { } }
"rawtypes" , "unchecked" } ) private void setCollectionValues ( final CommandLineArgumentParser commandLineArgumentParser , final List < String > preprocessedValues ) // Note that some of these might be tag surrogates { final Collection c = ( Collection ) getArgumentValue ( ) ; if ( ! commandLineArgumentParser . getAppendToCollectionsParserOption ( ) ) { // if this is a collection then we only want to clear it once at the beginning , before we // process any of the values , unless we ' re in APPEND _ TO _ COLLECTIONS mode c . clear ( ) ; } for ( int i = 0 ; i < preprocessedValues . size ( ) ; i ++ ) { final String stringValue = preprocessedValues . get ( i ) ; if ( stringValue . equals ( NULL_ARGUMENT_STRING ) ) { if ( i != 0 ) { // if a " null " is included that isn ' t the first value for this option , honor it but warn , since it // will clobber any previously set values , and may indicate an unintentional error on the user ' s part logger . warn ( String . format ( "A \"null\" value was detected for an option after values for that option were already set. " + "Clobbering previously set values for this option: %s." , getArgumentAliasDisplayString ( ) ) ) ; } if ( ! isOptional ( ) ) { throw new CommandLineException ( String . format ( "Non \"null\" value must be provided for '%s'" , getArgumentAliasDisplayString ( ) ) ) ; } c . clear ( ) ; } else { // if a collection argument was presented as a tagged argument on the command line , and an expansion // file was provided as the value , propagate the tags to each value from the expansion file final Pair < String , String > normalizedSurrogatePair = getNormalizedTagValuePair ( commandLineArgumentParser , stringValue ) ; final List < String > expandedValues = argumentAnnotation . suppressFileExpansion ( ) ? Collections . singletonList ( normalizedSurrogatePair . getRight ( ) ) : commandLineArgumentParser . expandFromExpansionFile ( this , normalizedSurrogatePair . 
getRight ( ) , preprocessedValues ) ; for ( final String expandedValue : expandedValues ) { final Object actualValue = getValuePopulatedWithTags ( normalizedSurrogatePair . getLeft ( ) , expandedValue ) ; checkArgumentRange ( actualValue ) ; c . add ( actualValue ) ; } } }
public class ExtractPrototypeMemberDeclarations {

    /**
     * Replaces a member declaration with an assignment to the temp prototype
     * object: {@code x.prototype.y = ...} becomes {@code t.y = ...}.
     *
     * @param declar the prototype member declaration being rewritten
     */
    private void replacePrototypeMemberDeclaration(PrototypeMemberDeclaration declar) {
        Node assignment = declar.node.getFirstChild();
        Node lhs = assignment.getFirstChild();
        Node aliasedName = NodeUtil.newQName(
                compiler, PROTOTYPE_ALIAS + "." + declar.memberName, declar.node, declar.memberName);

        // Save the full prototype path on the left-hand side of the assignment
        // for debugging purposes. declar.lhs = x.prototype.y, so the first
        // child of the first child is 'x'.
        Node accessNode = declar.lhs.getFirstFirstChild();
        String originalName = accessNode.getOriginalName();
        String className = originalName != null ? originalName : "?";

        aliasedName.getFirstChild().useSourceInfoFromForTree(lhs);
        aliasedName.putBooleanProp(Node.IS_CONSTANT_NAME, lhs.getBooleanProp(Node.IS_CONSTANT_NAME));
        aliasedName.getFirstChild().setOriginalName(className + ".prototype");

        assignment.replaceChild(lhs, aliasedName);
        compiler.reportChangeToEnclosingScope(aliasedName);
    }
}
public class QuestOWL { /** * This method loads the given ontologies in the system . This will merge * these new ontologies with the existing ones in a set . Then it will * translate the assertions in all the ontologies into a single one , in our * internal representation . * The translation is done using our OWLAPITranslator that gets the TBox * part of the ontologies and filters all the DL - Lite axioms ( RDFS / OWL2QL * and DL - Lite ) . */ private ClassifiedTBox loadOntologies ( OWLOntology ontology ) { } }
/* * We will keep track of the loaded ontologies and translate the TBox * part of them into our internal representation . */ log . debug ( "Load ontologies called. Translating ontologies." ) ; Ontology mergeOntology = owlapiTranslator . translateAndClassify ( ontology ) ; return mergeOntology . tbox ( ) ;
public class StringUtils { /** * A Guava function for converting strings to lowercase . * @ param locale * @ return */ public static Function < String , String > toLowerCaseFunction ( final Locale locale ) { } }
return new Function < String , String > ( ) { @ Override public String apply ( final String s ) { return s . toLowerCase ( locale ) ; } @ Override public String toString ( ) { return "toLowercase(" + locale + ")" ; } } ;
public class Cluster { /** * Wraps a protobuf response . * < p > This method is considered an internal implementation detail and not meant to be used by * applications . */ @ InternalApi public static Cluster fromProto ( com . google . bigtable . admin . v2 . Cluster proto ) { } }
return new Cluster ( proto ) ;
public class StaticMethodInstanceInvocation { /** * implement the visitor to reset the stack * @ param obj * the context object of the currently parsed method */ @ Override public void visitCode ( Code obj ) { } }
Method m = getMethod ( ) ; if ( prescreen ( m ) ) { stack . resetForMethodEntry ( this ) ; popStack . clear ( ) ; super . visitCode ( obj ) ; }
public class EventMention { /** * setter for ldc _ scope - sets * @ generated * @ param v value to set into the feature */ public void setLdc_scope ( LDC_Scope v ) { } }
if ( EventMention_Type . featOkTst && ( ( EventMention_Type ) jcasType ) . casFeat_ldc_scope == null ) jcasType . jcas . throwFeatMissing ( "ldc_scope" , "de.julielab.jules.types.ace.EventMention" ) ; jcasType . ll_cas . ll_setRefValue ( addr , ( ( EventMention_Type ) jcasType ) . casFeatCode_ldc_scope , jcasType . ll_cas . ll_getFSRef ( v ) ) ;
public class OpenApiReader { /** * Returns a URL for the given file . */ private static URL toUrl ( File file ) { } }
try { return file == null ? null : file . toURI ( ) . toURL ( ) ; } catch ( Exception e ) { throw new IllegalArgumentException ( "Can't get URL for file=" + file ) ; }
public class ListPhoneNumbersOptedOutResult { /** * A list of phone numbers that are opted out of receiving SMS messages . The list is paginated , and each page can * contain up to 100 phone numbers . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPhoneNumbers ( java . util . Collection ) } or { @ link # withPhoneNumbers ( java . util . Collection ) } if you want to * override the existing values . * @ param phoneNumbers * A list of phone numbers that are opted out of receiving SMS messages . The list is paginated , and each page * can contain up to 100 phone numbers . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListPhoneNumbersOptedOutResult withPhoneNumbers ( String ... phoneNumbers ) { } }
if ( this . phoneNumbers == null ) { setPhoneNumbers ( new com . amazonaws . internal . SdkInternalList < String > ( phoneNumbers . length ) ) ; } for ( String ele : phoneNumbers ) { this . phoneNumbers . add ( ele ) ; } return this ;
public class ClassUtils { /** * Return a path suitable for use with { @ code ClassLoader . getResource } * ( also suitable for use with { @ code Class . getResource } by prepending a * slash ( ' / ' ) to the return value ) . Built by taking the package of the specified * class file , converting all dots ( ' . ' ) to slashes ( ' / ' ) , adding a trailing slash * if necessary , and concatenating the specified resource name to this . * < br / > As such , this function may be used to build a path suitable for * loading a resource file that is in the same package as a class file , * although { @ link org . springframework . core . io . ClassPathResource } is usually * even more convenient . * @ param clazz the Class whose package will be used as the base * @ param resourceName the resource name to append . A leading slash is optional . * @ return the built - up resource path * @ see ClassLoader # getResource * @ see Class # getResource */ public static String addResourcePathToPackagePath ( Class < ? > clazz , String resourceName ) { } }
Assert . notNull ( resourceName , "Resource name must not be null" ) ; if ( ! resourceName . startsWith ( "/" ) ) { return classPackageAsResourcePath ( clazz ) + "/" + resourceName ; } return classPackageAsResourcePath ( clazz ) + resourceName ;
public class SelectRawHelper { /** * ( non - Javadoc ) * @ see com . abubusoft . kripton . processor . sqlite . SQLiteSelectBuilder . * SelectCodeGenerator # generate ( com . squareup . javapoet . MethodSpec . Builder ) */ @ Override public void generateSpecializedPart ( SQLiteModelMethod method , TypeSpec . Builder classBuilder , MethodSpec . Builder methodBuilder , Set < JQLProjection > fieldList , boolean mapFields ) { } }
methodBuilder . addCode ( "return _cursor;\n" ) ; // methodBuilder . endControlFlow ( ) ;
public class CommerceCurrencyPersistenceImpl { /** * Returns an ordered range of all the commerce currencies where uuid = & # 63 ; and companyId = & # 63 ; . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link CommerceCurrencyModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param uuid the uuid * @ param companyId the company ID * @ param start the lower bound of the range of commerce currencies * @ param end the upper bound of the range of commerce currencies ( not inclusive ) * @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > ) * @ param retrieveFromCache whether to retrieve from the finder cache * @ return the ordered range of matching commerce currencies */ @ Override public List < CommerceCurrency > findByUuid_C ( String uuid , long companyId , int start , int end , OrderByComparator < CommerceCurrency > orderByComparator , boolean retrieveFromCache ) { } }
boolean pagination = true ; FinderPath finderPath = null ; Object [ ] finderArgs = null ; if ( ( start == QueryUtil . ALL_POS ) && ( end == QueryUtil . ALL_POS ) && ( orderByComparator == null ) ) { pagination = false ; finderPath = FINDER_PATH_WITHOUT_PAGINATION_FIND_BY_UUID_C ; finderArgs = new Object [ ] { uuid , companyId } ; } else { finderPath = FINDER_PATH_WITH_PAGINATION_FIND_BY_UUID_C ; finderArgs = new Object [ ] { uuid , companyId , start , end , orderByComparator } ; } List < CommerceCurrency > list = null ; if ( retrieveFromCache ) { list = ( List < CommerceCurrency > ) finderCache . getResult ( finderPath , finderArgs , this ) ; if ( ( list != null ) && ! list . isEmpty ( ) ) { for ( CommerceCurrency commerceCurrency : list ) { if ( ! Objects . equals ( uuid , commerceCurrency . getUuid ( ) ) || ( companyId != commerceCurrency . getCompanyId ( ) ) ) { list = null ; break ; } } } } if ( list == null ) { StringBundler query = null ; if ( orderByComparator != null ) { query = new StringBundler ( 4 + ( orderByComparator . getOrderByFields ( ) . length * 2 ) ) ; } else { query = new StringBundler ( 4 ) ; } query . append ( _SQL_SELECT_COMMERCECURRENCY_WHERE ) ; boolean bindUuid = false ; if ( uuid == null ) { query . append ( _FINDER_COLUMN_UUID_C_UUID_1 ) ; } else if ( uuid . equals ( "" ) ) { query . append ( _FINDER_COLUMN_UUID_C_UUID_3 ) ; } else { bindUuid = true ; query . append ( _FINDER_COLUMN_UUID_C_UUID_2 ) ; } query . append ( _FINDER_COLUMN_UUID_C_COMPANYID_2 ) ; if ( orderByComparator != null ) { appendOrderByComparator ( query , _ORDER_BY_ENTITY_ALIAS , orderByComparator ) ; } else if ( pagination ) { query . append ( CommerceCurrencyModelImpl . ORDER_BY_JPQL ) ; } String sql = query . toString ( ) ; Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( sql ) ; QueryPos qPos = QueryPos . getInstance ( q ) ; if ( bindUuid ) { qPos . add ( uuid ) ; } qPos . add ( companyId ) ; if ( ! 
pagination ) { list = ( List < CommerceCurrency > ) QueryUtil . list ( q , getDialect ( ) , start , end , false ) ; Collections . sort ( list ) ; list = Collections . unmodifiableList ( list ) ; } else { list = ( List < CommerceCurrency > ) QueryUtil . list ( q , getDialect ( ) , start , end ) ; } cacheResult ( list ) ; finderCache . putResult ( finderPath , finderArgs , list ) ; } catch ( Exception e ) { finderCache . removeResult ( finderPath , finderArgs ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return list ;
public class Histogram { /** * Update histogram . */ private void update ( ) { } }
// Recompute the histogram bins and derived statistics from the current data.
// NOTE(review): `values` is treated both as the raw data (binning loop) and,
// after reassignment, as the bin counts fed to the statistics — confirm intent.
int i, n = values.length;
max = 0;
min = n;
total = 0;
// maxVal starts at -Integer.MAX_VALUE (not MIN_VALUE) — off by one, but only
// matters for the degenerate all-equal-extreme case.
int maxVal = -Integer.MAX_VALUE;
int minVal = Integer.MAX_VALUE;
// calculate min and max: track both the smallest/largest index holding a
// non-zero entry (min/max) and the smallest/largest entry value (minVal/maxVal).
for (i = 0; i < n; i++) {
    if (values[i] != 0) {
        // max index with a non-zero value
        if (i > max)
            max = i;
        // min index with a non-zero value
        if (i < min)
            min = i;
        maxVal = Math.max(maxVal, values[i]);
        minVal = Math.min(minVal, values[i]);
        total += values[i];
    }
}
// Bin width over the observed value range. NOTE(review): k is 0 when all
// non-zero entries are equal — then every value lands in bin 0 only.
double k = (maxVal - minVal) / (double) bins;
int[] h = new int[bins];
for (int j = 0; j < values.length; j++) {
    double _min = minVal;
    double _max = _min + k;
    // First interval: closed on both ends so the range minimum is counted.
    if (values[j] >= _min && values[j] <= _max)
        h[0]++;
    _min += k;
    _max += k;
    // Other intervals: half-open (lo, hi] so boundary values are counted once.
    for (int l = 1; l < bins; l++) {
        if (values[j] > _min && values[j] <= _max)
            h[l]++;
        _min += k;
        _max += k;
    }
}
// Replace the raw data with the bin counts and derive summary statistics
// from the binned representation.
this.values = h;
mean = HistogramStatistics.Mean(values);
stdDev = HistogramStatistics.StdDev(values, mean);
median = HistogramStatistics.Median(values);
mode = HistogramStatistics.Mode(values);
entropy = HistogramStatistics.Entropy(values);
public class ClientCommsDiagnosticModule { /** * This method dumps the status of any client conversations that are currently active . It does * this by asking the JFap outbound connection tracker for its list of active conversations and * then has a look at the conversation states . All this information is captured in the FFDC * incident stream * @ param is The incident stream to log information to . */ @ Override protected void dumpJFapClientStatus ( IncidentStream is ) { } }
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.entry(this, tc, "dumpJFapClientStatus");
// Ask the client connection manager for every conversation currently active
// on an outbound connection; a null list means the tracker was unavailable.
ClientConnectionManager ccm = ClientConnectionManager.getRef();
List obc = ccm.getActiveOutboundConversationsForFfdc();
is.writeLine("\n------ Client Conversation Dump ------ ", ">");
if (obc != null) {
    // Build a map of connection -> conversation so that we can output the
    // connection information once per set of conversations.
    final Map<Object, LinkedList<Conversation>> connectionToConversationMap = convertToMap(is, obc);
    // Go through the map and dump out a connection - followed by its conversations
    for (Iterator<Entry<Object, LinkedList<Conversation>>> i = connectionToConversationMap.entrySet().iterator(); i.hasNext();) {
        final Entry<Object, LinkedList<Conversation>> entry = i.next();
        is.writeLine("\nOutbound connection:", entry.getKey());
        LinkedList conversationList = entry.getValue();
        // Drain the list, dumping a one-line summary then full detail per conversation.
        while (!conversationList.isEmpty()) {
            Conversation c = (Conversation) conversationList.removeFirst();
            is.writeLine("\nOutbound Conversation[" + c.getId() + "]: ", c.getFullSummary());
            try {
                dumpClientConversation(is, c);
            } catch (Throwable t) {
                // No FFDC Code Needed
                // A failure dumping one conversation must not abort the whole dump.
                is.writeLine("\nUnable to dump conversation", t);
            }
        }
    }
} else {
    is.writeLine("\nUnable to fetch list of conversations", "");
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
    SibTr.exit(this, tc, "dumpJFapClientStatus");
public class GoogleAuthenticatorTokenCouchDbRepository { /** * Find first by uid , otp pair . * @ param uid uid to search * @ param otp otp to search * @ return token for uid , otp pair */ @ View ( name = "by_uid_otp" , map = "function(doc) { if(doc.token && doc.userId) { emit([doc.userId, doc.token], doc) } }" ) public CouchDbGoogleAuthenticatorToken findOneByUidForOtp ( final String uid , final Integer otp ) { } }
// Query the "by_uid_otp" view keyed on the (uid, otp) pair, capped at one row.
val query = createQuery("by_uid_otp").key(ComplexKey.of(uid, otp)).limit(1);
val matches = db.queryView(query, CouchDbGoogleAuthenticatorToken.class);
// Hand back the single match, or null when no token exists for the pair.
return matches.stream().findFirst().orElse(null);
public class DatasourceTemplate { /** * Execute a statement . */ public void execute ( String statement ) throws SQLException { } }
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Executing SQL statement [" + statement + "]" ) ; } Connection connection = null ; Statement stm = null ; try { connection = ds . getConnection ( ) ; stm = connection . createStatement ( ) ; stm . execute ( statement ) ; } catch ( SQLException e ) { throw e ; } finally { if ( stm != null ) stm . close ( ) ; if ( connection != null ) connection . close ( ) ; }
public class DataSiftHistorics { /** * Check the status of data availability in our archive for the given time period * @ param start the dat from which the archive should be checked * @ param end the up to which the archive should be checked * @ param sources an optional list of data sources that should be queried , e . g . [ tumblr , facebook , . . . ] * @ return a report of the current status / availability of data for the given time period */ public FutureData < HistoricsStatus > status ( DateTime start , DateTime end , String ... sources ) { } }
// Future that will be completed by the HTTP callback with the parsed status.
FutureData<HistoricsStatus> future = new FutureData<>();
URI uri = newParams().forURL(config.newAPIEndpointURI(STATUS));
// Build the POST with start/end expressed in seconds (API expects epoch
// seconds; Joda DateTime supplies millis).
POST request = config.http()
        .POST(uri, new PageReader(newRequestCallback(future, new HistoricsStatus(), config)))
        .form("start", MILLISECONDS.toSeconds(start.getMillis()))
        .form("end", MILLISECONDS.toSeconds(end.getMillis()));
if (sources != null && sources.length > 0) {
    // Join the optional sources into a comma-separated list, dropping the
    // trailing comma added by the loop.
    StringBuilder b = new StringBuilder();
    for (String source : sources) {
        b.append(source).append(",");
    }
    request.form("sources", b.toString().substring(0, b.length() - 1));
}
performRequest(future, request);
return future;
public class RpcStageController { /** * 获取最小一个符合条件的processId , 排除loadedProcessId */ private Long getMinTransformedProcessId ( Long loadedProcessId ) { } }
// Find the smallest eligible processId whose stage is "transform", skipping
// loadedProcessId (the process this node just finished loading).
ProcessMonitor processMonitor = ArbitrateFactory.getInstance(getPipelineId(), ProcessMonitor.class);
List<Long> processIds = processMonitor.getCurrentProcessIds();
// If this node must handle the load of the current process, the rpc request
// will already have delivered the corresponding stage state to this machine
// and stored it in `progress`.
if (!CollectionUtils.isEmpty(processIds) && !CollectionUtils.isEmpty(progress)) {
    // The last successfully loaded processId may be absent from the current
    // list for two reasons:
    // 1. zk has not pushed the update yet, so the current processIds list is stale
    // 2. the processId was deleted, e.g. no data has synced for a long while
    //    and a timer fired against a permanently empty list
    // if ( loadedProcessId ! = null & & ! processIds . contains ( loadedProcessId ) ) {
    // // force one refresh - though it may still read a stale version if the
    // // Hangzhou leader has not yet replicated to the US
    // processIds = processMonitor . getCurrentProcessIds ( true ) ;
    Long result = null;
    // Optimization: if the previous processId's load succeeded on this node,
    // skip it outright. Otherwise, with two processes that both finished the
    // T(ransform) module and trigger L(oad) one by one, the second process
    // would only fire once zookeeper invoked the watcher callback.
    for (Long processId : processIds) {
        if (loadedProcessId == null || processId > loadedProcessId) {
            result = processId;
            break;
        }
    }
    // If no record satisfies processId > loadedProcessId, assume the next
    // processId is simply the previous id + 1, because processIds are only
    // ever assigned in increasing order.
    // (result can only be null here when loadedProcessId != null, so the
    // addition below cannot NPE.)
    if (result == null) {
        result = loadedProcessId + 1;
    }
    if (result != null) {
        StageProgress stage = progress.get(result);
        // Only hand the id back when its recorded stage is "transform";
        // otherwise log why it was rejected.
        if (stage != null && stage.getStage().isTransform()) {
            return result;
        } else {
            logger.info("rpc compute [{}] but stage [{}]", result, stage == null ? null : stage.getStage());
            return null;
        }
    }
}
return null;
public class SecurityCenterClient { /** * Gets the settings for an organization . * < p > Sample code : * < pre > < code > * try ( SecurityCenterClient securityCenterClient = SecurityCenterClient . create ( ) ) { * OrganizationSettingsName name = OrganizationSettingsName . of ( " [ ORGANIZATION ] " ) ; * OrganizationSettings response = securityCenterClient . getOrganizationSettings ( name ) ; * < / code > < / pre > * @ param name Name of the organization to get organization settings for . Its format is * " organizations / [ organization _ id ] / organizationSettings " . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final OrganizationSettings getOrganizationSettings ( OrganizationSettingsName name ) { } }
// Convert the typed resource name (null-safely) into the wire format and
// delegate to the request-based overload.
final String formattedName = (name == null) ? null : name.toString();
final GetOrganizationSettingsRequest request =
        GetOrganizationSettingsRequest.newBuilder().setName(formattedName).build();
return getOrganizationSettings(request);
public class DatatypeConverter { /** * Print units . * @ param value units value * @ return units value */ public static final BigDecimal printUnits ( Number value ) { } }
// A missing value is treated as the multiplicative identity.
if (value == null) {
    return BIGDECIMAL_ONE;
}
// Stored units are scaled by 100; convert back to the decimal representation.
return new BigDecimal(value.doubleValue() / 100);
public class XlsMapper { /** * JavaのオブジェクトをExeclファイルに出力する 。 * < p > 出力するファイルは 、 引数で指定した雛形となるテンプレート用のExcelファイルをもとに出力する 。 < / p > * @ param templateXlsIn 雛形となるExcelファイルの入力 * @ param xlsOut 出力先のストリーム * @ param beanObj 書き込むBeanオブジェクト * @ throws IllegalArgumentException { @ literal templateXlsIn = = null or xlsOut = = null or beanObj = = null } * @ throws XlsMapperException マッピングに失敗した場合 * @ throws IOException テンプレートのファイルの読み込みやファイルの出力に失敗した場合 */ public void save ( final InputStream templateXlsIn , final OutputStream xlsOut , final Object beanObj ) throws XlsMapperException , IOException { } }
// Enforce the documented IllegalArgumentException contract locally instead of
// relying on the delegate to reject nulls with an unspecified exception.
if (templateXlsIn == null) {
    throw new IllegalArgumentException("templateXlsIn should not be null.");
}
if (xlsOut == null) {
    throw new IllegalArgumentException("xlsOut should not be null.");
}
if (beanObj == null) {
    throw new IllegalArgumentException("beanObj should not be null.");
}
// Delegate the actual template-based write to the configured saver.
saver.save(templateXlsIn, xlsOut, beanObj);
public class PathfindableModel { /** * Update reference by updating map object Id . * @ param lastStep The last step . * @ param nextStep The next step . */ private void updateObjectId ( int lastStep , int nextStep ) { } }
// Nothing to update once the path has been fully consumed.
final int lastAvailableStep = getMaxStep();
if (nextStep >= lastAvailableStep) {
    return;
}
if (checkObjectId(path.getX(nextStep), path.getY(nextStep))) {
    // Destination tile is free: advance the object id to the next step.
    takeNextStep(lastStep, nextStep);
} else {
    // Destination tile is occupied: reroute around the obstacle.
    avoidObstacle(nextStep, lastAvailableStep);
}
public class ManipulationUtils { /** * Adds the fields for the model tail and the logger to the class . */ private static void addFields ( CtClass clazz ) throws CannotCompileException , NotFoundException { } }
// Field holding arbitrary extra model data ("tail").
final CtField tailField = CtField.make(
        String.format("private Map %s = new HashMap();", TAIL_FIELD), clazz);
clazz.addField(tailField);
// Per-class static SLF4J logger field.
final String loggerSource = String.format(
        "private static final Logger %s = LoggerFactory.getLogger(%s.class.getName());",
        LOGGER_FIELD, clazz.getName());
clazz.addField(CtField.make(loggerSource, clazz));
public class AbstractSamlProfileHandlerController { /** * Build cas assertion . * @ param authentication the authentication * @ param service the service * @ param registeredService the registered service * @ param attributesToCombine the attributes to combine * @ return the assertion */ protected Assertion buildCasAssertion ( final Authentication authentication , final Service service , final RegisteredService registeredService , final Map < String , List < Object > > attributesToCombine ) { } }
// Resolve the attributes the service is allowed to see and the username to
// release, per the registered service's policies.
val releasePolicy = registeredService.getAttributeReleasePolicy();
val releasedAttributes = releasePolicy.getAttributes(authentication.getPrincipal(), service, registeredService);
val resolvedUsername = registeredService.getUsernameAttributeProvider()
        .resolveUsername(authentication.getPrincipal(), service, registeredService);
val casPrincipal = new AttributePrincipalImpl(resolvedUsername, (Map) releasedAttributes);
// Merge the authentication attributes with the extra ones supplied by the caller.
val mergedAuthnAttributes = new LinkedHashMap<>(authentication.getAttributes());
mergedAuthnAttributes.putAll(attributesToCombine);
return new AssertionImpl(casPrincipal,
        DateTimeUtils.dateOf(authentication.getAuthenticationDate()),
        null,
        DateTimeUtils.dateOf(authentication.getAuthenticationDate()),
        (Map) mergedAuthnAttributes);
public class UrlSchemeRegistry { /** * Used to register a handler for a scheme . The actual handler used will in fact be a runtime generated * subclass of handlerType in order to abide by the naming rules for URL scheme handlers . * @ param scheme scheme name to associate handlerType with * @ param handlerType non - final class with a no - arg public constructor which will create handlers * for scheme */ public static void register ( final String scheme , Class < ? extends URLStreamHandler > handlerType ) { } }
// Reject duplicate registrations: Set.add returns false when already present.
if (!registeredSchemes.add(scheme)) {
    throw new IllegalStateException("a scheme has already been registered for " + scheme);
}
// Ensure the JVM searches our generated package when resolving URL handlers.
registerPackage("org.skife.url.generated");
// Generate a subclass of handlerType named exactly
// org.skife.url.generated.<scheme>.Handler, which is the class name the URL
// machinery looks up for the scheme.
Enhancer e = new Enhancer();
e.setNamingPolicy(new NamingPolicy() {
    @Override
    public String getClassName(String prefix, String source, Object key, Predicate names) {
        return "org.skife.url.generated." + scheme + ".Handler";
    }
});
e.setSuperclass(handlerType);
e.setCallbackType(NoOp.class);
// Loading the class is sufficient; instances are created on demand by the
// URL machinery via the no-arg constructor.
e.createClass();
public class GPXLine { /** * Set a link to additional information about the route or the track . * @ param attributes The current attributes being parsed */ public final void setLink ( Attributes attributes ) { } }
lineValues [ GpxMetadata . LINELINK_HREF ] = attributes . getValue ( GPXTags . HREF ) ;
public class CMAApiKey { /** * Add environment ids to the api key . * @ param environment a new environment to be affected by this key . * A link for the payload will be created automatically . * @ return the api key for easy chaining . */ public CMAApiKey addEnvironment ( String environment ) { } }
environments . add ( new CMALink ( CMAType . Environment ) . setId ( environment ) ) ; return this ;
public class OpenFilePOptions { /** * < code > optional . alluxio . grpc . file . ReadPType readType = 1 ; < / code > */ public alluxio . grpc . ReadPType getReadType ( ) { } }
// Map the raw wire value back to its enum constant; unrecognised values fall
// back to NO_CACHE.
final alluxio.grpc.ReadPType mapped = alluxio.grpc.ReadPType.valueOf(readType_);
return mapped != null ? mapped : alluxio.grpc.ReadPType.NO_CACHE;
public class MapFuncSup { /** * define a function to deal with each entry in the map * @ param func a function takes in each entry from map * @ return return ' last loop value ' . < br > * check * < a href = " https : / / github . com / wkgcass / Style / " > tutorial < / a > for * more info about ' last loop value ' */ @ SuppressWarnings ( "unchecked" ) public < R > R forEach ( VFunc2 < K , V > func ) { } }
// Adapt the void-returning two-arg function into the Style function type and
// delegate to the richer forEach overload; the unchecked cast surfaces that
// overload's "last loop value" as R.
return (R) forEach(Style.$(func));
public class DatamodelConverter { /** * Copies a { @ link TimeValue } . * @ param object * object to copy * @ return copied object */ public TimeValue copy ( TimeValue object ) { } }
// Deep-copy by rebuilding the TimeValue from all of its components through
// the configured factory, so the result belongs to this converter's
// implementation rather than the source object's.
return dataObjectFactory.getTimeValue(object.getYear(), object.getMonth(),
        object.getDay(), object.getHour(), object.getMinute(),
        object.getSecond(), object.getPrecision(),
        object.getBeforeTolerance(), object.getAfterTolerance(),
        object.getTimezoneOffset(), object.getPreferredCalendarModel());
public class StringHelper { /** * Take a concatenated String and return a { @ link Set } of all elements in the * passed string , using specified separator string . * @ param sSep * The separator to use . May not be < code > null < / code > . * @ param sElements * The concatenated String to convert . May be < code > null < / code > or empty . * @ return The { @ link Set } represented by the passed string . Never * < code > null < / code > . If the passed input string is < code > null < / code > or * " " an empty list is returned . */ @ Nonnull @ ReturnsMutableCopy public static CommonsHashSet < String > getExplodedToSet ( @ Nonnull final String sSep , @ Nullable final String sElements ) { } }
return getExploded ( sSep , sElements , - 1 , new CommonsHashSet < > ( ) ) ;
public class AppServiceCertificateOrdersInner { /** * Verify domain ownership for this certificate order . * Verify domain ownership for this certificate order . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param certificateOrderName Name of the certificate order . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void resendRequestEmails ( String resourceGroupName , String certificateOrderName ) { } }
// Fire the async service call and block until it completes; body() is invoked
// only to surface any service error, the (void) result itself is discarded.
resendRequestEmailsWithServiceResponseAsync(resourceGroupName, certificateOrderName).toBlocking().single().body();
public class Configuration { /** * region Helpers */ private boolean getBoolean ( String key , boolean defaultValue ) { } }
try { return optBoolean ( key , defaultValue ) ; } catch ( Exception e ) { ApptentiveLog . e ( e , "Exception while getting boolean key '%s'" , key ) ; logException ( e ) ; return defaultValue ; }
public class HScreenField { /** * Display this field in html input format . * @ param out The html out stream . * @ param strFieldDesc The field description . * @ param strFieldName The field name . * @ param strSize The control size . * @ param strMaxSize The string max size . * @ param strValue The default value . * @ param strControlType The control type . * @ param iHtmlAttribures The attributes . */ public void printInputControl ( PrintWriter out , String strFieldDesc , String strFieldName , String strSize , String strMaxSize , String strValue , String strControlType , int iHtmlAttributes ) { } }
out . println ( "<td><input type=\"" + strControlType + "\" name=\"" + strFieldName + "\" size=\"" + strSize + "\" maxlength=\"" + strMaxSize + "\" value=\"" + strValue + "\"/></td>" ) ;
public class DescribeEnvironmentsRequest { /** * The IDs of individual environments to get information about . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setEnvironmentIds ( java . util . Collection ) } or { @ link # withEnvironmentIds ( java . util . Collection ) } if you want * to override the existing values . * @ param environmentIds * The IDs of individual environments to get information about . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeEnvironmentsRequest withEnvironmentIds ( String ... environmentIds ) { } }
// Lazily initialise the backing list via the setter (which controls the
// concrete list type), then append every supplied id.
if (this.environmentIds == null) {
    setEnvironmentIds(new java.util.ArrayList<String>(environmentIds.length));
}
java.util.Collections.addAll(this.environmentIds, environmentIds);
return this;
public class PdfUtilities { /** * Splits PDF . * @ deprecated As of Release 3.0. * @ param inputPdfFile input file * @ param outputPdfFile output file * @ param firstPage begin page * @ param lastPage end page */ public static void splitPdf ( String inputPdfFile , String outputPdfFile , String firstPage , String lastPage ) { } }
if ( firstPage . trim ( ) . isEmpty ( ) ) { firstPage = "0" ; } if ( lastPage . trim ( ) . isEmpty ( ) ) { lastPage = "0" ; } splitPdf ( new File ( inputPdfFile ) , new File ( outputPdfFile ) , Integer . parseInt ( firstPage ) , Integer . parseInt ( lastPage ) ) ;
public class FileSystemConnector { /** * Utility method for creating a { @ link BinaryValue } for the given { @ link File } object . Subclasses should rarely override this * method . * @ param file the file ; may not be null * @ return the BinaryValue ; never null */ protected ExternalBinaryValue binaryFor ( File file ) { } }
try {
    return createBinaryValue(file);
} catch (RuntimeException re) {
    // Already unchecked: propagate as-is.
    throw re;
} catch (Throwable t) {
    // Wrap checked throwables so callers always see an unchecked type.
    throw new RuntimeException(t);
}
public class MutableHashTable { /** * Gets the next buffer to be used with the hash - table , either for an in - memory partition , or for the * table buckets . This method returns < tt > null < / tt > , if no more buffer is available . Spilling a partition * may free new buffers then . * @ return The next buffer to be used by the hash - table , or null , if no buffer remains . * @ throws IOException Thrown , if the thread is interrupted while grabbing the next buffer . The I / O * exception replaces the < tt > InterruptedException < / tt > to consolidate the exception * signatures . */ final MemorySegment getNextBuffer ( ) { } }
// check if the list directly offers memory int s = this . availableMemory . size ( ) ; if ( s > 0 ) { return this . availableMemory . remove ( s - 1 ) ; } // check if there are write behind buffers that actually are to be used for the hash table if ( this . writeBehindBuffersAvailable > 0 ) { // grab at least one , no matter what MemorySegment toReturn ; try { toReturn = this . writeBehindBuffers . take ( ) ; } catch ( InterruptedException iex ) { throw new RuntimeException ( "Hybrid Hash Join was interrupted while taking a buffer." ) ; } this . writeBehindBuffersAvailable -- ; // grab as many more buffers as are available directly MemorySegment currBuff = null ; while ( this . writeBehindBuffersAvailable > 0 && ( currBuff = this . writeBehindBuffers . poll ( ) ) != null ) { this . availableMemory . add ( currBuff ) ; this . writeBehindBuffersAvailable -- ; } return toReturn ; } else { // no memory available return null ; }
public class SimpleSourcedTokenizer { /** * Return tokenized version of a string . */ public SourcedToken [ ] sourcedTokenize ( String input , String source ) { } }
// Tokenize the input and attach the source to every token, assigning each
// distinct (value, source) pair a stable integer id from tokMap.
Token[] tokens = tokenize(input);
SourcedToken[] sourcedTokens = new SourcedToken[tokens.length];
for (int i = 0; i < tokens.length; i++) {
    // Composite key: token value qualified by its source.
    String key = tokens[i].getValue() + "@" + source;
    // Single lookup instead of the original get-then-get, and
    // Integer.valueOf instead of the deprecated new Integer(...) constructor.
    Integer id = (Integer) tokMap.get(key);
    if (id == null) {
        id = Integer.valueOf(++nextId);
        tokMap.put(key, id);
    }
    sourcedTokens[i] = new BasicSourcedToken(id.intValue(), tokens[i].getValue(), source);
}
return sourcedTokens;
public class MultiWordMatcher { /** * Load the dictionaries . * @ param props * the properties object * @ throws IOException * if io problems */ private void loadDictionary ( final Properties props ) throws IOException { } }
dictionary = new HashMap < String , String > ( ) ; final String lang = props . getProperty ( "language" ) ; final String resourcesDirectory = props . getProperty ( "resourcesDirectory" ) ; final InputStream dictInputStream = getMultiWordDict ( lang , resourcesDirectory ) ; if ( dictInputStream == null ) { final String resourcesLocation = resourcesDirectory == null ? "src/main/resources" : resourcesDirectory ; System . err . println ( "ERROR: Not multiword dictionary for language " + lang + " in " + resourcesLocation + "!!" ) ; System . exit ( 1 ) ; } final BufferedReader breader = new BufferedReader ( new InputStreamReader ( dictInputStream , Charset . forName ( "UTF-8" ) ) ) ; String line ; while ( ( line = breader . readLine ( ) ) != null ) { final String [ ] lineArray = tabPattern . split ( line ) ; if ( lineArray . length == 4 ) { final Matcher lineMatcher = linePattern . matcher ( lineArray [ 0 ] . toLowerCase ( ) ) ; dictionary . put ( lineMatcher . replaceAll ( " " ) , lineArray [ 2 ] ) ; } else { System . err . println ( "WARNING: line starting with " + lineArray [ 0 ] + " is not well-formed; skipping!!" ) ; } }
public class AccessControlListHandler { /** * package private for unit tests */ boolean isAllowed ( String attribute ) { } }
// A null attribute matches no rule: fall straight through to the default.
if (attribute == null) {
    return defaultAllow;
}
// First matching rule wins; a deny rule rejects, any other rule permits.
for (AclMatch rule : acl) {
    if (rule.matches(attribute)) {
        return !rule.isDeny();
    }
}
// No rule matched: apply the configured default policy.
return defaultAllow;
public class LettuceFutures { /** * Wait until futures are complete or the supplied timeout is reached . Commands are not canceled ( in contrast to * { @ link # awaitOrCancel ( RedisFuture , long , TimeUnit ) } ) when the timeout expires . * @ param timeout Maximum time to wait for futures to complete . * @ param unit Unit of time for the timeout . * @ param futures Futures to wait for . * @ return { @ literal true } if all futures complete in time , otherwise { @ literal false } */ public static boolean awaitAll ( long timeout , TimeUnit unit , Future < ? > ... futures ) { } }
try {
    // Remaining time budget shared across all futures: each get() consumes
    // part of it, so the overall wait never exceeds the requested timeout.
    long nanos = unit.toNanos(timeout);
    long time = System.nanoTime();
    for (Future<?> f : futures) {
        // Budget exhausted before all futures completed.
        if (nanos < 0) {
            return false;
        }
        f.get(nanos, TimeUnit.NANOSECONDS);
        // Deduct the elapsed time of this wait from the budget.
        long now = System.nanoTime();
        nanos -= now - time;
        time = now;
    }
    return true;
} catch (RuntimeException e) {
    throw e;
} catch (TimeoutException e) {
    // Per contract: timeout means "not all complete", commands are NOT canceled.
    return false;
} catch (ExecutionException e) {
    // Unwrap Redis command failures into the library's exception hierarchy.
    if (e.getCause() instanceof RedisCommandExecutionException) {
        throw ExceptionFactory.createExecutionException(e.getCause().getMessage(), e.getCause());
    }
    throw new RedisException(e.getCause());
} catch (InterruptedException e) {
    // Restore the interrupt flag before translating the exception.
    Thread.currentThread().interrupt();
    throw new RedisCommandInterruptedException(e);
} catch (Exception e) {
    throw ExceptionFactory.createExecutionException(null, e);
}
public class AbstractAttributeDefinitionBuilder { /** * Sets a { @ link AttributeDefinition # getUndefinedMetricValue ( ) default value } to use for the * metric if no runtime value is available ( e . g . we are a server running in admin - only mode ) . * @ param undefinedMetricValue the default value , or { @ code null } if no default should be used * @ return a builder that can be used to continue building the attribute definition */ public BUILDER setUndefinedMetricValue ( ModelNode undefinedMetricValue ) { } }
// An undefined ModelNode is normalised to null, i.e. "no default supplied".
if (undefinedMetricValue == null || !undefinedMetricValue.isDefined()) {
    this.undefinedMetricValue = null;
} else {
    this.undefinedMetricValue = undefinedMetricValue;
}
return (BUILDER) this;
public class StaticInvocation { /** * < p > send . < / p > * @ param args a { @ link java . lang . String } object . * @ return a { @ link java . lang . Object } object . * @ throws java . lang . Exception if any . */ public Object send ( String ... args ) throws Exception { } }
// Allow invocation of non-public targets, then reflectively invoke with the
// converted argument list.
method.setAccessible(true);
try {
    return method.invoke(target, convert(args));
} catch (InvocationTargetException ite) {
    // Surface the real exception raised inside the invoked method.
    throw new SystemUnderDevelopmentException(ite.getCause());
}
public class RegexParser { /** * factor : : = ( ' ^ ' | ' $ ' | ' \ A ' | ' \ Z ' | ' \ z ' | ' \ b ' | ' \ B ' | ' \ < ' | ' \ > ' * | atom ( ( ' * ' | ' + ' | ' ? ' | minmax ) ' ? ' ? ) ? ) * | ' ( ? = ' regex ' ) ' | ' ( ? ! ' regex ' ) ' | ' ( ? & lt ; = ' regex ' ) ' | ' ( ? & lt ; ! ' regex ' ) ' * minmax : : = ' { ' min ( ' , ' max ? ) ? ' } ' * min : : = [ 0-9 ] + * max : : = [ 0-9 ] + */ Token parseFactor ( ) throws ParseException { } }
// Parse one "factor": either an anchor/assertion token, or an atom followed
// by an optional quantifier (* + ? or {min,max}, each optionally non-greedy).
int ch = this.read();
Token tok;
switch (ch) {
case T_CARET:
    return this.processCaret();
case T_DOLLAR:
    return this.processDollar();
case T_LOOKAHEAD:
    return this.processLookahead();
case T_NEGATIVELOOKAHEAD:
    return this.processNegativelookahead();
case T_LOOKBEHIND:
    return this.processLookbehind();
case T_NEGATIVELOOKBEHIND:
    return this.processNegativelookbehind();
case T_COMMENT:
    // Comments contribute nothing to the pattern.
    this.next();
    return Token.createEmpty();
case T_BACKSOLIDUS:
    // Escaped anchors: \A \Z \z \b \B \< \>
    switch (this.chardata) {
    case 'A':
        return this.processBacksolidus_A();
    case 'Z':
        return this.processBacksolidus_Z();
    case 'z':
        return this.processBacksolidus_z();
    case 'b':
        return this.processBacksolidus_b();
    case 'B':
        return this.processBacksolidus_B();
    case '<':
        return this.processBacksolidus_lt();
    case '>':
        return this.processBacksolidus_gt();
    }
    // through down
    // (not an anchor escape: fall through and let parseAtom handle it)
}
tok = this.parseAtom();
ch = this.read();
// Optional quantifier following the atom.
switch (ch) {
case T_STAR:
    return this.processStar(tok);
case T_PLUS:
    return this.processPlus(tok);
case T_QUESTION:
    return this.processQuestion(tok);
case T_CHAR:
    // Possible {min,max} quantifier, scanned manually from the raw regex.
    if (this.chardata == '{' && this.offset < this.regexlen) {
        int off = this.offset; // this.offset -> next of '{'
        int min = 0, max = -1;
        // Parse min: at least one digit required.
        if ((ch = this.regex.charAt(off++)) >= '0' && ch <= '9') {
            min = ch - '0';
            while (off < this.regexlen && (ch = this.regex.charAt(off++)) >= '0' && ch <= '9') {
                min = min * 10 + ch - '0';
                // Negative means the int overflowed.
                if (min < 0)
                    throw ex("parser.quantifier.5", this.offset);
            }
        } else {
            throw ex("parser.quantifier.1", this.offset);
        }
        max = min;
        if (ch == ',') {
            if (off >= this.regexlen) {
                throw ex("parser.quantifier.3", this.offset);
            } else if ((ch = this.regex.charAt(off++)) >= '0' && ch <= '9') {
                max = ch - '0'; // {min,max}
                while (off < this.regexlen && (ch = this.regex.charAt(off++)) >= '0' && ch <= '9') {
                    max = max * 10 + ch - '0';
                    // Negative means the int overflowed.
                    if (max < 0)
                        throw ex("parser.quantifier.5", this.offset);
                }
                if (min > max)
                    throw ex("parser.quantifier.4", this.offset);
            } else { // assume {min,}
                max = -1;
            }
        }
        // The scan must have stopped on the closing brace.
        if (ch != '}')
            throw ex("parser.quantifier.2", this.offset);
        // A trailing '?' makes the closure non-greedy.
        if (this.checkQuestion(off)) { // off -> next of '}'
            tok = Token.createNGClosure(tok);
            this.offset = off + 1;
        } else {
            tok = Token.createClosure(tok);
            this.offset = off;
        }
        tok.setMin(min);
        tok.setMax(max);
        // System.err.println("CLOSURE: " + min + ", " + max);
        this.next();
    }
}
return tok;
public class SimpleThreadPool { /** * Terminate any worker threads in this thread group . * < p > Jobs currently in progress will complete . */ @ Override public void shutdown ( ) { } }
// All worker bookkeeping is guarded by nextRunnableLock.
synchronized (nextRunnableLock) {
    isShutdown = true;
    // Pool was never initialized (or already torn down): nothing to stop.
    if (workers == null) {
        return;
    }
    // signal each worker thread to shut down
    Iterator<WorkerThread> workerThreads = workers.iterator();
    while (workerThreads.hasNext()) {
        WorkerThread wt = workerThreads.next();
        JobRunShell jobRunShell = wt.getRunnable();
        if (jobRunShell != null) {
            // Worker is mid-job; it will finish before exiting.
            log.info("Waiting for Job to shutdown: {}", wt.getRunnable().getJobName());
        }
        wt.shutdown();
        availWorkers.remove(wt);
    }
    // Active worker threads will shut down after finishing their
    // current job. Wake up any threads blocked on the lock so they observe
    // the shutdown flag.
    nextRunnableLock.notifyAll();
}
public class Client { /** * { @ inheritDoc } */ public boolean hasPermission ( IConnection conn , String permissionName ) { } }
final Collection < String > permissions = getPermissions ( conn ) ; return permissions . contains ( permissionName ) ;
public class TreeKernel { /** * s函数 * @ param t1 * @ param t2 * @ return */ private float getWordScore ( DependencyTree t1 , DependencyTree t2 ) { } }
// Word similarity score: +1 for synonyms/equal words, -1 for antonyms, 0 otherwise.
if (wg == null) {
    // No word graph available: fall back to plain string equality.
    return t1.word.equals(t2.word) ? 1 : 0;
}
if (wg.isSym(t1.word, t2.word)) {
    return 1;
}
if (wg.isAntonym(t1.word, t2.word)) {
    return -1;
}
return 0;
public class TabbedPaneDemo { /** * Sets up the tab - wise validation . * A tab is considered valid only if all of its fields are valid . * @ param tabbedPane Tabbed pane holding the tab on which the validation should be installed . * @ param i Index of the tab in the tabbed pane . * @ param tabResultsProperty Results of the individual fields inside the tab . * @ return Property representing the result of the tab - wise validation and that can be used for tabbed pane - wise * validation . */ private ReadableProperty < Boolean > installTabValidation ( JTabbedPane tabbedPane , int i , ReadableProperty < Collection < Boolean > > tabResultsProperty ) { } }
// Tab is valid only if all fields are valid SimpleBooleanProperty tabResultProperty = new SimpleBooleanProperty ( ) ; read ( tabResultsProperty ) . transform ( new AndBooleanAggregator ( ) ) . write ( tabResultProperty ) ; // Handle tab - wise result read ( tabResultProperty ) . write ( new ResultHandlerProperty < Boolean > ( new TabIconBooleanFeedback ( tabbedPane , i , "This tab contains errors." ) ) ) ; // Tabbed pane will be valid only if all tabs are valid return tabResultProperty ;
public class FFmpegMuxer { /** * Adds the SPS + PPS data to the ByteBuffer containing a h264 keyframe * @ param encodedData * @ param bufferInfo */ private void packageH264Keyframe ( ByteBuffer encodedData , MediaCodec . BufferInfo bufferInfo ) { } }
// Position past the SPS/PPS metadata already stored at the head of
// mH264Keyframe, then append the encoded keyframe payload after it.
mH264Keyframe.position(mH264MetaSize);
// NOTE(review): put() throws BufferOverflowException when encodedData exceeds
// the remaining capacity of mH264Keyframe — confirm the buffer is sized for
// the largest keyframe the encoder can emit.
mH264Keyframe.put(encodedData); // BufferOverflow
public class BitcoindeAdapters { /** * Adapt a org . knowm . xchange . bitcoinde . dto . marketdata . BitcoindeTrade [ ] object to a Trades object . * @ param bitcoindeTradesWrapper Exchange specific trades * @ param currencyPair ( e . g . BTC / USD ) * @ return The XChange Trades */ public static Trades adaptTrades ( BitcoindeTradesWrapper bitcoindeTradesWrapper , CurrencyPair currencyPair ) { } }
List < Trade > trades = new ArrayList < > ( ) ; long lastTradeId = 0 ; for ( BitcoindeTrade bitcoindeTrade : bitcoindeTradesWrapper . getTrades ( ) ) { final long tid = bitcoindeTrade . getTid ( ) ; if ( tid > lastTradeId ) { lastTradeId = tid ; } trades . add ( new Trade ( null , bitcoindeTrade . getAmount ( ) , currencyPair , bitcoindeTrade . getPrice ( ) , DateUtils . fromMillisUtc ( bitcoindeTrade . getDate ( ) * 1000L ) , String . valueOf ( tid ) ) ) ; } return new Trades ( trades , lastTradeId , TradeSortType . SortByID ) ;
public class EndpointTransformerImpl { /** * { @ inheritDoc } */ @ Override public byte [ ] fromEndpoint ( Endpoint endpoint , long lastTimeStarted , long lastTimeStopped ) throws ServiceLocatorException { } }
// Build the persistent EndpointData representation and serialize it to bytes.
return serialize ( createEndpointData ( endpoint , lastTimeStarted , lastTimeStopped ) ) ;
public class UtilImpl_IdentityStringSet { /** * { @ inheritDoc } */ @ Override public boolean add ( String i_newElement ) { } }
// Reference-equality scan: this set stores interned strings, so == doubles as
// equals() here (see the inline note below). Returns false when the exact object
// is already present; otherwise grows the backing array if full and appends.
for ( int offset = 0 ; offset < this . size ; offset ++ ) { // Note that this LOOKS like improper string comparison , but since we are using interned strings // ( i _ strings ) it ' s OK . The entire point of this data structure is that passed in strings are // object - equal in addition to String equal . // This has been added to findbugs ignore . if ( this . storage [ offset ] == i_newElement ) { return false ; } } if ( this . size == this . storage . length ) { this . storage = growStorage ( ) ; } this . storage [ this . size ++ ] = i_newElement ; return true ;
public class AtomContainerManipulator { /** * Returns the sum of bond orders , where a single bond counts as one * < i > single bond equivalent < / i > , a double as two , etc . * Bonds whose order is unset ( null ) are ignored . * @ param container the container whose bonds are summed * @ return the total single bond equivalent count */ public static int getSingleBondEquivalentSum ( IAtomContainer container ) { } }
// Sum the numeric value of every bond order, skipping bonds with no order set.
int sum = 0 ; for ( IBond bond : container . bonds ( ) ) { IBond . Order order = bond . getOrder ( ) ; if ( order != null ) { sum += order . numeric ( ) ; } } return sum ;
public class HiveJsonSerDe { /** * Indicates how you want Kinesis Data Firehose to parse the date and timestamps that may be present in your input * data JSON . To specify these format strings , follow the pattern syntax of JodaTime ' s DateTimeFormat format * strings . For more information , see < a * href = " https : / / www . joda . org / joda - time / apidocs / org / joda / time / format / DateTimeFormat . html " > Class DateTimeFormat < / a > . * You can also use the special value < code > millis < / code > to parse timestamps in epoch milliseconds . If you don ' t * specify a format , Kinesis Data Firehose uses < code > java . sql . Timestamp : : valueOf < / code > by default . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setTimestampFormats ( java . util . Collection ) } or { @ link # withTimestampFormats ( java . util . Collection ) } if you * want to override the existing values . * @ param timestampFormats * Indicates how you want Kinesis Data Firehose to parse the date and timestamps that may be present in your * input data JSON . To specify these format strings , follow the pattern syntax of JodaTime ' s DateTimeFormat * format strings . For more information , see < a * href = " https : / / www . joda . org / joda - time / apidocs / org / joda / time / format / DateTimeFormat . html " > Class * DateTimeFormat < / a > . You can also use the special value < code > millis < / code > to parse timestamps in epoch * milliseconds . If you don ' t specify a format , Kinesis Data Firehose uses * < code > java . sql . Timestamp : : valueOf < / code > by default . * @ return Returns a reference to this object so that method calls can be chained together . */ public HiveJsonSerDe withTimestampFormats ( String ... timestampFormats ) { } }
if ( this . timestampFormats == null ) { setTimestampFormats ( new java . util . ArrayList < String > ( timestampFormats . length ) ) ; } for ( String ele : timestampFormats ) { this . timestampFormats . add ( ele ) ; } return this ;
public class ClientRequestResponseChannel { /** * Polls the underlying socket channel once for readable response data . * Does nothing once the channel has been closed ; read failures are logged , not rethrown . */ @ Override public void probeChannel ( ) { } }
// Bail out silently after close; otherwise fetch the prepared channel
// (may be null, e.g. while still connecting) and consume whatever is readable.
if ( closed ) return ; try { final SocketChannel channel = preparedChannel ( ) ; if ( channel != null ) { readConsume ( channel ) ; } } catch ( IOException e ) { logger . log ( "Failed to read channel selector for " + address + " because: " + e . getMessage ( ) , e ) ; }
public class Sign { /** * Decompress a compressed public key ( x co - ord and low - bit of y - coord ) . * @ param xBN the x coordinate * @ param yBit true when the y coordinate is odd ( SEC1 prefix 0x03 ) , false when even ( prefix 0x02 ) * @ return the decompressed curve point */ private static ECPoint decompressKey ( BigInteger xBN , boolean yBit ) { } }
// Re-create the SEC1 compressed encoding (one prefix byte + x bytes) and let the
// curve implementation recover y from the parity bit during decodePoint.
X9IntegerConverter x9 = new X9IntegerConverter ( ) ; byte [ ] compEnc = x9 . integerToBytes ( xBN , 1 + x9 . getByteLength ( CURVE . getCurve ( ) ) ) ; compEnc [ 0 ] = ( byte ) ( yBit ? 0x03 : 0x02 ) ; return CURVE . getCurve ( ) . decodePoint ( compEnc ) ;
public class Filters { /** * A predicate usable as a filter ( element ) of a { @ link org . parboiled . parserunners . TracingParseRunner } . * Enables printing of rule tracing log messages for all matched rules . * @ return a predicate */ public static Predicate < Tuple2 < Context < ? > , Boolean > > onlyMatches ( ) { } }
// tuple.b carries the "matched" flag supplied by the tracing runner; keeping only
// true values restricts the trace log to successfully matched rules.
return new Predicate < Tuple2 < Context < ? > , Boolean > > ( ) { public boolean apply ( Tuple2 < Context < ? > , Boolean > tuple ) { return tuple . b ; } } ;
public class HttpClientRestEndpoint { /** * Splice base URL and URL of resource * @ param resource REST Resource Path * @ param parameters Map of query parameters * @ return Absolute URL to the REST Resource including server and port * @ throws RestEndpointIOException In case of incorrect URL format */ final URI spliceUrl ( String resource , Map < String , String > parameters ) throws RestEndpointIOException { } }
try { URIBuilder builder ; if ( ! Strings . isNullOrEmpty ( baseUrl ) ) { builder = new URIBuilder ( baseUrl ) ; builder . setPath ( builder . getPath ( ) + resource ) ; } else { builder = new URIBuilder ( resource ) ; } for ( Entry < String , String > parameter : parameters . entrySet ( ) ) { builder . addParameter ( parameter . getKey ( ) , parameter . getValue ( ) ) ; } return builder . build ( ) ; } catch ( URISyntaxException e ) { throw new RestEndpointIOException ( "Unable to builder URL with base url '" + baseUrl + "' and resouce '" + resource + "'" , e ) ; }
public class AwsSecurityFindingFilters { /** * The key name associated with the instance . * @ param resourceAwsEc2InstanceKeyName * The key name associated with the instance . */ public void setResourceAwsEc2InstanceKeyName ( java . util . Collection < StringFilter > resourceAwsEc2InstanceKeyName ) { } }
if ( resourceAwsEc2InstanceKeyName == null ) { this . resourceAwsEc2InstanceKeyName = null ; return ; } this . resourceAwsEc2InstanceKeyName = new java . util . ArrayList < StringFilter > ( resourceAwsEc2InstanceKeyName ) ;
public class ProxyReader { /** * Invokes the delegate ' s < code > ready ( ) < / code > method . * @ return true if the stream is ready to be read * @ throws IOException if an I / O error occurs */ @ Override public boolean ready ( ) throws IOException { } }
// Delegate to the wrapped reader. Any IOException is routed through the
// handleIOException hook; if the hook does not rethrow, report "not ready".
try { return in . ready ( ) ; } catch ( IOException e ) { handleIOException ( e ) ; return false ; }
public class DeleteGlobalSecondaryIndexActionMarshaller { /** * Marshall the given parameter object . * @ param deleteGlobalSecondaryIndexAction the action to marshall ; must not be null * @ param protocolMarshaller the protocol marshaller to write to * @ throws SdkClientException when the argument is null or marshalling fails */ public void marshall ( DeleteGlobalSecondaryIndexAction deleteGlobalSecondaryIndexAction , ProtocolMarshaller protocolMarshaller ) { } }
// Only the index name is marshalled; any failure is wrapped in an SdkClientException.
if ( deleteGlobalSecondaryIndexAction == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteGlobalSecondaryIndexAction . getIndexName ( ) , INDEXNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class QueryRequest { /** * The primary key of the first item that this operation will evaluate . Use the value that was returned for * < code > LastEvaluatedKey < / code > in the previous operation . * The data type for < code > ExclusiveStartKey < / code > must be String , Number or Binary . No set data types are allowed . * @ param exclusiveStartKey * The primary key of the first item that this operation will evaluate . Use the value that was returned for * < code > LastEvaluatedKey < / code > in the previous operation . < / p > * The data type for < code > ExclusiveStartKey < / code > must be String , Number or Binary . No set data types are * allowed . * @ return Returns a reference to this object so that method calls can be chained together . */ public QueryRequest withExclusiveStartKey ( java . util . Map < String , AttributeValue > exclusiveStartKey ) { } }
// Fluent variant: delegates to setExclusiveStartKey and returns this for chaining.
setExclusiveStartKey ( exclusiveStartKey ) ; return this ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EEnum getIfcFireSuppressionTerminalTypeEnum ( ) { } }
// Generated lazy accessor: resolve the EEnum from the registered Ifc4 package
// by its fixed classifier index (991) on first use, then cache it.
if ( ifcFireSuppressionTerminalTypeEnumEEnum == null ) { ifcFireSuppressionTerminalTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 991 ) ; } return ifcFireSuppressionTerminalTypeEnumEEnum ;
public class CommerceTierPriceEntryUtil { /** * Returns the last commerce tier price entry in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; . * @ param uuid the uuid * @ param companyId the company ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce tier price entry , or < code > null < / code > if a matching commerce tier price entry could not be found */ public static CommerceTierPriceEntry fetchByUuid_C_Last ( String uuid , long companyId , OrderByComparator < CommerceTierPriceEntry > orderByComparator ) { } }
// Straight delegation to the persistence implementation.
return getPersistence ( ) . fetchByUuid_C_Last ( uuid , companyId , orderByComparator ) ;
public class DelegatedClientFactory { /** * Configure windows live client . * @ param properties the collection the configured client is added to */ protected void configureWindowsLiveClient ( final Collection < BaseClient > properties ) { } }
// Register a Windows Live client only when both id and secret are configured.
// NOTE(review): the debug line logs client.getKey() as the "identifier" —
// presumably the configured OAuth key; confirm this is the value meant to be logged.
val live = pac4jProperties . getWindowsLive ( ) ; if ( StringUtils . isNotBlank ( live . getId ( ) ) && StringUtils . isNotBlank ( live . getSecret ( ) ) ) { val client = new WindowsLiveClient ( live . getId ( ) , live . getSecret ( ) ) ; configureClient ( client , live ) ; LOGGER . debug ( "Created client [{}] with identifier [{}]" , client . getName ( ) , client . getKey ( ) ) ; properties . add ( client ) ; }
public class ScriptUtil { /** * Creates a new { @ link ExecutableScript } from a static resource . * @ param language the language of the script * @ param resource the resource path of the script code * @ param scriptFactory the script factory used to create the script * @ return the newly created script * @ throws NotValidException if language or resource are null or empty */ public static ExecutableScript getScriptFromResource ( String language , String resource , ScriptFactory scriptFactory ) { } }
// Fail fast with NotValidException on empty language/resource before delegating
// to the factory.
ensureNotEmpty ( NotValidException . class , "Script language" , language ) ; ensureNotEmpty ( NotValidException . class , "Script resource" , resource ) ; return scriptFactory . createScriptFromResource ( language , resource ) ;
public class DBFRow { /** * Reads the data as string * @ param columnIndex * columnIndex * @ return the value converted to String */ public String getString ( int columnIndex ) { } }
if ( columnIndex < 0 || columnIndex >= data . length ) { throw new IllegalArgumentException ( "Invalid index field: (" + columnIndex + "). Valid range is 0 to " + ( data . length - 1 ) ) ; } Object fieldValue = data [ columnIndex ] ; if ( fieldValue == null ) { return null ; } if ( fieldValue instanceof String ) { return ( String ) fieldValue ; } return fieldValue . toString ( ) ;
public class ScoredValue { /** * Returns a { @ link ScoredValue } consisting of the results of applying the given function to the value of this element . * Mapping is performed only if a { @ link # hasValue ( ) value is present } . * @ param < R > The element type of the new stream * @ param mapper a stateless function to apply to each element * @ return the new { @ link ScoredValue } */ @ SuppressWarnings ( "unchecked" ) public < R > ScoredValue < R > map ( Function < ? super V , ? extends R > mapper ) { } }
// When a value is present, apply the mapper and keep the original score.
// When empty, reuse this instance: the unchecked cast is safe because an
// empty ScoredValue carries no value of type V.
LettuceAssert . notNull ( mapper , "Mapper function must not be null" ) ; if ( hasValue ( ) ) { return new ScoredValue < > ( score , mapper . apply ( getValue ( ) ) ) ; } return ( ScoredValue < R > ) this ;
public class CmsGitCheckin { /** * Gets the log text . < p > * @ return the log text */ @ SuppressWarnings ( "resource" ) public String getLogText ( ) { } }
try { String logFilePath = getLogFilePath ( ) ; byte [ ] logData = CmsFileUtil . readFully ( new FileInputStream ( logFilePath ) ) ; return new String ( logData , "UTF-8" ) ; } catch ( IOException e ) { return "Error reading log file: " + getLogFilePath ( ) ; }
public class CriteriaVisitor { /** * { @ inheritDoc } */ @ Override public Object visit ( ExcludeFilter filter , Object userData ) { } }
// In Hibernate an empty conjunction always evaluates to true, so its negation
// matches nothing — which is exactly the contract of ExcludeFilter.
return Restrictions . not ( Restrictions . conjunction ( ) ) ;
public class Dictionary { /** * Tests whether a property exists or not . * This can be less expensive than getValue ( String ) , because it does not have to allocate an Object for the * property value . * @ param key the key * @ return the boolean value representing whether a property exists or not . */ @ Override public boolean contains ( @ NonNull String key ) { } }
// Explicit null check: the @NonNull annotation alone does not guarantee a clear
// runtime failure. Existence is defined as "the raw entry returned by getMValue
// is non-empty", avoiding the value-object allocation getValue(String) would do.
if ( key == null ) { throw new IllegalArgumentException ( "key cannot be null." ) ; } synchronized ( lock ) { return ! getMValue ( internalDict , key ) . isEmpty ( ) ; }
public class WorkSheet { /** * Get cell value * @ param row * @ param col * @ return * @ throws Exception */ public String getCell ( String row , String col ) throws Exception { } }
if ( col . equals ( this . getIndexColumnName ( ) ) ) { return row ; } HeaderInfo rowIndex = rowLookup . get ( row ) ; HeaderInfo colIndex = columnLookup . get ( col ) ; if ( rowIndex == null ) { // allow for case insentive search for ( String rowtable : rowLookup . keySet ( ) ) { if ( row . equalsIgnoreCase ( rowtable ) ) { rowIndex = rowLookup . get ( rowtable ) ; break ; } } if ( rowIndex == null ) { throw new Exception ( "Row " + row + " not found in worksheet" ) ; } } if ( colIndex == null ) { // allow for case insentive search for ( String coltable : columnLookup . keySet ( ) ) { if ( col . equalsIgnoreCase ( coltable ) ) { colIndex = columnLookup . get ( coltable ) ; break ; } } if ( colIndex == null ) { throw new Exception ( "Column " + col + " not found in worksheet" ) ; } } CompactCharSequence ccs = data [ rowIndex . getIndex ( ) ] [ colIndex . getIndex ( ) ] ; if ( ccs != null ) { return ccs . toString ( ) ; } else { return "" ; } // return . toString ( ) ;
public class CmsVfsSitemapService { /** * Returns the galleries of the given sub site for the requested gallery type . < p > * @ param entryPointUri the sub site entry point * @ param galleryType the gallery type * @ param subSitePaths the sub site paths * @ return the gallery folder entries * @ throws CmsException if reading the resources fails */ private List < CmsGalleryFolderEntry > getGalleriesForType ( String entryPointUri , CmsGalleryType galleryType , List < String > subSitePaths ) throws CmsException { } }
// Phase 1: read all visible gallery folders of the requested type below the entry
// point, skipping folders inside one of the given sub-sites; a folder whose entry
// cannot be read is logged and left out.
// Phase 2: sort by site path so parents precede their children, then nest each
// gallery under the closest preceding gallery whose path is a prefix of its own,
// returning only the top-level galleries of the resulting tree.
List < CmsGalleryFolderEntry > galleries = new ArrayList < CmsGalleryFolderEntry > ( ) ; @ SuppressWarnings ( "deprecation" ) List < CmsResource > galleryFolders = getCmsObject ( ) . readResources ( entryPointUri , CmsResourceFilter . ONLY_VISIBLE_NO_DELETED . addRequireType ( galleryType . getTypeId ( ) ) ) ; for ( CmsResource folder : galleryFolders ) { try { if ( ! isInSubsite ( subSitePaths , folder . getRootPath ( ) ) ) { galleries . add ( readGalleryFolderEntry ( folder , galleryType . getResourceType ( ) ) ) ; } } catch ( CmsException ex ) { log ( ex . getLocalizedMessage ( ) , ex ) ; } } // create a tree structure Collections . sort ( galleries , new Comparator < CmsGalleryFolderEntry > ( ) { public int compare ( CmsGalleryFolderEntry o1 , CmsGalleryFolderEntry o2 ) { return o1 . getSitePath ( ) . compareTo ( o2 . getSitePath ( ) ) ; } } ) ; List < CmsGalleryFolderEntry > galleryTree = new ArrayList < CmsGalleryFolderEntry > ( ) ; for ( int i = 0 ; i < galleries . size ( ) ; i ++ ) { boolean isSubGallery = false ; if ( i > 0 ) { for ( int j = i - 1 ; j >= 0 ; j -- ) { if ( galleries . get ( i ) . getSitePath ( ) . startsWith ( galleries . get ( j ) . getSitePath ( ) ) ) { galleries . get ( j ) . addSubGallery ( galleries . get ( i ) ) ; isSubGallery = true ; break ; } } } if ( ! isSubGallery ) { galleryTree . add ( galleries . get ( i ) ) ; } } return galleryTree ;
public class CreateGroupRequest { /** * The tags to add to the group . A tag is a string - to - string map of key - value pairs . Tag keys can have a maximum * character length of 128 characters , and tag values can have a maximum length of 256 characters . * @ param tags * The tags to add to the group . A tag is a string - to - string map of key - value pairs . Tag keys can have a * maximum character length of 128 characters , and tag values can have a maximum length of 256 characters . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateGroupRequest withTags ( java . util . Map < String , String > tags ) { } }
// Fluent variant: delegates to setTags and returns this for chaining.
setTags ( tags ) ; return this ;
public class EnumExpression { /** * Get the ordinal of this enum * @ return ordinal number expression */ public NumberExpression < Integer > ordinal ( ) { } }
// Lazily build and cache the ORDINAL operation over this expression (mixin).
// NOTE(review): initialization is not synchronized — presumably recreating the
// same expression twice is harmless; confirm if instances are shared across threads.
if ( ordinal == null ) { ordinal = Expressions . numberOperation ( Integer . class , Ops . ORDINAL , mixin ) ; } return ordinal ;
public class CountMin4 { /** * Increments the specified counter by step , saturating at the maximum value ( 15 ) . * @ param i the table index ( 16 counters per slot ) * @ param j the counter to increment * @ param step the increase amount * @ return true if the counter was incremented , false if it was already saturated */ boolean incrementAt ( int i , int j , long step ) { } }
// Each table slot packs sixteen 4-bit counters; offset selects counter j's nibble.
// A nibble already equal to 0xF is saturated and left untouched; otherwise the
// counter is raised by step, clamped to 15, and written back into place.
int offset = j << 2 ; long mask = ( 0xfL << offset ) ; if ( ( table [ i ] & mask ) != mask ) { long current = ( table [ i ] & mask ) >>> offset ; long update = Math . min ( current + step , 15 ) ; table [ i ] = ( table [ i ] & ~ mask ) | ( update << offset ) ; return true ; } return false ;
public class RuleSessionImpl { /** * Method alreadyResettingRule . * Sometimes a rule attempts to reset twice and gets into a loop * This checks for that situation . Rules only need to be reset once . * @ param rc the rule context being reset * @ return true when this context was already registered as resetting , false on first call */ protected boolean alreadyResettingRule ( final RuleContext rc ) { } }
// Test-and-record: the first call for a context registers it and returns false;
// any later call for the same context returns true, breaking reset loops.
if ( m_resettingRules . contains ( rc ) ) { return true ; } m_resettingRules . add ( rc ) ; return false ;
public class SugarDataSource { /** * Method that performs a bulk insert . It works on top of SugarRecord class , and executes the query * asynchronously using Futures . * @ param objects the list of objects that you want to insert . They must be SugarRecord extended objects or @ Table annotatd objects . * @ param successCallback the callback for successful bulk insert operation * @ param errorCallback the callback for an error in bulk insert operation */ public void bulkInsert ( final List < T > objects , final SuccessCallback < List < Long > > successCallback , final ErrorCallback errorCallback ) { } }
// Saves each object through SugarRecord.save on a background Callable and
// collects the generated ids in input order.
// NOTE(review): future.get() blocks the calling thread until the work finishes,
// so despite the Future this method is effectively synchronous.
// A null or empty id list is reported via errorCallback; all other failures
// (including InterruptedException) are funneled to errorCallback as well.
checkNotNull ( successCallback ) ; checkNotNull ( errorCallback ) ; checkNotNull ( objects ) ; final Callable < List < Long > > call = new Callable < List < Long > > ( ) { @ Override public List < Long > call ( ) throws Exception { List < Long > ids = new ArrayList < > ( objects . size ( ) ) ; for ( int i = 0 ; i < objects . size ( ) ; i ++ ) { Long id = SugarRecord . save ( objects . get ( i ) ) ; ids . add ( i , id ) ; } return ids ; } } ; final Future < List < Long > > future = doInBackground ( call ) ; List < Long > ids ; try { ids = future . get ( ) ; if ( null == ids || ids . isEmpty ( ) ) { errorCallback . onError ( new Exception ( "Error when performing bulk insert" ) ) ; } else { successCallback . onSuccess ( ids ) ; } } catch ( Exception e ) { errorCallback . onError ( e ) ; }