signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DTDValidator { /** * Internal methods */ protected void checkIdRefs ( ) throws XMLStreamException { } }
/* 02 - Oct - 2004 , TSa : Now we can also check that all id references * pointed to ids that actually are defined */ if ( mIdMap != null ) { ElementId ref = mIdMap . getFirstUndefined ( ) ; if ( ref != null ) { // problem ! reportValidationProblem ( "Undefined id '" + ref . getId ( ) + "': referenced from element <" + ref . getElemName ( ) + ">, attribute '" + ref . getAttrName ( ) + "'" , ref . getLocation ( ) ) ; } }
public class DatabaseAccountsInner { /** * Regenerates an access key for the specified Azure Cosmos DB database account . * @ param resourceGroupName Name of an Azure resource group . * @ param accountName Cosmos DB database account name . * @ param keyKind The access key to regenerate . Possible values include : ' primary ' , ' secondary ' , ' primaryReadonly ' , ' secondaryReadonly ' * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ServiceResponse < Void > > regenerateKeyWithServiceResponseAsync ( String resourceGroupName , String accountName , KeyKind keyKind ) { } }
if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( accountName == null ) { throw new IllegalArgumentException ( "Parameter accountName is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } if ( keyKind == null ) { throw new IllegalArgumentException ( "Parameter keyKind is required and cannot be null." ) ; } DatabaseAccountRegenerateKeyParameters keyToRegenerate = new DatabaseAccountRegenerateKeyParameters ( ) ; keyToRegenerate . withKeyKind ( keyKind ) ; Observable < Response < ResponseBody > > observable = service . regenerateKey ( this . client . subscriptionId ( ) , resourceGroupName , accountName , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , keyToRegenerate , this . client . userAgent ( ) ) ; return client . getAzureClient ( ) . getPostOrDeleteResultAsync ( observable , new TypeToken < Void > ( ) { } . getType ( ) ) ;
public class TinylogLoggingProvider { /** * Calculates for which tag a full stack trace element with method name , file name and line number is required . * @ param logEntryValues * Matrix with required log entry values * @ return Each set bit represents a tag that requires a full stack trace element */ private static BitSet calculateFullStackTraceRequirements ( final Collection < LogEntryValue > [ ] [ ] logEntryValues ) { } }
BitSet result = new BitSet ( logEntryValues . length ) ; for ( int i = 0 ; i < logEntryValues . length ; ++ i ) { Collection < LogEntryValue > values = logEntryValues [ i ] [ Level . ERROR . ordinal ( ) ] ; if ( values . contains ( LogEntryValue . METHOD ) || values . contains ( LogEntryValue . FILE ) || values . contains ( LogEntryValue . LINE ) ) { result . set ( i ) ; } } return result ;
public class AbstractServiceValidateController { /** * Validate service ticket assertion . * @ param service the service * @ param serviceTicketId the service ticket id * @ return the assertion */ protected Assertion validateServiceTicket ( final WebApplicationService service , final String serviceTicketId ) { } }
return serviceValidateConfigurationContext . getCentralAuthenticationService ( ) . validateServiceTicket ( serviceTicketId , service ) ;
public class ConfigChangeListenerThread { /** * ( non - Javadoc ) * @ see java . lang . Thread # run ( ) */ @ Override public void run ( ) { } }
// Flag to avoid checking the very first time , when the request handler // has just started . boolean firstRun = true ; while ( continuePolling ) { try { // Must check before sleeping , otherwise stopPolling does not // work . if ( ! firstRun ) { if ( propertiesSource . configChanged ( ) ) { Properties props = propertiesSource . getConfigProperties ( ) ; if ( overriddenProperties != null ) { props . putAll ( overriddenProperties ) ; } listener . configChanged ( props ) ; } else if ( bundlesHandler != null && bundlesHandler . bundlesNeedToBeRebuild ( ) ) { listener . rebuildDirtyBundles ( ) ; } } sleep ( waitMillis ) ; firstRun = false ; } catch ( InterruptedException e ) { // Nothing to do } catch ( InterruptBundlingProcessException e ) { if ( LOGGER . isInfoEnabled ( ) ) { LOGGER . info ( "Bundling processed stopped" ) ; } } } this . bundlesHandler = null ; this . listener = null ;
public class ContentPackage { /** * Adds a binary file with explicit mime type . * @ param path Full content path and file name of file * @ param file File with binary data * @ param contentType Mime type , optionally with " ; charset = XYZ " extension * @ throws IOException I / O exception */ public void addFile ( String path , File file , String contentType ) throws IOException { } }
try ( InputStream is = new FileInputStream ( file ) ) { addFile ( path , is , contentType ) ; }
public class DataJoinJob { /** * Submit / run a map / reduce job . * @ param job * @ return true for success * @ throws IOException */ public static boolean runJob ( JobConf job ) throws IOException { } }
JobClient jc = new JobClient ( job ) ; boolean sucess = true ; RunningJob running = null ; try { running = jc . submitJob ( job ) ; JobID jobId = running . getID ( ) ; System . out . println ( "Job " + jobId + " is submitted" ) ; while ( ! running . isComplete ( ) ) { System . out . println ( "Job " + jobId + " is still running." ) ; try { Thread . sleep ( 60000 ) ; } catch ( InterruptedException e ) { } running = jc . getJob ( jobId ) ; } sucess = running . isSuccessful ( ) ; } finally { if ( ! sucess && ( running != null ) ) { running . killJob ( ) ; } jc . close ( ) ; } return sucess ;
public class CmsStringUtil { /** * Checks if the first path is a prefix of the second path , but not equivalent to it . < p > * @ param firstPath the first path * @ param secondPath the second path * @ return true if the first path is a prefix path of the second path , but not equivalent */ public static boolean isProperPrefixPath ( String firstPath , String secondPath ) { } }
firstPath = CmsStringUtil . joinPaths ( firstPath , "/" ) ; secondPath = CmsStringUtil . joinPaths ( secondPath , "/" ) ; return secondPath . startsWith ( firstPath ) && ! firstPath . equals ( secondPath ) ;
public class DocumentBuilder { /** * Parse the content of the given URI as an XML document * and return a new DOM { @ link Document } object . * An < code > IllegalArgumentException < / code > is thrown if the * URI is < code > null < / code > null . * @ param uri The location of the content to be parsed . * @ return A new DOM Document object . * @ exception IOException If any IO errors occur . * @ exception SAXException If any parse errors occur . * @ see org . xml . sax . DocumentHandler */ public Document parse ( String uri ) throws SAXException , IOException { } }
if ( uri == null ) { throw new IllegalArgumentException ( "URI cannot be null" ) ; } InputSource in = new InputSource ( uri ) ; return parse ( in ) ;
public class BaseMonetaryCurrenciesSingletonSpi { /** * Provide access to all currently known currencies . * @ param providers the ( optional ) specification of providers to consider . If not set ( empty ) the providers * as defined by # getDefaultRoundingProviderChain ( ) should be used . * @ return a collection of all known currencies , never null . */ public Set < CurrencyUnit > getCurrencies ( String ... providers ) { } }
return getCurrencies ( CurrencyQueryBuilder . of ( ) . setProviderNames ( providers ) . build ( ) ) ;
public class DockerFileUtil { /** * Extract all lines containing the given keyword * @ param dockerFile dockerfile to examine * @ param keyword keyword to extract the lines for * @ param interpolator interpolator for replacing properties * @ return list of matched lines or an empty list */ public static List < String [ ] > extractLines ( File dockerFile , String keyword , FixedStringSearchInterpolator interpolator ) throws IOException { } }
List < String [ ] > ret = new ArrayList < > ( ) ; try ( BufferedReader reader = new BufferedReader ( new FileReader ( dockerFile ) ) ) { String line ; while ( ( line = reader . readLine ( ) ) != null ) { String lineInterpolated = interpolator . interpolate ( line ) ; String [ ] lineParts = lineInterpolated . split ( "\\s+" ) ; if ( lineParts . length > 0 && lineParts [ 0 ] . equalsIgnoreCase ( keyword ) ) { ret . add ( lineParts ) ; } } } return ret ;
public class SchemaParser { /** * Parse a reader of schema definitions and create a { @ link TypeDefinitionRegistry } * @ param reader the reader to parse * @ return registry of type definitions * @ throws SchemaProblem if there are problems compiling the schema definitions */ public TypeDefinitionRegistry parse ( Reader reader ) throws SchemaProblem { } }
try ( Reader input = reader ) { return parseImpl ( input ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; }
public class CertificateManager { /** * This method retrieves a public certificate from a key store . * @ param keyStore The key store containing the certificate . * @ param certificateName The name ( alias ) of the certificate . * @ return The X509 format public certificate . */ public final X509Certificate retrieveCertificate ( KeyStore keyStore , String certificateName ) { } }
try { logger . entry ( ) ; X509Certificate certificate = ( X509Certificate ) keyStore . getCertificate ( certificateName ) ; logger . exit ( ) ; return certificate ; } catch ( KeyStoreException e ) { RuntimeException exception = new RuntimeException ( "An unexpected exception occurred while attempting to retrieve a certificate." , e ) ; logger . error ( exception . toString ( ) ) ; throw exception ; }
public class Assert { /** * Asserts that { @ code runnable } throws an exception of type { @ code expectedThrowable } when * executed . If it does , the exception object is returned . If it does not throw an exception , an * { @ link AssertionError } is thrown . If it throws the wrong type of exception , an { @ code * AssertionError } is thrown describing the mismatch ; the exception that was actually thrown can * be obtained by calling { @ link AssertionError # getCause } . * @ param message the identifying message for the { @ link AssertionError } ( < code > null < / code > * okay ) * @ param expectedThrowable the expected type of the exception * @ param runnable a function that is expected to throw an exception when executed * @ return the exception thrown by { @ code runnable } * @ since 4.13 */ public static < T extends Throwable > T assertThrows ( String message , Class < T > expectedThrowable , ThrowingRunnable runnable ) { } }
try { runnable . run ( ) ; } catch ( Throwable actualThrown ) { if ( expectedThrowable . isInstance ( actualThrown ) ) { @ SuppressWarnings ( "unchecked" ) T retVal = ( T ) actualThrown ; return retVal ; } else { String expected = formatClass ( expectedThrowable ) ; Class < ? extends Throwable > actualThrowable = actualThrown . getClass ( ) ; String actual = formatClass ( actualThrowable ) ; if ( expected . equals ( actual ) ) { // There must be multiple class loaders . Add the identity hash code so the message // doesn ' t say " expected : java . lang . String < my . package . MyException > . . . " expected += "@" + Integer . toHexString ( System . identityHashCode ( expectedThrowable ) ) ; actual += "@" + Integer . toHexString ( System . identityHashCode ( actualThrowable ) ) ; } String mismatchMessage = buildPrefix ( message ) + format ( "unexpected exception type thrown;" , expected , actual ) ; // The AssertionError ( String , Throwable ) ctor is only available on JDK7. AssertionError assertionError = new AssertionError ( mismatchMessage ) ; assertionError . initCause ( actualThrown ) ; throw assertionError ; } } String notThrownMessage = buildPrefix ( message ) + String . format ( "expected %s to be thrown, but nothing was thrown" , formatClass ( expectedThrowable ) ) ; throw new AssertionError ( notThrownMessage ) ;
public class ClassFile { /** * Specify what target virtual machine version classfile should generate * for . Calling this method changes the major and minor version of the * classfile format . * @ param target VM version , 1.0 , 1.1 , etc . * @ throws IllegalArgumentException if target is not supported */ public void setTarget ( String target ) throws IllegalArgumentException { } }
int major , minor ; if ( target == null || "1.0" . equals ( target ) || "1.1" . equals ( target ) ) { major = 45 ; minor = 3 ; if ( target == null ) { target = "1.0" ; } } else if ( "1.2" . equals ( target ) ) { major = 46 ; minor = 0 ; } else if ( "1.3" . equals ( target ) ) { major = 47 ; minor = 0 ; } else if ( "1.4" . equals ( target ) ) { major = 48 ; minor = 0 ; } else if ( "1.5" . equals ( target ) ) { major = 49 ; minor = 0 ; } else if ( "1.6" . equals ( target ) ) { major = 50 ; minor = 0 ; } else if ( "1.7" . equals ( target ) ) { major = 51 ; minor = 0 ; } else { throw new IllegalArgumentException ( "Unsupported target version: " + target ) ; } mVersion = ( minor << 16 ) | ( major & 0xffff ) ; mTarget = target . intern ( ) ;
public class HotSpotJavaDumperImpl { /** * Create a heap dump . This is the same as jmap - dump : file = . . . * @ param outputDir the server output directory * @ return the resulting file */ private File createHeapDump ( File outputDir ) { } }
if ( hotSpotDiagnosticName == null ) { return null ; } File outputFile ; try { // The default dump name is " java . hprof " . outputFile = createNewFile ( outputDir , "java" , "hprof" ) ; platformMBeanServer . invoke ( hotSpotDiagnosticName , "dumpHeap" , new Object [ ] { outputFile . getAbsolutePath ( ) , false } , new String [ ] { String . class . getName ( ) , boolean . class . getName ( ) } ) ; } catch ( Exception ex ) { throw new RuntimeException ( ex ) ; } return outputFile ;
public class InjectionBinding { /** * Merges a value specified in XML . * < p > If an error occurs , { @ link # mergeError } will be called , which * requires { @ link # getJNDIEnvironmentRefType } to be defined . * @ param oldValue the old value * @ param newValue the new value * @ param elementName the name of the XML element containing the value * @ param key the optional key to be passed to { @ link # mergeError } * @ param valueNames the names of possible old and new values to be used * for error messages , or null if the values themselves should * be used when reporting errors * @ return the merged value * @ throws InjectionConfigurationException if an error occurs */ protected < T > T mergeXMLValue ( T oldValue , T newValue , String elementName , String key , Map < T , String > valueNames ) throws InjectionConfigurationException { } }
if ( newValue == null ) { return oldValue ; } if ( oldValue != null && ! newValue . equals ( oldValue ) ) { Object oldValueName = valueNames == null ? oldValue : valueNames . get ( oldValue ) ; Object newValueName = valueNames == null ? newValue : valueNames . get ( newValue ) ; mergeError ( oldValueName , newValueName , true , elementName , false , key ) ; return oldValue ; } return newValue ;
public class AbstractHTMLFilter {
    /**
     * Moves all child nodes of the parent into the destination element,
     * preserving their order.
     *
     * @param parent the parent {@link Element}
     * @param destination the destination {@link Element}
     */
    protected void moveChildren(Element parent, Element destination) {
        // Repeatedly detach the first child; the parent's (live) child list
        // shrinks on every iteration until it is empty.
        while (parent.hasChildNodes()) {
            destination.appendChild(parent.removeChild(parent.getFirstChild()));
        }
    }
}
public class AbstractJcrNode { /** * Find the property definition for the property , given this node ' s primary type and mixin types . * @ param property the property owned by this node ; may not be null * @ param primaryType the name of the node ' s primary type ; may not be null * @ param mixinTypes the names of the node ' s mixin types ; may be null or empty * @ param nodeTypes the node types cache to use ; may not be null * @ return the property definition ; never null * @ throws ConstraintViolationException if the property has no valid property definition */ final JcrPropertyDefinition propertyDefinitionFor ( org . modeshape . jcr . value . Property property , Name primaryType , Set < Name > mixinTypes , NodeTypes nodeTypes ) throws ConstraintViolationException { } }
// Figure out the JCR property type . . . boolean single = property . isSingle ( ) ; boolean skipProtected = false ; JcrPropertyDefinition defn = findBestPropertyDefinition ( primaryType , mixinTypes , property , single , skipProtected , false , nodeTypes ) ; if ( defn != null ) return defn ; // See if there is a definition that has constraints that were violated . . . defn = findBestPropertyDefinition ( primaryType , mixinTypes , property , single , skipProtected , true , nodeTypes ) ; String pName = readable ( property . getName ( ) ) ; String loc = location ( ) ; if ( defn != null ) { I18n msg = JcrI18n . propertyNoLongerSatisfiesConstraints ; throw new ConstraintViolationException ( msg . text ( pName , loc , defn . getName ( ) , defn . getDeclaringNodeType ( ) . getName ( ) ) ) ; } CachedNode node = sessionCache ( ) . getNode ( key ) ; String ptype = readable ( node . getPrimaryType ( sessionCache ( ) ) ) ; String mixins = readable ( node . getMixinTypes ( sessionCache ( ) ) ) ; String pstr = property . getString ( session . namespaces ( ) ) ; throw new ConstraintViolationException ( JcrI18n . propertyNoLongerHasValidDefinition . text ( pstr , loc , ptype , mixins ) ) ;
public class FTPControlChannel { /** * Closes the control channel */ public void close ( ) throws IOException { } }
logger . debug ( "ftp socket closed" ) ; if ( ftpIn != null ) ftpIn . close ( ) ; if ( ftpOut != null ) ftpOut . close ( ) ; if ( socket != null ) socket . close ( ) ; hasBeenOpened = false ;
public class TwoDScrollView { /** * < p > Scrolls the view to make the area defined by < code > top < / code > and * < code > bottom < / code > visible . This method attempts to give the focus * to a component visible in this area . If no component can be focused in * the new visible area , the focus is reclaimed by this scrollview . < / p > * @ param direction the scroll direction : { @ link android . view . View # FOCUS _ UP } * to go upward * { @ link android . view . View # FOCUS _ DOWN } to downward * @ param top the top offset of the new area to be made visible * @ param bottom the bottom offset of the new area to be made visible * @ return true if the key event is consumed by this method , false otherwise */ private boolean scrollAndFocus ( int directionY , int top , int bottom , int directionX , int left , int right ) { } }
boolean handled = true ; int height = getHeight ( ) ; int containerTop = getScrollY ( ) ; int containerBottom = containerTop + height ; boolean up = directionY == View . FOCUS_UP ; int width = getWidth ( ) ; int containerLeft = getScrollX ( ) ; int containerRight = containerLeft + width ; boolean leftwards = directionX == View . FOCUS_UP ; View newFocused = findFocusableViewInBounds ( up , top , bottom , leftwards , left , right ) ; if ( newFocused == null ) { newFocused = this ; } if ( ( top >= containerTop && bottom <= containerBottom ) || ( left >= containerLeft && right <= containerRight ) ) { handled = false ; } else { int deltaY = up ? ( top - containerTop ) : ( bottom - containerBottom ) ; int deltaX = leftwards ? ( left - containerLeft ) : ( right - containerRight ) ; doScroll ( deltaX , deltaY ) ; } if ( newFocused != findFocus ( ) && newFocused . requestFocus ( directionY ) ) { mTwoDScrollViewMovedFocus = true ; mTwoDScrollViewMovedFocus = false ; } return handled ;
public class CommerceDiscountRelUtil { /** * Returns the commerce discount rels before and after the current commerce discount rel in the ordered set where commerceDiscountId = & # 63 ; . * @ param commerceDiscountRelId the primary key of the current commerce discount rel * @ param commerceDiscountId the commerce discount ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next commerce discount rel * @ throws NoSuchDiscountRelException if a commerce discount rel with the primary key could not be found */ public static CommerceDiscountRel [ ] findByCommerceDiscountId_PrevAndNext ( long commerceDiscountRelId , long commerceDiscountId , OrderByComparator < CommerceDiscountRel > orderByComparator ) throws com . liferay . commerce . discount . exception . NoSuchDiscountRelException { } }
return getPersistence ( ) . findByCommerceDiscountId_PrevAndNext ( commerceDiscountRelId , commerceDiscountId , orderByComparator ) ;
public class AmazonAppStreamClient { /** * Starts the specified fleet . * @ param startFleetRequest * @ return Result of the StartFleet operation returned by the service . * @ throws ResourceNotFoundException * The specified resource was not found . * @ throws OperationNotPermittedException * The attempted operation is not permitted . * @ throws LimitExceededException * The requested limit exceeds the permitted limit for an account . * @ throws InvalidAccountStatusException * The resource cannot be created because your AWS account is suspended . For assistance , contact AWS * Support . * @ throws ConcurrentModificationException * An API error occurred . Wait a few minutes and try again . * @ sample AmazonAppStream . StartFleet * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / appstream - 2016-12-01 / StartFleet " target = " _ top " > AWS API * Documentation < / a > */ @ Override public StartFleetResult startFleet ( StartFleetRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeStartFleet ( request ) ;
public class ExceptionUtils {
    /**
     * Throws the given {@code Throwable} in scenarios where the signature
     * allows throwing an Exception. Errors and Exceptions are thrown
     * directly; other "exotic" subclasses of Throwable are wrapped in an
     * Exception that carries the original as its cause.
     *
     * @param t the throwable to be thrown
     */
    public static void rethrowException(Throwable t) throws Exception {
        // Exception and Error are disjoint, so the check order is irrelevant.
        if (t instanceof Exception) {
            throw (Exception) t;
        }
        if (t instanceof Error) {
            throw (Error) t;
        }
        // A direct Throwable subclass: wrap it, preserving message and cause.
        throw new Exception(t.getMessage(), t);
    }
}
public class JsDocInfoParser { /** * FieldTypeList : = FieldType | FieldType ' , ' FieldTypeList */ private Node parseFieldTypeList ( JsDocToken token ) { } }
Node fieldTypeList = newNode ( Token . LB ) ; Set < String > names = new HashSet < > ( ) ; do { Node fieldType = parseFieldType ( token ) ; if ( fieldType == null ) { return null ; } String name = fieldType . isStringKey ( ) ? fieldType . getString ( ) : fieldType . getFirstChild ( ) . getString ( ) ; if ( names . add ( name ) ) { fieldTypeList . addChildToBack ( fieldType ) ; } else { addTypeWarning ( "msg.jsdoc.type.record.duplicate" , name ) ; } skipEOLs ( ) ; if ( ! match ( JsDocToken . COMMA ) ) { break ; } // Move to the comma token . next ( ) ; // Move to the token past the comma skipEOLs ( ) ; if ( match ( JsDocToken . RIGHT_CURLY ) ) { // Allow trailing comma ( ie , right curly following the comma ) break ; } token = next ( ) ; } while ( true ) ; return fieldTypeList ;
public class XmlQueueExporter { /** * Exports all available queues to an XML file . */ static void export ( String path , DbConn cnx , List < String > qNames ) throws JqmXmlException { } }
if ( cnx == null ) { throw new IllegalArgumentException ( "database connection cannot be null" ) ; } if ( qNames == null || qNames . isEmpty ( ) ) { throw new IllegalArgumentException ( "queue names list name cannot be null or empty" ) ; } List < Queue > qList = new ArrayList < > ( ) ; for ( String qn : qNames ) { Queue q = CommonXml . findQueue ( qn , cnx ) ; if ( q == null ) { throw new IllegalArgumentException ( "There is no queue named " + qn ) ; } qList . add ( q ) ; } export ( path , qList , cnx ) ;
public class SipFactoryImpl { /** * ( non - Javadoc ) * @ see javax . servlet . sip . SipFactory # createAddress ( javax . servlet . sip . URI ) */ public Address createAddress ( URI uri ) { } }
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "Creating Address fromm URI[" + uri . toString ( ) + "]" ) ; } URIImpl uriImpl = ( URIImpl ) uri ; return new AddressImpl ( SipFactoryImpl . addressFactory . createAddress ( uriImpl . getURI ( ) ) , null , ModifiableRule . Modifiable ) ;
public class DomainsInner { /** * Lists domain ownership identifiers . * Lists domain ownership identifiers . * @ param resourceGroupName Name of the resource group to which the resource belongs . * @ param domainName Name of domain . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws DefaultErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the PagedList & lt ; DomainOwnershipIdentifierInner & gt ; object if successful . */ public PagedList < DomainOwnershipIdentifierInner > listOwnershipIdentifiers ( final String resourceGroupName , final String domainName ) { } }
ServiceResponse < Page < DomainOwnershipIdentifierInner > > response = listOwnershipIdentifiersSinglePageAsync ( resourceGroupName , domainName ) . toBlocking ( ) . single ( ) ; return new PagedList < DomainOwnershipIdentifierInner > ( response . body ( ) ) { @ Override public Page < DomainOwnershipIdentifierInner > nextPage ( String nextPageLink ) { return listOwnershipIdentifiersNextSinglePageAsync ( nextPageLink ) . toBlocking ( ) . single ( ) . body ( ) ; } } ;
public class Keys { /** * Gets the { @ code Key } hierarchy of the specified entity . * The ancestor is the first entry . * @ param key The { @ code Key } of the specified entity . * @ return The { @ code Key } hierarchy of the specified entity . */ public static List < Key > hierarchy ( Key key ) { } }
List < Key > hierarchy = new ArrayList < Key > ( ) ; hierarchy . add ( key ) ; while ( key . getParent ( ) != null ) { key = key . getParent ( ) ; hierarchy . add ( key ) ; } Collections . reverse ( hierarchy ) ; return hierarchy ;
public class CommonOps_DDRM { /** * < p > Performs the following operation : < br > * < br > * c = a + b < br > * c < sub > ij < / sub > = a < sub > ij < / sub > + b < sub > ij < / sub > < br > * Matrix C can be the same instance as Matrix A and / or B . * @ param a A Matrix . Not modified . * @ param b A Matrix . Not modified . * @ param c A Matrix where the results are stored . Modified . */ public static void add ( final DMatrixD1 a , final DMatrixD1 b , final DMatrixD1 c ) { } }
if ( a . numCols != b . numCols || a . numRows != b . numRows ) { throw new MatrixDimensionException ( "The matrices are not all the same dimension." ) ; } c . reshape ( a . numRows , a . numCols ) ; final int length = a . getNumElements ( ) ; for ( int i = 0 ; i < length ; i ++ ) { c . set ( i , a . get ( i ) + b . get ( i ) ) ; }
public class AbstractMemberWriter { /** * Add use information to the documentation tree . * @ param mems list of program elements for which the use information will be added * @ param heading the section heading * @ param tableSummary the summary for the use table * @ param contentTree the content tree to which the use information will be added */ protected void addUseInfo ( List < ? extends ProgramElementDoc > mems , Content heading , String tableSummary , Content contentTree ) { } }
if ( mems == null ) { return ; } List < ? extends ProgramElementDoc > members = mems ; boolean printedUseTableHeader = false ; if ( members . size ( ) > 0 ) { Content table = HtmlTree . TABLE ( HtmlStyle . useSummary , 0 , 3 , 0 , tableSummary , writer . getTableCaption ( heading ) ) ; Content tbody = new HtmlTree ( HtmlTag . TBODY ) ; Iterator < ? extends ProgramElementDoc > it = members . iterator ( ) ; for ( int i = 0 ; it . hasNext ( ) ; i ++ ) { ProgramElementDoc pgmdoc = it . next ( ) ; ClassDoc cd = pgmdoc . containingClass ( ) ; if ( ! printedUseTableHeader ) { table . addContent ( writer . getSummaryTableHeader ( this . getSummaryTableHeader ( pgmdoc ) , "col" ) ) ; printedUseTableHeader = true ; } HtmlTree tr = new HtmlTree ( HtmlTag . TR ) ; if ( i % 2 == 0 ) { tr . addStyle ( HtmlStyle . altColor ) ; } else { tr . addStyle ( HtmlStyle . rowColor ) ; } HtmlTree tdFirst = new HtmlTree ( HtmlTag . TD ) ; tdFirst . addStyle ( HtmlStyle . colFirst ) ; writer . addSummaryType ( this , pgmdoc , tdFirst ) ; tr . addContent ( tdFirst ) ; HtmlTree tdLast = new HtmlTree ( HtmlTag . TD ) ; tdLast . addStyle ( HtmlStyle . colLast ) ; if ( cd != null && ! ( pgmdoc instanceof ConstructorDoc ) && ! ( pgmdoc instanceof ClassDoc ) ) { HtmlTree name = new HtmlTree ( HtmlTag . SPAN ) ; name . addStyle ( HtmlStyle . typeNameLabel ) ; name . addContent ( cd . name ( ) + "." ) ; tdLast . addContent ( name ) ; } addSummaryLink ( pgmdoc instanceof ClassDoc ? LinkInfoImpl . Kind . CLASS_USE : LinkInfoImpl . Kind . MEMBER , cd , pgmdoc , tdLast ) ; writer . addSummaryLinkComment ( this , pgmdoc , tdLast ) ; tr . addContent ( tdLast ) ; tbody . addContent ( tr ) ; } table . addContent ( tbody ) ; contentTree . addContent ( table ) ; }
public class AtomicRateLimiter { /** * { @ inheritDoc } */ @ Override public void changeTimeoutDuration ( final Duration timeoutDuration ) { } }
RateLimiterConfig newConfig = RateLimiterConfig . from ( state . get ( ) . config ) . timeoutDuration ( timeoutDuration ) . build ( ) ; state . updateAndGet ( currentState -> new State ( newConfig , currentState . activeCycle , currentState . activePermissions , currentState . nanosToWait ) ) ;
public class PDPageContentStreamExt { /** * Writes a real real to the content stream . * @ param real * the value to be written * @ throws IOException * In case of IO error */ protected void writeOperand ( final float real ) throws IOException { } }
final int byteCount = NumberFormatUtil . formatFloatFast ( real , formatDecimal . getMaximumFractionDigits ( ) , formatBuffer ) ; if ( byteCount == - 1 ) { // Fast formatting failed write ( formatDecimal . format ( real ) ) ; } else { m_aOS . write ( formatBuffer , 0 , byteCount ) ; } m_aOS . write ( ' ' ) ;
public class TokenStream { /** * Attempt to consume this current token and the next tokens if and only if they match the expected values , and return whether * this method was indeed able to consume all of the supplied tokens . * This is < i > not < / i > the same as calling { @ link # canConsume ( String ) } for each of the supplied arguments , since this method * ensures that < i > all < / i > of the supplied values can be consumed . * This method < i > is < / i > equivalent to calling the following : * < pre > * if ( tokens . matches ( currentExpected , expectedForNextTokens ) ) { * tokens . consume ( currentExpected , expectedForNextTokens ) ; * < / pre > * The { @ link # ANY _ VALUE ANY _ VALUE } constant can be used in the expected values as a wildcard . * @ param currentExpected the expected value of the current token * @ param expectedForNextTokens the expected values fo the following tokens * @ return true if the current token did match and was consumed , or false if the current token did not match and therefore was * not consumed * @ throws IllegalStateException if this method was called before the stream was { @ link # start ( ) started } */ public boolean canConsume ( String currentExpected , String ... expectedForNextTokens ) throws IllegalStateException { } }
if ( completed ) return false ; ListIterator < Token > iter = tokens . listIterator ( tokenIterator . previousIndex ( ) ) ; if ( ! iter . hasNext ( ) ) return false ; Token token = iter . next ( ) ; if ( currentExpected != ANY_VALUE && ! token . matches ( currentExpected ) ) return false ; for ( String nextExpected : expectedForNextTokens ) { if ( ! iter . hasNext ( ) ) return false ; token = iter . next ( ) ; if ( nextExpected == ANY_VALUE ) continue ; if ( ! token . matches ( nextExpected ) ) return false ; } this . tokenIterator = iter ; this . currentToken = tokenIterator . hasNext ( ) ? tokenIterator . next ( ) : null ; this . completed = this . currentToken == null ; return true ;
public class TimeSeries { /** * Record events at a timestamp into the time series . * @ param timeNano the time in nano seconds * @ param numEvents the number of events happened at timeNano */ public void record ( long timeNano , int numEvents ) { } }
long leftEndPoint = bucket ( timeNano ) ; mSeries . put ( leftEndPoint , mSeries . getOrDefault ( leftEndPoint , 0 ) + numEvents ) ;
public class CmsEditModelPageMenuEntry { /** * Checks if the model page menu entry should be visible . < p > * @ param id the id of the model page * @ return true if the entry should be visible */ public static boolean checkVisible ( CmsUUID id ) { } }
boolean show = false ; if ( CmsSitemapView . getInstance ( ) . getController ( ) . isEditable ( ) ) { CmsNewResourceInfo info = CmsSitemapView . getInstance ( ) . getController ( ) . getData ( ) . getNewResourceInfoById ( id ) ; show = CmsSitemapView . getInstance ( ) . isModelPageMode ( ) && ( ( ( info != null ) && info . isEditable ( ) ) || CmsSitemapView . getInstance ( ) . isModelGroupEntry ( id ) ) ; } return show ;
public class PubSubOutputHandler { /** * Creates a NOTFLUSHED message for sending * @ param target The target cellule ( er ME ) for the message . * @ param stream The UUID of the stream the message should be sent on . * @ param reqID The request ID that the message answers . * @ return the new NOTFLUSHED message . * @ throws SIResourceException if the message can ' t be created . */ private ControlNotFlushed createControlNotFlushed ( SIBUuid8 target , SIBUuid12 stream , long reqID ) throws SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "createControlNotFlushed" , new Object [ ] { target , stream , new Long ( reqID ) } ) ; ControlNotFlushed notFlushedMsg ; // Create new message try { notFlushedMsg = _cmf . createNewControlNotFlushed ( ) ; } catch ( MessageCreateFailedException e ) { // FFDC FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.impl.PubSubOutputHandler.createControlNotFlushed" , "1:1498:1.164.1.5" , this ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . exception ( tc , e ) ; SibTr . exit ( tc , "createControlNotFlushed" , e ) ; } SibTr . error ( tc , "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.PubSubOutputHandler" , "1:1510:1.164.1.5" , e } ) ; throw new SIResourceException ( nls . getFormattedMessage ( "INTERNAL_MESSAGING_ERROR_CWSIP0002" , new Object [ ] { "com.ibm.ws.sib.processor.impl.PubSubOutputHandler" , "1:1518:1.164.1.5" , e } , null ) , e ) ; } // As we are using the Guaranteed Header - set all the attributes as // well as the ones we want . SIMPUtils . setGuaranteedDeliveryProperties ( notFlushedMsg , _messageProcessor . getMessagingEngineUuid ( ) , null , stream , null , _destinationHandler . getUuid ( ) , ProtocolType . PUBSUBINPUT , GDConfig . PROTOCOL_VERSION ) ; notFlushedMsg . setPriority ( SIMPConstants . CTRL_MSG_PRIORITY ) ; notFlushedMsg . setReliability ( Reliability . ASSURED_PERSISTENT ) ; notFlushedMsg . setRequestID ( reqID ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "createControlNotFlushed" ) ; return notFlushedMsg ;
public class DateColumn { /** * Returns the count of missing values in this column */ @ Override public int countMissing ( ) { } }
int count = 0 ; for ( int i = 0 ; i < size ( ) ; i ++ ) { if ( getPackedDate ( i ) == DateColumnType . missingValueIndicator ( ) ) { count ++ ; } } return count ;
public class AtomDeserializer { /** * Skips the next atom . */ public void skipNext ( ) throws IOException { } }
nameDeserializer . skipNext ( ) ; nextFlags = nextFlags == Integer . MIN_VALUE ? in . readUnsignedByte ( ) : nextFlags ; if ( ( nextFlags & ColumnSerializer . RANGE_TOMBSTONE_MASK ) != 0 ) type . rangeTombstoneSerializer ( ) . skipBody ( in , version ) ; else type . columnSerializer ( ) . skipColumnBody ( in , nextFlags ) ; nextFlags = Integer . MIN_VALUE ;
public class ControlBeanContextSupport { /** * This public api is necessary to allow a bean context with a peer to deserialize its children . * This api is not part any standard api . * @ param in ObjectInputStream * @ throws IOException * @ throws ClassNotFoundException */ public final void readChildren ( ObjectInputStream in ) throws IOException , ClassNotFoundException { } }
int childCount = in . readInt ( ) ; for ( int i = 0 ; i < childCount ; i ++ ) { internalAdd ( in . readObject ( ) , false ) ; }
public class TableProcessor { /** * Populate metadata . * @ param < X > * the generic type * @ param < T > * the generic type * @ param metadata * the metadata * @ param clazz * the clazz */ private < X extends Class , T extends Object > void populateMetadata ( EntityMetadata metadata , Class < X > clazz , Map puProperties ) { } }
// process for metamodelImpl if ( metadata . getPersistenceUnit ( ) != null ) { MetaModelBuilder < X , T > metaModelBuilder = kunderaMetadata . getApplicationMetadata ( ) . getMetaModelBuilder ( metadata . getPersistenceUnit ( ) ) ; onBuildMetaModelSuperClass ( clazz . getSuperclass ( ) , metaModelBuilder ) ; metaModelBuilder . process ( clazz ) ; for ( Field f : clazz . getDeclaredFields ( ) ) { if ( f != null && ! Modifier . isStatic ( f . getModifiers ( ) ) && ! Modifier . isTransient ( f . getModifiers ( ) ) && ! f . isAnnotationPresent ( Transient . class ) ) { // construct metamodel . metaModelBuilder . construct ( clazz , f ) ; // on id attribute . onIdAttribute ( metaModelBuilder , metadata , clazz , f ) ; // determine if it is a column family or super column // family . onFamilyType ( metadata , clazz , f ) ; } } EntityType entityType = ( EntityType ) metaModelBuilder . getManagedTypes ( ) . get ( metadata . getEntityClazz ( ) ) ; validateAndSetId ( metadata , clazz , metaModelBuilder ) ; validateandSetEntityType ( metadata , clazz , metaModelBuilder ) ; MetadataUtils . onJPAColumnMapping ( entityType , metadata ) ; /* Scan for Relationship field */ populateRelationMetaData ( entityType , clazz , metadata ) ; }
public class PkRSS { /** * Similar to { @ link PkRSS # get ( String ) } but also looks for the search term . * @ param url Safe URL to look up loaded articles from . * @ param search Search term . * @ return A { @ link List } containing all loaded articles associated with that * URL and query . May be null if no such URL has yet been loaded . */ public List < Article > get ( String url , String search ) { } }
if ( search == null ) return articleMap . get ( url ) ; return articleMap . get ( url + "?s=" + Uri . encode ( search ) ) ;
public class LazyUserTransaction { protected void suspendForcedlyBegunLazyTransactionIfNeeds ( ) throws SystemException { } }
if ( logger . isDebugEnabled ( ) ) { logger . debug ( "#lazyTx ...Suspending the outer forcedly-begun lazy transaction: {}" , buildLazyTxExp ( ) ) ; } final Transaction suspended = transactionManager . suspend ( ) ; arrangeForcedlyBegunResumer ( ( ) -> { if ( isHerarchyLevelFirst ( ) ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( "#lazyTx ...Resuming the outer forcedly-begun lazy transaction: {}" , buildLazyTxExp ( ) ) ; } doResumeForcedlyBegunLazyTransaction ( suspended ) ; return true ; } else { return false ; } } ) ;
public class ProjectUtils { /** * Creates a project for Roboconf . * @ param targetDirectory the directory into which the Roboconf files must be copied * @ param creationBean the creation properties * @ throws IOException if something went wrong */ public static void createProjectSkeleton ( File targetDirectory , CreationBean creationBean ) throws IOException { } }
if ( creationBean . isMavenProject ( ) ) createMavenProject ( targetDirectory , creationBean ) ; else createSimpleProject ( targetDirectory , creationBean ) ;
public class JSConverter { /** * serialize a List ( as Array ) * @ param name * @ param list List to serialize * @ param sb * @ param done * @ return serialized list * @ throws ConverterException */ private void _serializeList ( String name , List list , StringBuilder sb , Set < Object > done ) throws ConverterException { } }
if ( useShortcuts ) sb . append ( "[];" ) ; else sb . append ( "new Array();" ) ; ListIterator it = list . listIterator ( ) ; int index = - 1 ; while ( it . hasNext ( ) ) { // if ( index ! = - 1 ) sb . append ( " , " ) ; index = it . nextIndex ( ) ; sb . append ( name + "[" + index + "]=" ) ; _serialize ( name + "[" + index + "]" , it . next ( ) , sb , done ) ; // sb . append ( " ; " ) ; }
public class StAXEncoder { /** * Closes any start tags and writes corresponding end tags . * ( non - Javadoc ) * @ see javax . xml . stream . XMLStreamWriter # writeEndDocument ( ) */ public void writeEndDocument ( ) throws XMLStreamException { } }
try { checkPendingATEvents ( ) ; encoder . encodeEndDocument ( ) ; encoder . flush ( ) ; } catch ( Exception e ) { throw new XMLStreamException ( e . getLocalizedMessage ( ) , e ) ; }
public class AmazonIdentityManagementClient { /** * Lists the tags that are attached to the specified role . The returned list of tags is sorted by tag key . For more * information about tagging , see < a href = " https : / / docs . aws . amazon . com / IAM / latest / UserGuide / id _ tags . html " > Tagging * IAM Identities < / a > in the < i > IAM User Guide < / i > . * @ param listRoleTagsRequest * @ return Result of the ListRoleTags operation returned by the service . * @ throws NoSuchEntityException * The request was rejected because it referenced a resource entity that does not exist . The error message * describes the resource . * @ throws ServiceFailureException * The request processing has failed because of an unknown error , exception or failure . * @ sample AmazonIdentityManagement . ListRoleTags * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / iam - 2010-05-08 / ListRoleTags " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ListRoleTagsResult listRoleTags ( ListRoleTagsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListRoleTags ( request ) ;
public class ConvertImage { /** * Converts a { @ link Planar } into the equivalent { @ link InterleavedF32} * @ param input ( Input ) Planar image that is being converted . Not modified . * @ param output ( Optional ) The output image . If null a new image is created . Modified . * @ return Converted image . */ public static InterleavedF32 convert ( Planar < GrayF32 > input , InterleavedF32 output ) { } }
if ( output == null ) { output = new InterleavedF32 ( input . width , input . height , input . getNumBands ( ) ) ; } else { output . reshape ( input . width , input . height , input . getNumBands ( ) ) ; } if ( BoofConcurrency . USE_CONCURRENT ) { ImplConvertImage_MT . convert ( input , output ) ; } else { ImplConvertImage . convert ( input , output ) ; } return output ;
public class DistCp { /** * Sanity check for srcPath */ private static void checkSrcPath ( Configuration conf , List < Path > srcPaths ) throws IOException { } }
List < IOException > rslt = new ArrayList < IOException > ( ) ; List < Path > unglobbed = new LinkedList < Path > ( ) ; for ( Path p : srcPaths ) { FileSystem fs = p . getFileSystem ( conf ) ; FileStatus [ ] inputs = fs . globStatus ( p ) ; if ( inputs != null && inputs . length > 0 ) { for ( FileStatus onePath : inputs ) { unglobbed . add ( onePath . getPath ( ) ) ; } } else { rslt . add ( new IOException ( "Input source " + p + " does not exist." ) ) ; } } if ( ! rslt . isEmpty ( ) ) { throw new InvalidInputException ( rslt ) ; } srcPaths . clear ( ) ; srcPaths . addAll ( unglobbed ) ;
public class NodePod { /** * Test if the server is the primary for the node . */ @ Override public boolean isServerPrimary ( ServerBartender server ) { } }
for ( int i = 0 ; i < Math . min ( 1 , _owners . length ) ; i ++ ) { ServerBartender serverBar = server ( i ) ; if ( serverBar == null ) { continue ; } else if ( serverBar . isSameServer ( server ) ) { return true ; } } return false ;
public class LexemeDocumentImpl { /** * LexemeIdValue id , * ItemIdValue lexicalCategory , * ItemIdValue language , * Map < String , MonolingualTextValue > lemmas , * Map < String , List < Statement > > statements , * List < FormDocument > forms , * long revisionId * ( non - Javadoc ) * @ see org . wikidata . wdtk . datamodel . interfaces . StatementDocument # withStatement ( org . wikidata . wdtk . datamodel . interfaces . Statement ) */ @ Override public LexemeDocument withStatement ( Statement statement ) { } }
Map < String , List < Statement > > newGroups = addStatementToGroups ( statement , claims ) ; return new LexemeDocumentImpl ( getEntityId ( ) , lexicalCategory , language , lemmas , newGroups , forms , revisionId ) ;
public class FSM { /** * Process event . Will handle all retry attempts . If attempts exceed maximum retries , * it will throw a TooBusyException . * @ param stateful The Stateful Entity * @ param event The Event * @ param args Optional parameters to pass into the Action * @ return The current State * @ throws TooBusyException Exception indicating that we ' ve exceeded the number of RetryAttempts */ public State < T > onEvent ( T stateful , String event , Object ... args ) throws TooBusyException { } }
int attempts = 0 ; while ( this . retryAttempts == - 1 || attempts < this . retryAttempts ) { try { State < T > current = this . getCurrentState ( stateful ) ; // Fetch the transition for this event from the current state Transition < T > transition = this . getTransition ( event , current ) ; // Is there one ? if ( transition != null ) { current = this . transition ( stateful , current , event , transition , args ) ; } else { if ( logger . isDebugEnabled ( ) ) logger . debug ( "{}({})::{}({})->{}/noop" , this . name , stateful . getClass ( ) . getSimpleName ( ) , current . getName ( ) , event , current . getName ( ) ) ; // If blocking , force a transition to the current state as // it ' s possible that another thread has moved out of the blocking state . // Either way , we ' ll retry this event if ( current . isBlocking ( ) ) { this . setCurrent ( stateful , current , current ) ; throw new WaitAndRetryException ( this . retryInterval ) ; } } return current ; } catch ( RetryException re ) { logger . warn ( "{}({})::Retrying event" , this . name , stateful ) ; // Wait ? if ( WaitAndRetryException . class . isInstance ( re ) ) { try { Thread . sleep ( ( ( WaitAndRetryException ) re ) . getWait ( ) ) ; } catch ( InterruptedException ie ) { throw new RuntimeException ( ie ) ; } } attempts ++ ; } } logger . error ( "{}({})::Unable to process event" , this . name , stateful ) ; throw new TooBusyException ( ) ;
public class EthereumUtil { /** * Converts a long in a RLPElement to long * @ param rpe RLP element containing a raw long * @ return long or null if not long */ public static Long convertToLong ( RLPElement rpe ) { } }
Long result = 0L ; byte [ ] rawBytes = rpe . getRawData ( ) ; if ( ( rawBytes != null ) ) { // fill leading zeros if ( rawBytes . length < EthereumUtil . LONG_SIZE ) { byte [ ] fullBytes = new byte [ EthereumUtil . LONG_SIZE ] ; int dtDiff = EthereumUtil . LONG_SIZE - rawBytes . length ; for ( int i = 0 ; i < rawBytes . length ; i ++ ) { fullBytes [ dtDiff + i ] = rawBytes [ i ] ; result = ByteBuffer . wrap ( fullBytes ) . getLong ( ) ; } } else { result = ByteBuffer . wrap ( rawBytes ) . getLong ( ) ; } } return result ;
public class JsonUtils { /** * Attempt to decode a JSON string as a Java object * @ param json The JSON string to decode * @ param cls The class to decode as * @ param < T > Class parameter * @ return An instance of { @ code cls } , or an exception */ public static < T > T decode ( String json , Class < T > cls ) { } }
return GSON . fromJson ( json , cls ) ;
public class Value { /** * Returns an { @ code ARRAY < FLOAT64 > } value . * @ param v the source of element values , which may be null to produce a value for which { @ code * isNull ( ) } is { @ code true } */ public static Value float64Array ( @ Nullable double [ ] v ) { } }
return float64Array ( v , 0 , v == null ? 0 : v . length ) ;
public class StorageAccountsInner { /** * Asynchronously creates a new storage account with the specified parameters . If an account is already created and a subsequent create request is issued with different properties , the account properties will be updated . If an account is already created and a subsequent create or update request is issued with the exact same set of properties , the request will succeed . * @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive . * @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only . * @ param parameters The parameters to provide for the created account . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the StorageAccountInner object */ public Observable < StorageAccountInner > beginCreateAsync ( String resourceGroupName , String accountName , StorageAccountCreateParameters parameters ) { } }
return beginCreateWithServiceResponseAsync ( resourceGroupName , accountName , parameters ) . map ( new Func1 < ServiceResponse < StorageAccountInner > , StorageAccountInner > ( ) { @ Override public StorageAccountInner call ( ServiceResponse < StorageAccountInner > response ) { return response . body ( ) ; } } ) ;
public class ParamConvertUtils { /** * Creates a converter function that converts a path segment into the given result type . * Current implementation doesn ' t follow the { @ link PathParam } specification to maintain backward compatibility . */ public static Converter < String , Object > createPathParamConverter ( final Type resultType ) { } }
if ( ! ( resultType instanceof Class ) ) { throw new IllegalArgumentException ( "Unsupported @PathParam type " + resultType ) ; } return new Converter < String , Object > ( ) { @ Override public Object convert ( String value ) { return ConvertUtils . convert ( value , ( Class < ? > ) resultType ) ; } } ;
public class SelfExtractRun { /** * Run server extracted from jar * If environment variable WLP _ JAR _ DEBUG is set , use ' server debug ' instead * @ param extractDirectory * @ param serverName * @ return server run return code * @ throws IOException * @ throws InterruptedException */ private static int runServer ( String extractDirectory , String serverName , String [ ] args ) throws IOException , InterruptedException { } }
int rc = 0 ; Runtime rt = Runtime . getRuntime ( ) ; String action = "run" ; if ( System . getenv ( "WLP_JAR_DEBUG" ) != null ) action = "debug" ; // unless user specifies to enable 2PC , disable it if ( System . getenv ( "WLP_JAR_ENABLE_2PC" ) == null ) disable2PC ( extractDirectory , serverName ) ; String cmd = extractDirectory + File . separator + "wlp" + File . separator + "bin" + File . separator + "server " + action + " " + serverName ; if ( args . length > 0 ) { StringBuilder appArgs = new StringBuilder ( " --" ) ; for ( String arg : args ) { appArgs . append ( " " ) . append ( arg ) ; } cmd += appArgs . toString ( ) ; } System . out . println ( cmd ) ; if ( platformType == SelfExtractUtils . PlatformType_UNIX ) { // cmd ready as - is for Unix } else if ( platformType == SelfExtractUtils . PlatformType_WINDOWS ) { cmd = "cmd /k " + cmd ; } else if ( platformType == SelfExtractUtils . PlatformType_CYGWIN ) { cmd = "bash -c " + '"' + cmd . replace ( '\\' , '/' ) + '"' ; } Process proc = rt . exec ( cmd , SelfExtractUtils . runEnv ( extractDirectory ) , null ) ; // run server // setup and start reader threads for error and output streams StreamReader errorReader = new StreamReader ( proc . getErrorStream ( ) , "ERROR" ) ; errorReader . start ( ) ; StreamReader outputReader = new StreamReader ( proc . getInputStream ( ) , "OUTPUT" ) ; outputReader . start ( ) ; // now setup the shutdown hook Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( new ShutdownHook ( platformType , extractDirectory , serverName , outputReader , errorReader ) ) ) ; // wait on server start process to complete , capture and pass on return code rc = proc . waitFor ( ) ; return rc ;
public class AmazonKinesisVideoClient { /** * Adds one or more tags to a stream . A < i > tag < / i > is a key - value pair ( the value is optional ) that you can define * and assign to AWS resources . If you specify a tag that already exists , the tag value is replaced with the value * that you specify in the request . For more information , see < a * href = " https : / / docs . aws . amazon . com / awsaccountbilling / latest / aboutv2 / cost - alloc - tags . html " > Using Cost Allocation * Tags < / a > in the < i > AWS Billing and Cost Management User Guide < / i > . * You must provide either the < code > StreamName < / code > or the < code > StreamARN < / code > . * This operation requires permission for the < code > KinesisVideo : TagStream < / code > action . * Kinesis video streams support up to 50 tags . * @ param tagStreamRequest * @ return Result of the TagStream operation returned by the service . * @ throws ClientLimitExceededException * Kinesis Video Streams has throttled the request because you have exceeded the limit of allowed client * calls . Try making the call later . * @ throws InvalidArgumentException * The value for this input parameter is invalid . * @ throws ResourceNotFoundException * Amazon Kinesis Video Streams can ' t find the stream that you specified . * @ throws NotAuthorizedException * The caller is not authorized to perform this operation . * @ throws InvalidResourceFormatException * The format of the < code > StreamARN < / code > is invalid . * @ throws TagsPerResourceExceededLimitException * You have exceeded the limit of tags that you can associate with the resource . Kinesis video streams * support up to 50 tags . * @ sample AmazonKinesisVideo . TagStream * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / kinesisvideo - 2017-09-30 / TagStream " target = " _ top " > AWS API * Documentation < / a > */ @ Override public TagStreamResult tagStream ( TagStreamRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeTagStream ( request ) ;
public class Scope { /** * 自内向外在作用域栈中查找变量是否存在 */ public boolean exists ( Object key ) { } }
for ( Scope cur = this ; cur != null ; cur = cur . parent ) { if ( cur . data != null && cur . data . containsKey ( key ) ) { return true ; } } return false ;
public class SlotManager { /** * Registers a slot for the given task manager at the slot manager . The slot is identified by * the given slot id . The given resource profile defines the available resources for the slot . * The task manager connection can be used to communicate with the task manager . * @ param slotId identifying the slot on the task manager * @ param allocationId which is currently deployed in the slot * @ param resourceProfile of the slot * @ param taskManagerConnection to communicate with the remote task manager */ private void registerSlot ( SlotID slotId , AllocationID allocationId , JobID jobId , ResourceProfile resourceProfile , TaskExecutorConnection taskManagerConnection ) { } }
if ( slots . containsKey ( slotId ) ) { // remove the old slot first removeSlot ( slotId ) ; } final TaskManagerSlot slot = createAndRegisterTaskManagerSlot ( slotId , resourceProfile , taskManagerConnection ) ; final PendingTaskManagerSlot pendingTaskManagerSlot ; if ( allocationId == null ) { pendingTaskManagerSlot = findExactlyMatchingPendingTaskManagerSlot ( resourceProfile ) ; } else { pendingTaskManagerSlot = null ; } if ( pendingTaskManagerSlot == null ) { updateSlot ( slotId , allocationId , jobId ) ; } else { pendingSlots . remove ( pendingTaskManagerSlot . getTaskManagerSlotId ( ) ) ; final PendingSlotRequest assignedPendingSlotRequest = pendingTaskManagerSlot . getAssignedPendingSlotRequest ( ) ; if ( assignedPendingSlotRequest == null ) { handleFreeSlot ( slot ) ; } else { assignedPendingSlotRequest . unassignPendingTaskManagerSlot ( ) ; allocateSlot ( slot , assignedPendingSlotRequest ) ; } }
public class JSONWriter { /** * Pop an array or object scope . * @ param c The scope to close . */ protected void pop ( Mode c ) { } }
if ( this . stack . size ( ) == 0 || this . stack . pop ( ) != c ) { throw new JSONException ( "Nesting error." ) ; } if ( this . stack . size ( ) > 0 ) this . mode = this . stack . peek ( ) ; else this . mode = DONE ;
public class ComponentAPI { /** * 获取预授权码 * @ param component _ access _ token component _ access _ token * @ param component _ appid 公众号第三方平台appid * @ return 预授权码 */ public static PreAuthCode api_create_preauthcode ( String component_access_token , String component_appid ) { } }
String postJsonData = String . format ( "{\"component_appid\":\"%1$s\"}" , component_appid ) ; HttpUriRequest httpUriRequest = RequestBuilder . post ( ) . setHeader ( jsonHeader ) . setUri ( BASE_URI + "/cgi-bin/component/api_create_preauthcode" ) . addParameter ( "component_access_token" , API . componentAccessToken ( component_access_token ) ) . setEntity ( new StringEntity ( postJsonData , Charset . forName ( "utf-8" ) ) ) . build ( ) ; return LocalHttpClient . executeJsonResult ( httpUriRequest , PreAuthCode . class ) ;
public class Message { /** * Determines if an RRset with the given name and type is already * present in the given section . * @ see RRset * @ see Section */ public boolean findRRset ( Name name , int type , int section ) { } }
if ( sections [ section ] == null ) return false ; for ( int i = 0 ; i < sections [ section ] . size ( ) ; i ++ ) { Record r = ( Record ) sections [ section ] . get ( i ) ; if ( r . getType ( ) == type && name . equals ( r . getName ( ) ) ) return true ; } return false ;
public class RemoveFromQueryRecordOnCloseHandler { /** * Remove this record from the query record . */ public void removeIt ( ) { } }
if ( m_queryRecord != null ) if ( this . getOwner ( ) != null ) m_queryRecord . removeRecord ( this . getOwner ( ) ) ; m_queryRecord = null ;
public class ImageScaling { /** * Scaling region of bitmap to destination bitmap region * @ param src source bitmap * @ param dest destination bitmap * @ param x source x * @ param y source y * @ param sw source width * @ param sh source height * @ param dx destination x * @ param dy destination y * @ param dw destination width * @ param dh destination height */ public static void scale ( Bitmap src , Bitmap dest , int x , int y , int sw , int sh , int dx , int dy , int dw , int dh ) { } }
scale ( src , dest , CLEAR_COLOR , x , y , sw , sh , dx , dy , dw , dh ) ;
public class WarningsDescriptor { /** * Returns the URL of the warning project for the specified parser . * @ param group * the parser group * @ return a unique URL */ public static String getProjectUrl ( final String group ) { } }
if ( group == null ) { // prior 4.0 return PLUGIN_ID ; } else { return PLUGIN_ID + ParserRegistry . getUrl ( group ) ; }
public class AbstractLog { /** * Report a lint warning , unless suppressed by the - nowarn option or the * maximum number of warnings has been reached . * @ param lc The lint category for the diagnostic * @ param warningKey The key for the localized warning message . */ public void warning ( LintCategory lc , Warning warningKey ) { } }
report ( diags . warning ( lc , null , null , warningKey ) ) ;
public class ClasspathScanDescriptorProvider { /** * Resolve the given jar file URL into a JarFile object . */ private JarFile getJarFile ( String jarFileUrl ) throws IOException { } }
if ( jarFileUrl . startsWith ( "file:" ) ) { try { final URI uri = new URI ( jarFileUrl . replaceAll ( " " , "\\%20" ) ) ; final String jarFileName = uri . getSchemeSpecificPart ( ) ; logger . info ( "Creating new JarFile based on URI-scheme filename: {}" , jarFileName ) ; return new JarFile ( jarFileName ) ; } catch ( URISyntaxException ex ) { // Fallback for URLs that are not valid URIs ( should hardly ever // happen ) . final String jarFileName = jarFileUrl . substring ( "file:" . length ( ) ) ; logger . info ( "Creating new JarFile based on alternative filename: {}" , jarFileName ) ; return new JarFile ( jarFileName ) ; } } else { logger . info ( "Creating new JarFile based on URI (with '!/'): {}" , jarFileUrl ) ; return new JarFile ( jarFileUrl ) ; }
public class CommerceAddressPersistenceImpl { /** * Returns the first commerce address in the ordered set where commerceCountryId = & # 63 ; . * @ param commerceCountryId the commerce country ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce address * @ throws NoSuchAddressException if a matching commerce address could not be found */ @ Override public CommerceAddress findByCommerceCountryId_First ( long commerceCountryId , OrderByComparator < CommerceAddress > orderByComparator ) throws NoSuchAddressException { } }
CommerceAddress commerceAddress = fetchByCommerceCountryId_First ( commerceCountryId , orderByComparator ) ; if ( commerceAddress != null ) { return commerceAddress ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "commerceCountryId=" ) ; msg . append ( commerceCountryId ) ; msg . append ( "}" ) ; throw new NoSuchAddressException ( msg . toString ( ) ) ;
public class ComputeNodeGetRemoteDesktopHeaders { /** * Set the time at which the resource was last modified . * @ param lastModified the lastModified value to set * @ return the ComputeNodeGetRemoteDesktopHeaders object itself . */ public ComputeNodeGetRemoteDesktopHeaders withLastModified ( DateTime lastModified ) { } }
if ( lastModified == null ) { this . lastModified = null ; } else { this . lastModified = new DateTimeRfc1123 ( lastModified ) ; } return this ;
public class ExpirationDate { /** * Gets the card expiration date , as a java . util . Date object . Returns * null if no date is available . * @ return Card expiration date . */ public Date getExpirationDateAsDate ( ) { } }
if ( hasExpirationDate ( ) ) { final LocalDateTime endOfMonth = expirationDate . atEndOfMonth ( ) . atStartOfDay ( ) . plus ( 1 , ChronoUnit . DAYS ) . minus ( 1 , ChronoUnit . NANOS ) ; final Instant instant = endOfMonth . atZone ( ZoneId . systemDefault ( ) ) . toInstant ( ) ; final Date date = new Date ( instant . toEpochMilli ( ) ) ; return date ; } else { return null ; }
public class MongoNativeExtractor { /** * Gets chunks . * @ param collection the collection * @ return the chunks */ private DBCursor getChunks ( DBCollection collection ) { } }
DB config = collection . getDB ( ) . getSisterDB ( "config" ) ; DBCollection configChunks = config . getCollection ( "chunks" ) ; return configChunks . find ( new BasicDBObject ( "ns" , collection . getFullName ( ) ) ) ;
public class ResourceFinder { /** * Executes mapAllStrings assuming the value of each entry in the * map is the name of a class that should be loaded . * Any class that cannot be loaded will be cause an exception to be thrown . * Example classpath : * META - INF / xmlparsers / xerces * META - INF / xmlparsers / crimson * ResourceFinder finder = new ResourceFinder ( " META - INF / " ) ; * Map map = finder . mapAvailableStrings ( " xmlparsers " ) ; * map . contains ( " xerces " ) ; / / true * map . contains ( " crimson " ) ; / / true * Class xercesClass = map . get ( " xerces " ) ; * Class crimsonClass = map . get ( " crimson " ) ; * @ param uri * @ return * @ throws IOException * @ throws ClassNotFoundException */ public Map < String , Class > mapAllClasses ( String uri ) throws IOException , ClassNotFoundException { } }
Map < String , Class > classes = new HashMap < > ( ) ; Map < String , String > map = mapAllStrings ( uri ) ; for ( Iterator iterator = map . entrySet ( ) . iterator ( ) ; iterator . hasNext ( ) ; ) { Map . Entry entry = ( Map . Entry ) iterator . next ( ) ; String string = ( String ) entry . getKey ( ) ; String className = ( String ) entry . getValue ( ) ; Class clazz = classLoader . loadClass ( className ) ; classes . put ( string , clazz ) ; } return classes ;
public class BeanInfoIntrospector {

    /**
     * Apply the base-view fields to the other views if configured to do so.
     * Mutates the Set values inside the supplied map in place and returns
     * the same map instance.
     */
    private static Map<String, Set<String>> expand(Map<String, Set<String>> viewToPropNames) {
        // Properties declared on the base view; default to an empty set when absent.
        Set<String> baseProps = viewToPropNames.get(PropertyView.BASE_VIEW);

        if (baseProps == null) {
            baseProps = ImmutableSet.of();
        }

        if (!SquigglyConfig.isFilterImplicitlyIncludeBaseFieldsInView()) {
            // Implicit inclusion is disabled: make an exception for the full
            // view only, which always receives the base properties.
            Set<String> fullView = viewToPropNames.get(PropertyView.FULL_VIEW);

            if (fullView != null) {
                fullView.addAll(baseProps);
            }

            return viewToPropNames;
        }

        // Implicit inclusion enabled: fold the base properties into every
        // view except the base view itself (mutating each set in place).
        for (Map.Entry<String, Set<String>> entry : viewToPropNames.entrySet()) {
            String viewName = entry.getKey();
            Set<String> propNames = entry.getValue();

            if (!PropertyView.BASE_VIEW.equals(viewName)) {
                propNames.addAll(baseProps);
            }
        }

        return viewToPropNames;
    }
}
public class AgreementSeeker {

    /**
     * Adds alive and dead graph information from one site's failure report.
     *
     * @param reportingHsid site reporting failures
     * @param failed failures seen by the reporting site (site id -> witnessed dead?)
     */
    void add(long reportingHsid, final Map<Long, Boolean> failed) {
        // skip if the reporting site did not belong to the pre failure mesh
        if (!m_hsids.contains(reportingHsid)) return;

        // skip if the reporting site is reporting itself dead
        Boolean harakiri = failed.get(reportingHsid);
        if (harakiri != null && harakiri.booleanValue()) return;

        // sites this reporter witnessed as dead (subset of the mesh)
        Set<Long> dead = Sets.newHashSet();
        for (Map.Entry<Long, Boolean> e : failed.entrySet()) {
            // skip if the failed site did not belong to the pre failure mesh
            if (!m_hsids.contains(e.getKey())) continue;

            // record that this reporter expressed an opinion about the site
            m_reported.put(e.getKey(), reportingHsid);

            // if the failure is witnessed add it to the dead graph
            if (e.getValue()) {
                m_dead.put(e.getKey(), reportingHsid);
                dead.add(e.getKey());
            }
        }

        // once you are witnessed dead you cannot become undead,
        // but it is not the case for alive nodes, as they can
        // die. So remove all that the reporting site thought
        // was alive before this invocation, then re-add its
        // current alive view (mesh minus witnessed-dead).
        removeValue(m_alive, reportingHsid);
        for (Long alive : Sets.difference(m_hsids, dead)) {
            m_alive.put(alive, reportingHsid);
        }
    }
}
public class CommerceRegionUtil {

    /**
     * Returns a range of all the commerce regions where commerceCountryId = &#63; and
     * active = &#63;.
     *
     * <p>Useful when paginating results. Returns a maximum of <code>end - start</code>
     * instances. <code>start</code> and <code>end</code> are not primary keys, they are
     * indexes in the result set; setting both to {@link QueryUtil#ALL_POS} returns the
     * full result set.</p>
     *
     * @param commerceCountryId the commerce country ID
     * @param active the active flag
     * @param start the lower bound of the range of commerce regions
     * @param end the upper bound of the range of commerce regions (not inclusive)
     * @return the range of matching commerce regions
     */
    public static List<CommerceRegion> findByC_A(long commerceCountryId, boolean active, int start, int end) {
        // Static facade: delegates straight to the persistence implementation.
        return getPersistence().findByC_A(commerceCountryId, active, start, end);
    }
}
public class CelebrityDetailMarshaller {

    /**
     * Marshall the given parameter object into the wire format via the
     * supplied protocol marshaller.
     *
     * @param celebrityDetail the object to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives each field binding
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(CelebrityDetail celebrityDetail, ProtocolMarshaller protocolMarshaller) {
        if (celebrityDetail == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each field with its pre-built binding descriptor.
            protocolMarshaller.marshall(celebrityDetail.getUrls(), URLS_BINDING);
            protocolMarshaller.marshall(celebrityDetail.getName(), NAME_BINDING);
            protocolMarshaller.marshall(celebrityDetail.getId(), ID_BINDING);
            protocolMarshaller.marshall(celebrityDetail.getConfidence(), CONFIDENCE_BINDING);
            protocolMarshaller.marshall(celebrityDetail.getBoundingBox(), BOUNDINGBOX_BINDING);
            protocolMarshaller.marshall(celebrityDetail.getFace(), FACE_BINDING);
        } catch (Exception e) {
            // Wrap any failure, preserving the original cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ZipkinScribeCollectorAutoConfiguration {

    /**
     * The init method will block until the scribe port is listening, or crash on
     * port conflict.
     *
     * <p>Builds the collector from the configured properties, wiring in the
     * shared sampler, metrics and storage beans.</p>
     */
    @Bean(initMethod = "start")
    ScribeCollector scribe(ZipkinScribeCollectorProperties scribe, CollectorSampler sampler, CollectorMetrics metrics, StorageComponent storage) {
        return scribe.toBuilder().sampler(sampler).metrics(metrics).storage(storage).build();
    }
}
public class OpenApiDeploymentProcessor {

    /**
     * Process the deployment in order to produce an OpenAPI document.
     *
     * @see org.wildfly.swarm.spi.api.DeploymentProcessor#process()
     */
    @Override
    public void process() throws Exception {
        // if the deployment is Implicit, we don't want to process it
        if (deploymentContext != null && deploymentContext.isImplicit()) {
            return;
        }

        try {
            // First register OpenApiServletContextListener which triggers the final init
            WARArchive warArchive = archive.as(WARArchive.class);
            warArchive.findWebXmlAsset().addListener(LISTENER_CLASS);
        } catch (Exception e) {
            throw new RuntimeException("Failed to register OpenAPI listener", e);
        }

        // Static openapi file bundled inside the archive (may be empty).
        OpenApiStaticFile staticFile = ArchiveUtil.archiveToStaticFile(archive);

        // Set models from annotations and static file on the singleton
        // document; ordering matters — config first, then the two models.
        OpenApiDocument openApiDocument = OpenApiDocument.INSTANCE;
        openApiDocument.config(config);
        openApiDocument.modelFromStaticFile(OpenApiProcessor.modelFromStaticFile(staticFile));
        openApiDocument.modelFromAnnotations(OpenApiProcessor.modelFromAnnotations(config, index));
    }
}
public class VertexArrayList { /** * Utility method used to convert the list of vertices into a list of vertex ids ( assuming all vertices have ids ) * @ param vertices * @ return */ private static final LongArrayList toLongList ( List < TitanVertex > vertices ) { } }
LongArrayList result = new LongArrayList ( vertices . size ( ) ) ; for ( TitanVertex n : vertices ) { result . add ( n . longId ( ) ) ; } return result ;
public class Version { /** * Returns whether the given String is a valid build meta data identifier . That is , * this method returns < code > true < / code > if , and only if the { @ code buildMetaData } * parameter is either the empty string or properly formatted as a build meta data * identifier according to the semantic version specification . * Note : this method does not throw an exception upon < code > null < / code > input , but * returns < code > false < / code > instead . * @ param buildMetaData The String to check . * @ return Whether the given String is a valid build meta data identifier . * @ since 0.5.0 */ public static boolean isValidBuildMetaData ( String buildMetaData ) { } }
if ( buildMetaData == null ) { return false ; } else if ( buildMetaData . isEmpty ( ) ) { return true ; } return parseID ( buildMetaData . toCharArray ( ) , buildMetaData , 0 , true , true , false , null , "" ) != FAILURE ;
public class ArgParser {

    /**
     * Registers all the @Opt annotations on a class with this ArgParser.
     * Every annotated field must be public, non-final and non-abstract;
     * violations raise IllegalStateException, duplicate option names raise
     * RuntimeException.
     */
    public void registerClass(Class<?> clazz) {
        registeredClasses.add(clazz);
        // getFields() returns only public fields (including inherited ones).
        for (Field field : clazz.getFields()) {
            if (field.isAnnotationPresent(Opt.class)) {
                int mod = field.getModifiers();
                // Validate the field is writable via reflection at parse time.
                if (!Modifier.isPublic(mod)) {
                    throw new IllegalStateException("@" + Opt.class.getName() + " on non-public field: " + field);
                }
                if (Modifier.isFinal(mod)) {
                    throw new IllegalStateException("@" + Opt.class.getName() + " on final field: " + field);
                }
                if (Modifier.isAbstract(mod)) {
                    throw new IllegalStateException("@" + Opt.class.getName() + " on abstract field: " + field);
                }
                // Add an Apache Commons CLI Option for this field.
                Opt opt = field.getAnnotation(Opt.class);
                String name = getName(opt, field);
                // names is a Set; add() returning false means a duplicate.
                if (!names.add(name)) {
                    throw new RuntimeException("Multiple options have the same name: --" + name);
                }
                String shortName = null;
                if (createShortNames) {
                    shortName = getAndAddUniqueShortName(name);
                }
                Option apacheOpt = new Option(shortName, name, opt.hasArg(), opt.description());
                apacheOpt.setRequired(opt.required());
                options.addOption(apacheOpt);
                // Remember which field backs this option for later assignment.
                optionFieldMap.put(apacheOpt, field);
                // Check that only boolean has hasArg() == false.
                if (!field.getType().equals(Boolean.TYPE) && !opt.hasArg()) {
                    throw new RuntimeException("Only booleans can not have arguments.");
                }
            }
        }
    }
}
public class MessageFormat {

    /**
     * <strong>[icu]</strong> Converts an 'apostrophe-friendly' pattern into a
     * standard pattern.
     *
     * <p><em>This is obsolete for ICU 4.8 and higher MessageFormat pattern
     * strings.</em> It can still be useful together with
     * {@link java.text.MessageFormat}, which treats all ASCII apostrophes as
     * quotes — problematic in languages such as French. This utility assumes
     * that only an unpaired apostrophe immediately before a brace is a true
     * quote; other unpaired apostrophes are paired, and the resulting standard
     * pattern string is returned.</p>
     *
     * <p><b>Note:</b> It is not guaranteed that the returned pattern is indeed
     * a valid pattern. The only effect is to convert between patterns having
     * different quoting semantics.</p>
     *
     * <p><b>Note:</b> This method only works on top-level messageText, not
     * messageText nested inside a complexArg.</p>
     *
     * @param pattern the 'apostrophe-friendly' pattern to convert
     * @return the standard equivalent of the original pattern
     */
    public static String autoQuoteApostrophe(String pattern) {
        StringBuilder buf = new StringBuilder(pattern.length() * 2);
        // Small state machine: INITIAL (plain text), SINGLE_QUOTE (saw one
        // apostrophe), IN_QUOTE (inside a quoted literal), MSG_ELEMENT
        // (inside {...}, possibly nested — tracked by braceCount).
        int state = STATE_INITIAL;
        int braceCount = 0;
        for (int i = 0, j = pattern.length(); i < j; ++i) {
            char c = pattern.charAt(i);
            switch (state) {
                case STATE_INITIAL:
                    switch (c) {
                        case SINGLE_QUOTE:
                            state = STATE_SINGLE_QUOTE;
                            break;
                        case CURLY_BRACE_LEFT:
                            state = STATE_MSG_ELEMENT;
                            ++braceCount;
                            break;
                    }
                    break;
                case STATE_SINGLE_QUOTE:
                    switch (c) {
                        case SINGLE_QUOTE:
                            // '' is a literal apostrophe; back to plain text.
                            state = STATE_INITIAL;
                            break;
                        case CURLY_BRACE_LEFT:
                        case CURLY_BRACE_RIGHT:
                            // Apostrophe directly before a brace: a true quote.
                            state = STATE_IN_QUOTE;
                            break;
                        default:
                            // Lone apostrophe before ordinary text: pair it by
                            // emitting a second apostrophe.
                            buf.append(SINGLE_QUOTE);
                            state = STATE_INITIAL;
                            break;
                    }
                    break;
                case STATE_IN_QUOTE:
                    switch (c) {
                        case SINGLE_QUOTE:
                            // Closing quote of the literal.
                            state = STATE_INITIAL;
                            break;
                    }
                    break;
                case STATE_MSG_ELEMENT:
                    switch (c) {
                        case CURLY_BRACE_LEFT:
                            ++braceCount;
                            break;
                        case CURLY_BRACE_RIGHT:
                            if (--braceCount == 0) {
                                state = STATE_INITIAL;
                            }
                            break;
                    }
                    break;
                ///CLOVER:OFF
                default: // Never happens.
                    break;
                ///CLOVER:ON
            }
            buf.append(c);
        }
        // End of scan: close a dangling apostrophe so the result is balanced.
        if (state == STATE_SINGLE_QUOTE || state == STATE_IN_QUOTE) {
            buf.append(SINGLE_QUOTE);
        }
        return new String(buf);
    }
}
public class DonutOptions { /** * Converts a list of { @ link BrowserUsageData } into a list of * { @ link PointSeries } containing the data about the browser versions . */ private PointSeries toVersionSeries ( final List < BrowserUsageData > browserUsage ) { } }
PointSeries versionSeries = new PointSeries ( ) ; for ( BrowserUsageData browserData : browserUsage ) { for ( VersionUsageData versionData : browserData . getVersionUsageData ( ) ) { versionSeries . addPoint ( new Point ( versionData . getName ( ) , versionData . getMarketShare ( ) , versionData . getColor ( ) ) ) ; } } return versionSeries ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcElectricCurrentMeasure ( ) { } }
if ( ifcElectricCurrentMeasureEClass == null ) { ifcElectricCurrentMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 672 ) ; } return ifcElectricCurrentMeasureEClass ;
public class CmsSearchReplaceThread {

    /**
     * Renames a nested container within a container page XML.<p>
     *
     * @param targetContainerPage the target container page
     * @param layoutResource the container element resource generating the nested container
     * @param oldName the old container name
     * @param newName the new container name
     * @return the changed content bytes
     * @throws Exception in case unmarshalling of the container page fails
     */
    private byte[] renameNestedContainers(CmsFile targetContainerPage, CmsResource layoutResource, String oldName, String newName) throws Exception {
        byte[] contents = targetContainerPage.getContents();
        // Instance ids of container elements backed by the layout resource;
        // only those nested container names need renaming.
        Set<String> replaceElementIds = new HashSet<String>();
        try {
            CmsXmlContainerPage page = CmsXmlContainerPageFactory.unmarshal(getCms(), targetContainerPage);
            for (CmsContainerElementBean element : page.getContainerPage(getCms()).getElements()) {
                if (element.getId().equals(layoutResource.getStructureId()) && (element.getInstanceId() != null)) {
                    replaceElementIds.add(element.getInstanceId());
                }
            }
            if (replaceElementIds.size() > 0) {
                // Decode with the resource's own encoding so the bytes we
                // write back round-trip correctly.
                String encoding = CmsLocaleManager.getResourceEncoding(getCms(), targetContainerPage);
                String content = new String(contents, encoding);
                for (String instanceId : replaceElementIds) {
                    // NOTE(review): the old nested-container name is used as a
                    // regex pattern verbatim — assumes it contains no regex
                    // metacharacters; confirm against how names are generated.
                    Pattern patt = Pattern.compile(CmsJspTagContainer.getNestedContainerName(oldName, instanceId, null));
                    Matcher m = patt.matcher(content);
                    StringBuffer sb = new StringBuffer(content.length());
                    while (m.find()) {
                        // quoteReplacement: the new name is literal text, not
                        // a replacement template.
                        m.appendReplacement(sb, Matcher.quoteReplacement(CmsJspTagContainer.getNestedContainerName(newName, instanceId, null)));
                    }
                    m.appendTail(sb);
                    content = sb.toString();
                }
                contents = content.getBytes(encoding);
            }
        } catch (Exception e) {
            LOG.error(e.getLocalizedMessage(), e);
            throw e;
        }
        return contents;
    }
}
public class StandardConversions { /** * Converts a java object to a text / plain representation . * @ param source Object to convert . * @ param sourceMediaType The MediaType for the source object . * @ param destinationMediaType The required text / plain specification . * @ return byte [ ] with the text / plain representation of the object with the requested charset . */ public static byte [ ] convertJavaToText ( Object source , MediaType sourceMediaType , MediaType destinationMediaType ) { } }
if ( source == null ) return null ; if ( sourceMediaType == null || destinationMediaType == null ) { throw new NullPointerException ( "sourceMediaType and destinationMediaType cannot be null!" ) ; } Object decoded = decodeObjectContent ( source , sourceMediaType ) ; if ( decoded instanceof byte [ ] ) { return convertCharset ( source , StandardCharsets . UTF_8 , destinationMediaType . getCharset ( ) ) ; } else { String asString = decoded . toString ( ) ; return asString . getBytes ( destinationMediaType . getCharset ( ) ) ; }
public class LHS {

    /**
     * Reflect Field is not serializable, hide it: before default
     * serialization, replace the Field with its declaring class and name so
     * it can be re-resolved on deserialization.
     *
     * @param s serializer
     * @throws IOException mandatory throwing exception
     */
    private synchronized void writeObject(final ObjectOutputStream s) throws IOException {
        if (null != field) {
            // Store serializable surrogates for the Field...
            this.object = field.getDeclaringClass();
            this.varName = field.getName();
            // ...and clear it BEFORE defaultWriteObject() so the
            // non-serializable Field is never written.
            this.field = null;
        }
        s.defaultWriteObject();
    }
}
public class CassQuery { /** * Append in clause . * @ param queryBuilder * the query builder * @ param translator * the translator * @ param value * the value * @ param fieldClazz * the field clazz * @ param columnName * the column name * @ param isPresent * the is present * @ return true , if successful */ private boolean appendInClause ( StringBuilder queryBuilder , CQLTranslator translator , List < Object > value , Class fieldClazz , String columnName , boolean isPresent ) { } }
isPresent = appendIn ( queryBuilder , translator , columnName ) ; queryBuilder . append ( "(" ) ; for ( Object objectvalue : value ) { translator . appendValue ( queryBuilder , fieldClazz , objectvalue , isPresent , false ) ; queryBuilder . append ( ", " ) ; } queryBuilder . deleteCharAt ( queryBuilder . lastIndexOf ( ", " ) ) ; queryBuilder . append ( ") " ) ; queryBuilder . append ( " AND " ) ; return isPresent ;
public class FailurePolicy {

    /**
     * Returns a predicate that reports whether an execution's failure is an
     * instance of any of the given {@code failures} types. A {@code null}
     * throwable (i.e. no failure) never matches.
     *
     * @param failures failure types to match against
     * @return a BiPredicate over (result, throwable) that ignores the result
     */
    static <R> BiPredicate<R, Throwable> failurePredicateFor(List<Class<? extends Throwable>> failures) {
        return (result, failure) ->
            failure != null
                && failures.stream().anyMatch(type -> type.isAssignableFrom(failure.getClass()));
    }
}
public class Serializer {

    /**
     * Serializes an object to a byte array, optionally GZIP-compressed.
     *
     * @param o the object to serialize
     * @param zip true if the data should be compressed
     * @return the serialized bytes
     * @throws IOException thrown when an error is encountered writing the data
     */
    public static byte[] serialize(final Object o, final boolean zip) throws IOException {
        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
        // try-with-resources replaces the manual nested finally blocks and
        // guarantees both streams close (in reverse order) even when the
        // ObjectOutputStream constructor itself throws. Closing the GZIP
        // stream finishes the compressed trailer before we read the bytes.
        try (OutputStream os = zip ? new GZIPOutputStream(baos) : baos;
             ObjectOutputStream oos = new ObjectOutputStream(os)) {
            oos.writeObject(o);
        }
        return baos.toByteArray();
    }
}
public class ClientAsynchEventThreadPool {

    /**
     * Dispatches the data to be sent to the connection event listeners on a
     * thread from this pool.
     *
     * @param eventId identifier of the event to deliver
     * @param conversation the conversation the event relates to
     */
    public void dispatchAsynchEvent(short eventId, Conversation conversation) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "dispatchAsynchEvent", new Object[] { "" + eventId, conversation });
        // Create a runnable with the data and hand it to the pool.
        AsynchEventThread thread = new AsynchEventThread(eventId, conversation);
        dispatchThread(thread);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "dispatchAsynchEvent");
    }
}
public class FormRequestParser {

    /**
     * Stuff whatever GET/POST arguments are sent up into the returned
     * WaybackRequest object, except the Submit button argument. Returns null
     * when there are no parameters or the request path matches no known base.
     */
    public WaybackRequest parse(HttpServletRequest httpRequest, AccessPoint accessPoint) throws BetterRequestException {
        WaybackRequest wbRequest = null;
        @SuppressWarnings("unchecked")
        Map<String, String[]> queryMap = httpRequest.getParameterMap();
        if (queryMap.size() > 0) {
            wbRequest = new WaybackRequest();
            // Classify the request by the path prefix under the access point.
            String base = accessPoint.translateRequestPath(httpRequest);
            if (base.startsWith(REPLAY_BASE)) {
                wbRequest.setReplayRequest();
            } else if (base.startsWith(QUERY_BASE)) {
                wbRequest.setCaptureQueryRequest();
            } else if (base.startsWith(XQUERY_BASE)) {
                wbRequest.setCaptureQueryRequest();
                wbRequest.setXMLMode(true);
            } else {
                // Unknown base path: not a request we handle.
                return null;
            }
            wbRequest.setResultsPerPage(getMaxRecords());
            Set<String> keys = queryMap.keySet();
            Iterator<String> itr = keys.iterator();
            while (itr.hasNext()) {
                String key = itr.next();
                if (key.equals(SUBMIT_BUTTON)) {
                    continue;
                }
                // just jam everything else in:
                String val = AccessPoint.getMapParam(queryMap, key);
                if (key.equals(WaybackRequest.REQUEST_URL)) {
                    // Default scheme-less URLs to http://.
                    String scheme = UrlOperations.urlToScheme(val);
                    if (scheme == null) {
                        val = UrlOperations.HTTP_SCHEME + val;
                    }
                }
                wbRequest.put(key, val);
            }
            // Derive the start/end range from a partial replay timestamp when
            // present, otherwise fall back to the configured global range.
            String partialTS = wbRequest.getReplayTimestamp();
            if (partialTS != null) {
                if (wbRequest.getStartTimestamp() == null) {
                    String startTS = Timestamp.parseBefore(partialTS).getDateStr();
                    wbRequest.setStartTimestamp(startTS);
                }
                if (wbRequest.getEndTimestamp() == null) {
                    String endTS = Timestamp.parseAfter(partialTS).getDateStr();
                    wbRequest.setEndTimestamp(endTS);
                }
            } else {
                if (wbRequest.getStartTimestamp() == null) {
                    wbRequest.setStartTimestamp(getEarliestTimestamp());
                }
                if (wbRequest.getEndTimestamp() == null) {
                    wbRequest.setEndTimestamp(getLatestTimestamp());
                }
            }
        }
        return wbRequest;
    }
}
public class PseudoClassSpecifierChecker { /** * Add { @ code : first - child } elements . * @ see < a href = " http : / / www . w3 . org / TR / css3 - selectors / # first - child - pseudo " > < code > : first - child < / code > pseudo - class < / a > */ private void addFirstChildElements ( ) { } }
for ( Node node : nodes ) { if ( DOMHelper . getPreviousSiblingElement ( node ) == null ) { result . add ( node ) ; } }
public class ApacheMultipartParser { /** * Parses the < code > header - part < / code > and returns as key / value pairs . * If there are multiple headers of the same names , the name will map to a * comma - separated list containing the values . * @ param headerPart The < code > header - part < / code > of the current * < code > encapsulation < / code > . * @ return A < code > Map < / code > containing the parsed HTTP request headers . */ private Map /* String , String */ parseHeaders ( String headerPart ) { } }
final int len = headerPart . length ( ) ; Map < String , String > headers = new HashMap < String , String > ( ) ; int start = 0 ; for ( ; ; ) { int end = parseEndOfLine ( headerPart , start ) ; if ( start == end ) { break ; } String header = headerPart . substring ( start , end ) ; start = end + 2 ; while ( start < len ) { int nonWs = start ; while ( nonWs < len ) { char c = headerPart . charAt ( nonWs ) ; if ( c != ' ' && c != '\t' ) { break ; } ++ nonWs ; } if ( nonWs == start ) { break ; } // Continuation line found end = parseEndOfLine ( headerPart , nonWs ) ; header += " " + headerPart . substring ( nonWs , end ) ; start = end + 2 ; } parseHeaderLine ( headers , header ) ; } return headers ;
public class TypeValidator { /** * Expect the type to be an object . Unlike expectObject , a type convertible to object is not * acceptable . */ void expectActualObject ( Node n , JSType type , String msg ) { } }
if ( ! type . isObject ( ) ) { mismatch ( n , msg , type , OBJECT_TYPE ) ; }
public class CmsSiteDetailDialog {

    /**
     * Initializes the dialog site object, pulling it from the site manager on
     * the initial call or from the dialog session on subsequent calls, then
     * derives favicon, site name and module-configured OU settings.<p>
     */
    private void initSite() {
        Object o = null;
        if (CmsStringUtil.isEmpty(getParamAction()) || CmsDialog.DIALOG_INITIAL.equals(getParamAction())) {
            // this is the initial dialog call
            if (CmsStringUtil.isNotEmpty(m_paramSites)) {
                // edit an existing site, get it from the site manager
                o = OpenCms.getSiteManager().getSiteForSiteRoot(m_paramSites);
            }
        } else {
            // this is not the initial call, get the site from the session
            o = getDialogObject();
        }
        if (o instanceof CmsSite) {
            // wrap the managed site in an editable bean
            m_site = new CmsSiteBean((CmsSite) o);
        } else if (o instanceof CmsSiteBean) {
            // reuse the bean stored in the session
            m_site = (CmsSiteBean) o;
        } else if (DIALOG_NEW.equals(getParamEditaction())) {
            // create a new, empty site bean
            m_site = new CmsSiteBean();
        } else {
            // nothing to edit: forward back to the sites overview tool
            try {
                getToolManager().jspForwardTool(this, "/sites", new HashMap<String, String[]>());
            } catch (Exception e) {
                // noop
            }
            // NOTE(review): if the forward above fails, m_site may still be
            // null when dereferenced below — confirm the forward always ends
            // request processing.
        }
        if (!m_site.hasSecureServer()) {
            m_site.setSecureUrl("");
        }
        try {
            // look up the favicon from the root site, ignoring any failure
            CmsObject clone = OpenCms.initCmsObject(getCms());
            clone.getRequestContext().setSiteRoot("");
            String iconPath = m_site.getSiteRoot() + "/" + CmsSiteFaviconDialog.ICON_NAME;
            if (clone.existsResource(iconPath)) {
                m_site.setFavicon(iconPath);
            }
        } catch (Throwable t) {
            // noop
        }
        if (m_site.getSiteRoot() != null) {
            setSitename(CmsResource.getName(m_site.getSiteRoot()));
        }
        // read OU creation settings from the module configuration
        CmsModule module = OpenCms.getModuleManager().getModule(MODULE_NAME);
        m_createou = Boolean.valueOf(module.getParameter(PARAM_CREATE_OU, Boolean.FALSE.toString())).booleanValue();
        m_ouDescription = module.getParameter(PARAM_OU_DESCRIPTION, "OU for: %(site)");
        setDialogObject(m_site);
    }
}
public class DAOValidatorHelper { /** * Methode permettant d ' extraire le nom de la fonction * @ param functionTokenTopken de fonction * @ returnNom de la fonction */ public static String extractFunctionName ( String functionToken ) { } }
// Si le Token est null if ( functionToken == null || functionToken . trim ( ) . length ( ) == 0 ) { // On retourne la chaine return functionToken ; } int index0 = functionToken . indexOf ( SIMPLE_FUNCTION_LEFT_DELIMITER ) ; int index1 = functionToken . indexOf ( SIMPLE_FUNCTION_OPEN ) ; // Extraction du nom de la fonction String fName = functionToken . substring ( index0 + SIMPLE_FUNCTION_LEFT_DELIMITER . length ( ) , index1 ) ; // On retourne la deuxieme return fName ;
public class AbstractIoBuffer {

    /**
     * {@inheritDoc}
     *
     * Writes a float at the given absolute index, growing the buffer first
     * if auto-expansion is enabled.
     */
    @Override
    public final IoBuffer putFloat(int index, float value) {
        // Ensure capacity for the 4 bytes of a float before writing.
        autoExpand(index, 4);
        buf().putFloat(index, value);
        // Return this for call chaining.
        return this;
    }
}