signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class RegistryUtils { /** * Add a specific value to a registry . If the registry doesn ' t exist yet , create it . * @ param registryName the name of the registry . * @ param key the unique key corresponding to the value to insert ( typically an appid ) . * @ param value the value to add to the registry . */ public static void putValue ( String registryName , String key , Object value ) { } }
if ( StringUtils . isBlank ( registryName ) || StringUtils . isBlank ( key ) || value == null ) { return ; } Sysprop registryObject = readRegistryObject ( registryName ) ; if ( registryObject == null ) { registryObject = new Sysprop ( getRegistryID ( registryName ) ) ; registryObject . addProperty ( key , value ) ; CoreUtils . getInstance ( ) . getDao ( ) . create ( REGISTRY_APP_ID , registryObject ) ; } else { registryObject . addProperty ( key , value ) ; CoreUtils . getInstance ( ) . getDao ( ) . update ( REGISTRY_APP_ID , registryObject ) ; }
public class Logger { /** * Gets a message from the logger ' s backing resource bundle if what ' s passed in is a message key ; if it ' s not then * what ' s passed in is , itself , returned . If what ' s passed in is the same thing as what ' s returned , any additional * details passed in are ignored . * @ param aMessage A message to check against the backing resource bundle * @ param aDetails An array of additional details * @ return A message value ( potentially from the backing resource bundle ) */ public String getMessage ( final String aMessage , final Object ... aDetails ) { } }
if ( hasI18nKey ( aMessage ) ) { return getI18n ( aMessage , aDetails ) ; } else if ( aDetails . length == 0 ) { return aMessage ; } else { return StringUtils . format ( aMessage , aDetails ) ; }
public class MemoryMappedFileWriter { /** * / * ( non - Javadoc ) * @ see org . audit4j . core . handler . file . AuditFileWriter # write ( java . lang . String ) */ @ Override public AuditFileWriter write ( String event ) { } }
String str2 = event + CoreConstants . NEW_LINE ; out . put ( str2 . getBytes ( Charset . forName ( "UTF-8" ) ) ) ; return this ;
public class SipPhone { /** * This method releases all resources associated with this SipPhone . Neither this SipPhone object * nor its SipSession base class should be used again after calling the dispose ( ) method . * Server / proxy unregistration occurs and SipCall ( s ) associated with this SipPhone are dropped . No * un - SUBSCRIBE is done for active Subscriptions in the buddy list . * @ see org . cafesip . sipunit . SipCall # dispose ( ) */ public void dispose ( ) { } }
this . removeRequestListener ( Request . NOTIFY , this ) ; // drop calls while ( ! callList . isEmpty ( ) ) { ( ( SipCall ) callList . get ( 0 ) ) . dispose ( ) ; } unregister ( contactInfo . getContactHeader ( ) . getAddress ( ) . getURI ( ) . clone ( ) . toString ( ) , 15000 ) ; super . dispose ( ) ;
public class Calendars { /** * Loads a calendar from the specified file . * @ param filename the name of the file from which to load calendar data * @ return returns a new calendar instance initialised from the specified file * @ throws IOException occurs when there is an error reading the specified file * @ throws ParserException occurs when the data in the specified file is invalid */ public static Calendar load ( final String filename ) throws IOException , ParserException { } }
return new CalendarBuilder ( ) . build ( Files . newInputStream ( Paths . get ( filename ) ) ) ;
public class ImageParser {

    /**
     * Processes a buffered image against a list of instruction elements, in three phases:
     * (1) walk the nodes, recursing into "subimage" regions, writing CRC32 checksums into
     * "checksum" elements, collecting requested pixel counts from "count" elements, and
     * resolving "transparentNodata" elements; (2) count pixel values per requested band
     * combination; (3) write the accumulated counts back into the DOM.
     * 2011-09-08 PwD added formatName param to support getBase64Data().
     * @param buffimage the image (or subimage) being inspected
     * @param formatName image format name, forwarded on recursion
     * @param nodes instruction elements describing what to compute
     * @throws Exception on any parsing or image-access failure
     */
    private static void processBufferedImage(BufferedImage buffimage, String formatName, NodeList nodes) throws Exception {
        // Maps a band specification (e.g. "RGB") to either null ("count all samples")
        // or a map of specific sample values to their running counts.
        HashMap<Object, Object> bandMap = new HashMap<Object, Object>();
        // Phase 1: interpret each instruction element.
        for (int i = 0; i < nodes.getLength(); i++) {
            Node node = nodes.item(i);
            if (node.getNodeType() == Node.ELEMENT_NODE) {
                if (node.getLocalName().equals("subimage")) {
                    // Recurse into the requested rectangular region.
                    Element e = (Element) node;
                    int x = Integer.parseInt(e.getAttribute("x"));
                    int y = Integer.parseInt(e.getAttribute("y"));
                    int w = Integer.parseInt(e.getAttribute("width"));
                    int h = Integer.parseInt(e.getAttribute("height"));
                    processBufferedImage(buffimage.getSubimage(x, y, w, h), formatName, e.getChildNodes());
                } else if (node.getLocalName().equals("checksum")) {
                    // Compute a CRC32 over the raw data banks and append it as text.
                    CRC32 checksum = new CRC32();
                    Raster raster = buffimage.getRaster();
                    DataBufferByte buffer;
                    if (node.getParentNode().getLocalName().equals("subimage")) {
                        // A subimage shares the parent's buffer; copy out just this region.
                        WritableRaster outRaster = raster.createCompatibleWritableRaster();
                        buffimage.copyData(outRaster);
                        buffer = (DataBufferByte) outRaster.getDataBuffer();
                    } else {
                        buffer = (DataBufferByte) raster.getDataBuffer();
                    }
                    int numbanks = buffer.getNumBanks();
                    for (int j = 0; j < numbanks; j++) {
                        checksum.update(buffer.getData(j));
                    }
                    Document doc = node.getOwnerDocument();
                    node.appendChild(doc.createTextNode(Long.toString(checksum.getValue())));
                } else if (node.getLocalName().equals("count")) {
                    // Record which band/sample combinations must be counted in phase 2.
                    String band = ((Element) node).getAttribute("bands");
                    String sample = ((Element) node).getAttribute("sample");
                    if (sample.equals("all")) {
                        // null marks "count every distinct sample value" for this band.
                        bandMap.put(band, null);
                    } else {
                        HashMap<Object, Object> sampleMap = (HashMap<Object, Object>) bandMap.get(band);
                        if (sampleMap == null && !bandMap.containsKey(band)) {
                            sampleMap = new HashMap<Object, Object>();
                            bandMap.put(band, sampleMap);
                        }
                        // NOTE(review): if this band was earlier mapped to null via
                        // sample="all", sampleMap is still null here and put() would NPE —
                        // confirm inputs never mix "all" with specific samples for one band.
                        sampleMap.put(Integer.
                        decode(sample), new Integer(0));
                    }
                } else if (node.getLocalName().equals("transparentNodata")) { // 2011-08-24 PwD
                    String transparentNodata = checkTransparentNodata(buffimage, node);
                    node.setTextContent(transparentNodata);
                }
            }
        }
        // Phase 2: scan the raster once per requested band combination and count samples.
        Iterator bandIt = bandMap.keySet().iterator();
        while (bandIt.hasNext()) {
            String band_str = (String) bandIt.next();
            int[] band_indexes;
            if (buffimage.getType() == BufferedImage.TYPE_BYTE_BINARY || buffimage.getType() == BufferedImage.TYPE_BYTE_GRAY) {
                // Single-band images: always sample band 0.
                band_indexes = new int[1];
                band_indexes[0] = 0;
            } else {
                // Map each letter of the band spec to its raster band index.
                band_indexes = new int[band_str.length()];
                for (int i = 0; i < band_str.length(); i++) {
                    if (band_str.charAt(i) == 'A') band_indexes[i] = 3;
                    if (band_str.charAt(i) == 'B') band_indexes[i] = 2;
                    if (band_str.charAt(i) == 'G') band_indexes[i] = 1;
                    if (band_str.charAt(i) == 'R') band_indexes[i] = 0;
                }
            }
            Raster raster = buffimage.getRaster();
            java.util.HashMap sampleMap = (java.util.HashMap) bandMap.get(band_str);
            // A null map means "count all sample values", not just pre-registered ones.
            boolean addall = (sampleMap == null);
            if (sampleMap == null) {
                sampleMap = new java.util.HashMap();
                bandMap.put(band_str, sampleMap);
            }
            int minx = raster.getMinX();
            int maxx = minx + raster.getWidth();
            int miny = raster.getMinY();
            int maxy = miny + raster.getHeight();
            int[][] bands = new int[band_indexes.length][raster.getWidth()];
            for (int y = miny; y < maxy; y++) {
                // Fetch one row of samples per band, then combine per pixel.
                for (int i = 0; i < band_indexes.length; i++) {
                    raster.getSamples(minx, y, maxx, 1, band_indexes[i], bands[i]);
                }
                for (int x = minx; x < maxx; x++) {
                    // Pack the per-band samples into one int, 8 bits per band.
                    int sample = 0;
                    for (int i = 0; i < band_indexes.length; i++) {
                        sample |= bands[i][x] << ((band_indexes.length - i - 1) * 8);
                    }
                    Integer sampleObj = new Integer(sample);
                    boolean add = addall;
                    if (!addall) {
                        // Only count values that were explicitly requested.
                        add = sampleMap.
                        containsKey(sampleObj);
                    }
                    if (add) {
                        Integer count = (Integer) sampleMap.get(sampleObj);
                        if (count == null) {
                            count = new Integer(0);
                        }
                        count = new Integer(count.intValue() + 1);
                        sampleMap.put(sampleObj, count);
                    }
                }
            }
        }
        // Phase 3: write the accumulated counts back into the <count> elements.
        Node node = nodes.item(0);
        while (node != null) {
            if (node.getNodeType() == Node.ELEMENT_NODE && node.getLocalName().equals("count")) {
                String band = ((Element) node).getAttribute("bands");
                String sample = ((Element) node).getAttribute("sample");
                HashMap sampleMap = (HashMap) bandMap.get(band);
                Document doc = node.getOwnerDocument();
                if (sample.equals("all")) {
                    // Replace this placeholder with one <count> element per distinct value.
                    Node parent = node.getParentNode();
                    Node prevSibling = node.getPreviousSibling();
                    Iterator sampleIt = sampleMap.keySet().iterator();
                    Element countnode = null;
                    int digits;
                    String prefix;
                    // Choose hex formatting width from the image type / band count.
                    switch (buffimage.getType()) {
                        case BufferedImage.TYPE_BYTE_BINARY:
                            digits = 1;
                            prefix = "";
                            break;
                        case BufferedImage.TYPE_BYTE_GRAY:
                            digits = 2;
                            prefix = "0x";
                            break;
                        default:
                            prefix = "0x";
                            digits = band.length() * 2;
                    }
                    while (sampleIt.hasNext()) {
                        countnode = doc.createElementNS(node.getNamespaceURI(), "count");
                        Integer sampleInt = (Integer) sampleIt.next();
                        Integer count = (Integer) sampleMap.get(sampleInt);
                        if (band.length() > 0) {
                            countnode.setAttribute("bands", band);
                        }
                        countnode.setAttribute("sample", prefix + HexString(sampleInt.intValue(), digits));
                        Node textnode = doc.createTextNode(count.toString());
                        countnode.appendChild(textnode);
                        parent.insertBefore(countnode, node);
                        // Preserve whitespace formatting between generated siblings.
                        if (sampleIt.hasNext() && prevSibling != null && prevSibling.getNodeType() == Node.TEXT_NODE) {
                            parent.insertBefore(prevSibling.cloneNode(false), node);
                        }
                    }
                    parent.removeChild(node);
                    // Fortify Mod: If the Iterator sampleIt is empty, then countnode will never be set.
                    // This would result in a null dereference of node at the end of the loop. Test for
                    // this case and mitigate the damage.
                    if (countnode == null) {
                        countnode = (Element) node;
                        jlogger.log(Level.SEVERE, "Error processing image node");
                    }
                    // Continue sibling traversal from the last node we inserted.
                    node = countnode;
                } else {
                    // Specific sample requested: append its count (0 when never seen).
                    Integer count = (Integer) sampleMap.get(Integer.decode(sample));
                    if (count == null) count = new Integer(0);
                    Node textnode = doc.createTextNode(count.toString());
                    node.appendChild(textnode);
                }
            }
            node = node.getNextSibling();
        }
    }
}
public class Cluster { /** * free the slots . */ public void freeSlots ( Collection < WorkerSlot > slots ) { } }
if ( slots != null ) { for ( WorkerSlot slot : slots ) { this . freeSlot ( slot ) ; } }
public class EmptyCollector { /** * @ return zero list * @ see com . oath . cyclops . react . collectors . lazy . LazyResultConsumer # getResults ( ) */ @ Override public Collection < FastFuture < T > > getResults ( ) { } }
active . stream ( ) . forEach ( cf -> safeJoin . apply ( cf ) ) ; active . clear ( ) ; return new ArrayList < > ( ) ;
public class RectangleConstraintSolver { /** * Extracts a specific { @ link BoundingBox } from the domain of a { @ link RectangularRegion } . * @ param rect The { @ link RectangularRegion } from to extract the { @ link BoundingBox } . * @ return A specific { @ link BoundingBox } from the domain of a { @ link RectangularRegion } . */ public BoundingBox extractBoundingBoxesFromSTPs ( RectangularRegion rect ) { } }
Bounds xLB , xUB , yLB , yUB ; xLB = new Bounds ( ( ( AllenInterval ) rect . getInternalVariables ( ) [ 0 ] ) . getEST ( ) , ( ( AllenInterval ) rect . getInternalVariables ( ) [ 0 ] ) . getLST ( ) ) ; xUB = new Bounds ( ( ( AllenInterval ) rect . getInternalVariables ( ) [ 0 ] ) . getEET ( ) , ( ( AllenInterval ) rect . getInternalVariables ( ) [ 0 ] ) . getLET ( ) ) ; yLB = new Bounds ( ( ( AllenInterval ) rect . getInternalVariables ( ) [ 1 ] ) . getEST ( ) , ( ( AllenInterval ) rect . getInternalVariables ( ) [ 1 ] ) . getLST ( ) ) ; yUB = new Bounds ( ( ( AllenInterval ) rect . getInternalVariables ( ) [ 1 ] ) . getEET ( ) , ( ( AllenInterval ) rect . getInternalVariables ( ) [ 1 ] ) . getLET ( ) ) ; return new BoundingBox ( xLB , xUB , yLB , yUB ) ;
public class TimerView { /** * Check that the frame knows about the timer . */ public void checkFrameParticipation ( ) { } }
// Determine whether or not the timer should participate in // media ticks boolean participate = _host . isShowing ( ) ; // Start participating if necessary if ( participate && ! _participating ) { _fmgr . registerFrameParticipant ( this ) ; _participating = true ; } // Stop participating if necessary else if ( ! participate && _participating ) { _fmgr . removeFrameParticipant ( this ) ; _participating = false ; }
public class AtmosRequestMappingHandlerMapping { /** * Process all user scripting javascript files in configured location , then * url - handler mapping infos gotta be stored in memory . */ private void processAtmostRequestMappingInfo ( ) { } }
Context cx = Context . enter ( ) ; global = new Global ( cx ) ; // javascript library loading /* * List < String > modulePath = new ArrayList < String > ( ) ; * modulePath . add ( getServletContextPath ( ) + atmosLibraryLocation ) ; * global . installRequire ( cx , modulePath , false ) ; */ try { // optimization level - 1 means interpret mode cx . setOptimizationLevel ( - 1 ) ; if ( debugger == null ) { debugger = RhinoDebuggerFactory . create ( ) ; } // Debugger debugger = RhinoDebuggerFactory . create ( ) ; cx . setDebugger ( debugger , new Dim . ContextData ( ) ) ; atmosLibraryStream = getClass ( ) . getClassLoader ( ) . getResourceAsStream ( ATMOS_JS_FILE_NAME ) ; InputStreamReader isr = new InputStreamReader ( atmosLibraryStream ) ; // define Spring application context to context variable global . defineProperty ( "context" , getApplicationContext ( ) , 0 ) ; cx . evaluateReader ( global , isr , ATMOS_JS_FILE_NAME , 1 , null ) ; /* * execute all user scripting javascript files in configured * location , then url - handler informations gotta be stored in * memory . */ for ( String userSourceLocation : userSourceLocations ) { File dir = new File ( getServletContextPath ( ) + userSourceLocation ) ; if ( dir . isDirectory ( ) ) { String [ ] fileArray = dir . list ( ) ; for ( String fileName : fileArray ) { File jsFile = new File ( dir . getAbsolutePath ( ) + "/" + fileName ) ; if ( jsFile . isFile ( ) ) { FileReader reader = new FileReader ( jsFile ) ; global . defineProperty ( "mappingInfo" , handlerMappingInfoStorage , 0 ) ; cx . evaluateReader ( global , reader , fileName , 1 , null ) ; } } } else { FileReader reader = new FileReader ( dir ) ; global . defineProperty ( "mappingInfo" , handlerMappingInfoStorage , 0 ) ; cx . evaluateReader ( global , reader , dir . getName ( ) , 1 , null ) ; } } atmosLibraryStream . close ( ) ; } catch ( Exception ex ) { ex . printStackTrace ( ) ; }
public class MessageProtocol { /** * Return a copy of this message protocol with the charset set to the given charset . * @ param charset The charset to set . * @ return A copy of this message protocol . */ public MessageProtocol withCharset ( String charset ) { } }
return new MessageProtocol ( contentType , Optional . ofNullable ( charset ) , version ) ;
public class AnnisXmlContextHelper { /** * Adds the ANNIS specific { @ link ResourcePropertySource } sources to the * { @ link GenericApplicationContext } . * The annis - service . properties is loaded from the following locations ( in order ) : * $ ANNIS _ HOME / conf / * $ ANNIS _ CFG or / etc / annis / * ~ / . annis / * @ param ctx */ public static void prepareContext ( GenericApplicationContext ctx ) { } }
final String configFileName = "annis-service.properties" ; MutablePropertySources sources = ctx . getEnvironment ( ) . getPropertySources ( ) ; try { File fBase = new File ( Utils . getAnnisFile ( "conf/" + configFileName ) . getAbsolutePath ( ) ) ; if ( fBase . canRead ( ) && fBase . isFile ( ) ) { sources . addFirst ( new ResourcePropertySource ( "file:" + fBase . getCanonicalPath ( ) ) ) ; } String globalConfig = System . getenv ( "ANNIS_CFG" ) ; if ( globalConfig == null ) { globalConfig = "/etc/annis" ; } File fGlobal = new File ( globalConfig + "/" + configFileName ) ; if ( fGlobal . canRead ( ) && fGlobal . isFile ( ) ) { sources . addFirst ( new ResourcePropertySource ( "file:" + fBase . getCanonicalPath ( ) ) ) ; } String userConfig = System . getProperty ( "user.home" ) + "/.annis" ; File fUser = new File ( userConfig + "/" + configFileName ) ; if ( fUser . canRead ( ) && fUser . isFile ( ) ) { sources . addFirst ( new ResourcePropertySource ( "file:" + fUser . getCanonicalPath ( ) ) ) ; } } catch ( IOException ex ) { log . error ( "Could not load configuration" , ex ) ; }
public class InputElement {

    /**
     * Getter for quuid.
     * UIMA JCas generated accessor: reads the "quuid" string feature of this
     * annotation from the backing CAS. Do not edit by hand.
     * @generated
     */
    public String getQuuid() {
        // Verify the feature exists in the loaded type system before low-level access.
        if (InputElement_Type.featOkTst && ((InputElement_Type) jcasType).casFeat_quuid == null)
            jcasType.jcas.throwFeatMissing("quuid", "edu.cmu.lti.oaqa.framework.types.InputElement");
        // Low-level CAS read of the string value stored at this annotation's address.
        return jcasType.ll_cas.ll_getStringValue(addr, ((InputElement_Type) jcasType).casFeatCode_quuid);
    }
}
public class CharacterParser { public Match parseUntil ( String src , String delimiter ) { } }
return this . parseUntil ( src , delimiter , new Options ( ) ) ;
public class SimpleDBUtils {

    /**
     * Encodes a real integer value into a string by offsetting and zero-padding the
     * number up to the specified number of digits. Use this encoding method if the
     * data range set includes both positive and negative values.
     * @param number integer to be encoded
     * @param maxNumDigits maximum number of digits in the largest absolute value in the data set
     * @param offsetValue offset value; has to be greater than the absolute value of any
     *        negative number in the data set
     * @return string representation of the integer
     */
    public static String encodeRealNumberRange(int number, int maxNumDigits, int offsetValue) {
        // FIX: widen BEFORE adding — the previous int addition could overflow for large
        // number/offsetValue combinations before being stored in the long.
        long offsetNumber = (long) number + offsetValue;
        String longString = Long.toString(offsetNumber);
        // Left-pad with zeroes up to maxNumDigits (no padding if already longer).
        StringBuilder padded = new StringBuilder(Math.max(maxNumDigits, longString.length()));
        for (int i = longString.length(); i < maxNumDigits; i++) {
            padded.append('0');
        }
        return padded.append(longString).toString();
    }
}
public class ConfigUtils { /** * Loads the specified file as { @ link YamlConfiguration } . * @ param file the YAML configuration file to load * @ return the YAML configuration * @ throws DeployerConfigurationException if an error occurred */ public static YamlConfiguration loadYamlConfiguration ( File file ) throws DeployerConfigurationException { } }
try { try ( Reader reader = new BufferedReader ( new FileReader ( file ) ) ) { return doLoadYamlConfiguration ( reader ) ; } } catch ( Exception e ) { throw new DeployerConfigurationException ( "Failed to load YAML configuration at " + file , e ) ; }
public class JavaSyntax { /** * Returns the Java type of an optional field for the given { @ link ClassTemplateSpec } as a * Java source code string . * If the field is optional it is always represented as a * { @ link AndroidProperties . Optionality # REQUIRED _ FIELDS _ MAY _ BE _ ABSENT } type else it is * represented using the { @ link AndroidProperties . Optionality } for this instance . * @ param spec to get a Java type name for . * @ param optional indicates if the type is optional or not . * @ return a Java source code string identifying the given type . */ public String toOptionalType ( ClassTemplateSpec spec , boolean optional ) { } }
return toType ( spec , optional ? Optionality . REQUIRED_FIELDS_MAY_BE_ABSENT : androidProperties . optionality ) ;
public class JsonRpcRestClient { /** * { @ inheritDoc } */ @ Override public Object invoke ( String methodName , Object argument , Type returnType ) throws Throwable { } }
return invoke ( methodName , argument , returnType , new HashMap < String , String > ( ) ) ;
public class ResourceManager { /** * Checks all observed resources for changes to their { @ link File # lastModified } properties , * notifying their listeners if the files have been modified since the last call to this * method . */ public void checkForModifications ( ) { } }
for ( Iterator < Map . Entry < String , ObservedResource > > it = _observed . entrySet ( ) . iterator ( ) ; it . hasNext ( ) ; ) { Map . Entry < String , ObservedResource > entry = it . next ( ) ; ObservedResource resource = entry . getValue ( ) ; if ( resource . checkForModification ( entry . getKey ( ) ) ) { it . remove ( ) ; } }
public class DeviceUseStatement { /** * syntactic sugar */ public StringType addNotesElement ( ) { } }
StringType t = new StringType ( ) ; if ( this . notes == null ) this . notes = new ArrayList < StringType > ( ) ; this . notes . add ( t ) ; return t ;
public class TtlTimerTask { /** * Unwrap { @ link TtlTimerTask } to the original / underneath one . * Invoke { @ link # unwrap ( TimerTask ) } for each element in input collection . * This method is { @ code null } - safe , when input { @ code TimerTask } parameter is { @ code null } , return a empty list . * @ see # unwrap ( TimerTask ) * @ since 2.10.2 */ @ Nonnull public static List < TimerTask > unwraps ( @ Nullable Collection < ? extends TimerTask > tasks ) { } }
if ( null == tasks ) return Collections . emptyList ( ) ; List < TimerTask > copy = new ArrayList < TimerTask > ( ) ; for ( TimerTask task : tasks ) { if ( ! ( task instanceof TtlTimerTask ) ) copy . add ( task ) ; else copy . add ( ( ( TtlTimerTask ) task ) . getTimerTask ( ) ) ; } return copy ;
public class NatureLibrary { /** * 获得两个词性之间的频率 * @ param from * @ param to * @ return */ public static int getTwoNatureFreq ( Nature from , Nature to ) { } }
if ( from . index < 0 || to . index < 0 ) { return 0 ; } return NATURETABLE [ from . index ] [ to . index ] ;
public class ReplicationClientFactory { /** * Creates a view of this instance using the given API Key and sharing the same underlying resources . * Note that this method may return a new instance so the caller must use the returned value . */ public ReplicationClientFactory usingApiKey ( String apiKey ) { } }
if ( Objects . equal ( _apiKey , apiKey ) ) { return this ; } return new ReplicationClientFactory ( _jerseyClient , apiKey ) ;
public class XMLUtil { /** * Replies the node that corresponds to the specified path . * < p > The path is an ordered list of tag ' s names and ended by the name of * the desired node . * @ param document is the XML document to explore . * @ param constraint is the constraint that the replied element must respect . * @ param path is the list of names . * @ return the node or < code > null < / code > if it was not found in the document . */ @ Pure public static Element getElementMatching ( Node document , XMLConstraint constraint , String ... path ) { } }
assert document != null : AssertMessages . notNullParameter ( 0 ) ; assert constraint != null : AssertMessages . notNullParameter ( 1 ) ; return getElementMatching ( document , constraint , true , path ) ;
public class MockDAO { @ Override public < P extends ParaObject > String create ( P so ) { } }
return create ( Config . getRootAppIdentifier ( ) , so ) ;
public class Resource { /** * Set an enterprise field value . * @ param index field index * @ param value field value */ public void setEnterpriseDate ( int index , Date value ) { } }
set ( selectField ( ResourceFieldLists . ENTERPRISE_DATE , index ) , value ) ;
public class XMLJaspiConfiguration {

    /**
     * Return a Java representation of the JASPI persistent providers defined in the
     * given configuration file, or null if the object returned by JAXB is not a
     * JaspiConfig instance or an exception is thrown by AccessController.doPrivileged.
     * @param configFile the XML file to unmarshal; a null value is currently not
     *        handled (see TODO below) and would fail inside the privileged action
     * @return the unmarshalled configuration, or null
     * @throws PrivilegedActionException if unmarshalling fails inside the privileged block
     */
    synchronized private JaspiConfig readConfigFile(final File configFile) throws PrivilegedActionException {
        // Entry/exit tracing when enabled.
        if (tc.isEntryEnabled())
            Tr.entry(tc, "readConfigFile", new Object[] { configFile });
        if (configFile == null) {
            // TODO handle persistence
            // String msg = MessageFormatHelper.getFormattedMessage(msgBundle, AdminConstants.MSG_JASPI_PERSISTENT_FILE, new Object[] { PersistenceManager.JASPI_CONFIG });
            // throw new RuntimeException(msg);
        }
        // Unmarshal inside a privileged action so JAXB runs with this code's
        // permissions rather than the caller's.
        PrivilegedExceptionAction<JaspiConfig> unmarshalFile = new PrivilegedExceptionAction<JaspiConfig>() {
            @Override
            public JaspiConfig run() throws Exception {
                JaspiConfig cfg = null;
                JAXBContext jc = JAXBContext.newInstance(JaspiConfig.class);
                Object obj = jc.createUnmarshaller().unmarshal(configFile);
                // Only accept the expected root type; anything else yields null.
                if (obj instanceof JaspiConfig) {
                    cfg = (JaspiConfig) obj;
                }
                return cfg;
            }
        };
        JaspiConfig jaspi = AccessController.doPrivileged(unmarshalFile);
        if (tc.isEntryEnabled())
            Tr.exit(tc, "readConfigFile", jaspi);
        return jaspi;
    }
}
public class DefaultGrailsApplication { /** * Override property access and hit on xxxxClasses to return class arrays of artefacts . * @ param propertyName The name of the property , if it ends in * Classes then match and invoke internal ArtefactHandler * @ return All the artifacts or delegate to super . getProperty */ @ Override public Object getProperty ( String propertyName ) { } }
// look for getXXXXClasses final Matcher match = GETCLASSESPROP_PATTERN . matcher ( propertyName ) ; // find match match . find ( ) ; if ( match . matches ( ) ) { String artefactName = GrailsNameUtils . getClassNameRepresentation ( match . group ( 1 ) ) ; if ( artefactHandlersByName . containsKey ( artefactName ) ) { return getArtefacts ( artefactName ) ; } } return super . getProperty ( propertyName ) ;
public class BlackoutSlateMarshaller { /** * Marshall the given parameter object . */ public void marshall ( BlackoutSlate blackoutSlate , ProtocolMarshaller protocolMarshaller ) { } }
if ( blackoutSlate == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( blackoutSlate . getBlackoutSlateImage ( ) , BLACKOUTSLATEIMAGE_BINDING ) ; protocolMarshaller . marshall ( blackoutSlate . getNetworkEndBlackout ( ) , NETWORKENDBLACKOUT_BINDING ) ; protocolMarshaller . marshall ( blackoutSlate . getNetworkEndBlackoutImage ( ) , NETWORKENDBLACKOUTIMAGE_BINDING ) ; protocolMarshaller . marshall ( blackoutSlate . getNetworkId ( ) , NETWORKID_BINDING ) ; protocolMarshaller . marshall ( blackoutSlate . getState ( ) , STATE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ExtendedConfigurationImpl { /** * Updates ConfigurationAdmin ' s cache with current config properties . * If replaceProp is set to true , current config properties is replace with * the given properties before caching * and the internal pid - to - config table is updated to reflect the new config * properties . * @ param properties * @ param replaceProp * @ param isMetaTypeProperties * true if properties is MetaType converted properties * @ param newUniques * @ throws IOException */ @ Override public void updateCache ( Dictionary < String , Object > properties , Set < ConfigID > references , Set < String > newUniques ) throws IOException { } }
lock . lock ( ) ; try { removeReferences ( ) ; setProperties ( properties ) ; this . references = references ; this . uniqueVariables = newUniques ; caFactory . getConfigurationStore ( ) . saveConfiguration ( pid , this ) ; changeCount . incrementAndGet ( ) ; addReferences ( ) ; sendEvents = true ; } finally { lock . unlock ( ) ; }
public class HebrewTime {

    /**
     * Determines the Hebrew time for a given moment at the local solar time of the
     * given position. The length of a temporal hour depends on geographic latitude and
     * the seasonal change of day length; in polar regions the Hebrew time cannot be
     * determined when the sun does not rise or set (the function then yields an empty
     * Optional).
     * @param geoLocation the geographical position as basis of the solar time
     * @return function which maps a moment to hebrew time
     * @see #now(SolarTime)
     * @see #on(HebrewCalendar, SolarTime)
     */
    public static ChronoFunction<Moment, Optional<HebrewTime>> at(SolarTime geoLocation) {
        return (moment) -> {
            // Convert to the local mean time at the observer's longitude.
            ZonalOffset offset = ZonalOffset.atLongitude(new BigDecimal(geoLocation.getLongitude()));
            PlainTimestamp tsp = moment.toZonalTimestamp(offset); // local mean time
            Optional<Moment> sunset = tsp.toDate().get(geoLocation.sunset());
            if (sunset.isPresent()) {
                Optional<Moment> sunrise;
                ClockCycle cycle = null;
                // t1/t2 bound the half-day (day or night) containing the moment.
                Moment t1 = null;
                Moment t2 = null;
                if (moment.isBefore(sunset.get())) {
                    sunrise = tsp.toDate().get(geoLocation.sunrise());
                    if (sunrise.isPresent()) {
                        if (moment.isBefore(sunrise.get())) {
                            // Before today's sunrise: the night began at yesterday's sunset.
                            sunset = tsp.toDate().minus(1, CalendarUnit.DAYS).get(geoLocation.sunset());
                            if (sunset.isPresent()) {
                                cycle = ClockCycle.NIGHT;
                                t1 = sunset.get();
                                t2 = sunrise.get();
                            }
                        } else {
                            // Between sunrise and sunset: the day cycle.
                            cycle = ClockCycle.DAY;
                            t1 = sunrise.get();
                            t2 = sunset.get();
                        }
                    }
                } else {
                    // After sunset: the night cycle ends at tomorrow's sunrise.
                    sunrise = tsp.toDate().plus(1, CalendarUnit.DAYS).get(geoLocation.sunrise());
                    if (sunrise.isPresent()) {
                        cycle = ClockCycle.NIGHT;
                        t1 = sunset.get();
                        t2 = sunrise.get();
                    }
                }
                if (cycle != null && t1 != null && t2 != null) {
                    // Length of the half-day and the elapsed part, both in nanoseconds.
                    long halfDay = t1.until(t2, TimeUnit.SECONDS) * 1_000_000_000L + t2.getNanosecond() - t1.getNanosecond();
                    long delta = t1.until(moment, TimeUnit.SECONDS) * 1_000_000_000L + moment.getNanosecond() - t1.getNanosecond();
                    // Scale the elapsed fraction to 12 temporal hours of PARTS_IN_HOUR halakim each.
                    double halakim = (12.0 * PARTS_IN_HOUR * delta) / halfDay;
                    int hourOfCycle = (int) Math.floor(halakim / PARTS_IN_HOUR);
                    int partOfHour = (int) Math.floor(halakim - hourOfCycle * PARTS_IN_HOUR);
                    // Hour 0 is rendered as 12 in this clock representation.
                    return Optional.of(new HebrewTime(cycle, ((hourOfCycle == 0) ? 12 : hourOfCycle), partOfHour));
                }
            }
            // Sun never set (or a needed event was absent): Hebrew time is undefined here.
            return Optional.empty();
        };
    }
}
public class AuthenticationWsDelegate {

    /**
     * Terminates a session.
     * @param sessionId a session ID (only used in the log message here; the request
     *        itself does not carry it explicitly)
     * @throws DebugWsException
     */
    public void logout(String sessionId) throws DebugWsException {
        this.logger.finer("Logging out... Session ID = " + sessionId);
        WebResource path = this.resource.path(UrlConstants.AUTHENTICATION).path("s");
        // NOTE(review): this issues a GET on ".../s"; confirm the server treats that as
        // a logout (a DELETE might be expected) and that sessionId need not be sent.
        ClientResponse response = this.wsClient.createBuilder(path).get(ClientResponse.class);
        this.logger.finer(String.valueOf(response.getStatusInfo()));
        // Clear the locally cached session ID regardless of the response status.
        this.wsClient.setSessionId(null);
    }
}
public class CxxPreprocessor {

    /**
     * Creates a temporary unitMacros map to serve as the active macro storage while
     * parsing forced includes. When parsing is over, the resulting macros are extracted
     * and unitMacros is destroyed; fixedMacros becomes the active macro map again.
     * @param configuredMacros externally configured macros layered over the predefined
     *        unit macros before the forced includes are parsed
     * @return the resulting high-priority macro map, or an empty map when the
     *         configuration has no compilation-unit settings
     */
    private Map<String, Macro> parsePredefinedUnitMacros(Map<String, Macro> configuredMacros) {
        // Must only run during construction and before unitMacros exists.
        if (!ctorInProgress || (unitMacros != null)) {
            throw new IllegalStateException("Preconditions for initial fill-out of predefinedUnitMacros were violated");
        }
        if (conf.getCompilationUnitSourceFiles().isEmpty() && (conf.getGlobalCompilationUnitSettings() == null)) {
            // configuration doesn't contain any settings for compilation units;
            // CxxPreprocessor will use fixedMacros only
            return Collections.emptyMap();
        }
        unitMacros = new MapChain<>();
        // Sanity check: creating unitMacros must have switched the active macro map.
        if (getMacros() != unitMacros) {
            throw new IllegalStateException("expected unitMacros as active macros map");
        }
        try {
            getMacros().setHighPrio(true);
            getMacros().putAll(Macro.UNIT_MACROS);
            getMacros().putAll(configuredMacros);
            parseForcedIncludes();
            // Snapshot the results before unitMacros is torn down below.
            final HashMap<String, Macro> result = new HashMap<>(unitMacros.getHighPrioMap());
            return result;
        } finally {
            getMacros().setHighPrio(false); // just for the symmetry
            unitMacros = null; // remove unitMacros, switch getMacros() to fixedMacros
        }
    }
}
public class TypesafeConfigUtils {

    /**
     * Get a configuration as duration (parses special strings like "10s"). Return {@code null}
     * if missing, wrong type or bad value.
     *
     * @param config the configuration to read from
     * @param path the path of the value within the configuration
     * @return an {@link Optional} with the duration, empty when getDuration yields null
     */
    public static Optional<Duration> getDurationOptional( Config config, String path ) {
        // getDuration is expected to return null on missing/bad values rather than
        // throwing -- TODO confirm against its implementation.
        return Optional.ofNullable( getDuration( config, path ) );
    }
}
public class RpmParser {

    /**
     * Finds the yumdb folder among a collection of candidate folder paths.
     *
     * @param yumDbFolders    candidate folder paths to inspect
     * @param yumDbFolderPath the path fragment a matching folder must contain
     * @return the first candidate that is a non-empty directory whose path contains
     *         {@code yumDbFolderPath}, or {@code null} when none matches
     */
    public File checkFolders( Collection<String> yumDbFolders, String yumDbFolderPath ) {
        if ( !yumDbFolders.isEmpty() ) {
            for ( String folderPath : yumDbFolders ) {
                File file = new File( folderPath );
                // listFiles() returns null when the path does not exist or is not a
                // directory; the original dereferenced it unconditionally and could NPE.
                File[] children = file.listFiles();
                if ( children != null && children.length > 0 && folderPath.contains( yumDbFolderPath ) ) {
                    return file;
                }
            }
        }
        return null;
    }
}
public class DocumentImpl { /** * Overload of the { @ link # getElement ( Node ) } method using first node from given W3C DOM nodes list . Returns null if * < code > nodeList < / code > parameter is empty . * @ param nodeList native DOM nodes list , possible empty . * @ return element instance or null . * @ throws IllegalArgumentException if nodes list parameter is null . */ Element getElement ( NodeList nodeList ) { } }
Params . notNull ( nodeList , "Nodes list" ) ; if ( nodeList . getLength ( ) == 0 ) { return null ; } return getElement ( nodeList . item ( 0 ) ) ;
public class TransformerIdentityImpl {

    /**
     * Receive notification of the start of a Namespace mapping.
     *
     * <p>By default, do nothing. Application writers may override this
     * method in a subclass to take specific actions at the start of
     * each Namespace prefix scope (such as storing the prefix mapping).</p>
     *
     * @param prefix The Namespace prefix being declared.
     * @param uri The Namespace URI mapped to the prefix.
     * @throws org.xml.sax.SAXException Any SAX exception, possibly
     *            wrapping another exception.
     * @see org.xml.sax.ContentHandler#startPrefixMapping
     */
    public void startPrefixMapping( String prefix, String uri ) throws SAXException {
        // Ensure startDocument has been forwarded before any other SAX event.
        flushStartDoc();
        m_resultContentHandler.startPrefixMapping( prefix, uri );
    }
}
public class BaseChronology {

    /**
     * Gets the values of a period from an interval.
     *
     * @param period the period instant to use
     * @param startInstant the start instant of an interval to query
     * @param endInstant the end instant of the interval to query
     * @return the values of the period extracted from the interval
     */
    public int[] get( ReadablePeriod period, long startInstant, long endInstant ) {
        int size = period.size();
        int[] values = new int[size];
        // An empty interval trivially yields all-zero field values.
        if ( startInstant != endInstant ) {
            // Fields are consumed in declaration order: each field takes as much of the
            // remaining interval as it can, and startInstant is advanced so subsequent
            // fields only see the leftover.
            for ( int i = 0; i < size; i++ ) {
                DurationField field = period.getFieldType( i ).getField( this );
                int value = field.getDifference( endInstant, startInstant );
                if ( value != 0 ) {
                    startInstant = field.add( startInstant, value );
                }
                values[i] = value;
            }
        }
        return values;
    }
}
public class InitIfSubFieldHandler { /** * Move the physical binary data to this field . * If there is not SubFileFilter , then don ' t allow the field to be inited . * @ param objData the raw data to set the basefield to . * @ param bDisplayOption If true , display the change . * @ param iMoveMode The type of move being done ( init / read / screen ) . * @ return The error code ( or NORMAL _ RETURN if okay ) . */ public int doSetData ( Object fieldPtr , boolean bDisplayOption , int iMoveMode ) { } }
int iErrorCode = DBConstants . NORMAL_RETURN ; boolean bSubExists = ( this . getOwner ( ) . getRecord ( ) . getListener ( SubFileFilter . class . getName ( ) ) != null ) ; if ( bSubExists ) iErrorCode = super . doSetData ( fieldPtr , bDisplayOption , iMoveMode ) ; return iErrorCode ;
public class DataCubeAPI {

    /**
     * Gets the daily statistics for mass-sent articles.
     *
     * @param day the date to query; used as both begin_date and end_date
     * @return the daily article summary data
     */
    public GetArticleSummaryResponse getArticleSummary( Date day ) {
        BeanUtil.requireNonNull( day, "day is null" );
        GetArticleSummaryResponse response = null;
        // '#' is presumably a placeholder substituted with the real access token by
        // executePost -- TODO confirm.
        String url = BASE_API_URL + "datacube/getarticlesummary?access_token=#";
        // This API accepts only a single-day window, so begin and end are the same date.
        Map<String, String> param = new HashMap<String, String>();
        param.put( "begin_date", DATE_FORMAT.format( day ) );
        param.put( "end_date", DATE_FORMAT.format( day ) );
        String json = JSONUtil.toJson( param );
        BaseResponse r = executePost( url, json );
        // On success the response body appears to be carried in errmsg; on failure the
        // whole envelope is serialized so errcode/errmsg reach the parsed bean --
        // NOTE(review): confirm this matches the SDK's response handling convention.
        String resultJson = isSuccess( r.getErrcode() ) ? r.getErrmsg() : r.toJsonString();
        response = JSONUtil.toBean( resultJson, GetArticleSummaryResponse.class );
        return response;
    }
}
public class TypeResolverPostProcessor { /** * Create a lookup list for reference resolution . * < p > Type name resolution in the protocol buffer language works like C + + : first * the innermost scope is searched , then the next - innermost , and so on , with * each package considered to be " inner " to its parent package . */ public static Deque < String > createScopeLookupList ( UserTypeContainer container ) { } }
String namespace = container . getNamespace ( ) ; Deque < String > scopeLookupList = new ArrayDeque < > ( ) ; int end = 0 ; while ( end >= 0 ) { end = namespace . indexOf ( '.' , end ) ; if ( end >= 0 ) { end ++ ; String scope = namespace . substring ( 0 , end ) ; scopeLookupList . addFirst ( scope ) ; } } return scopeLookupList ;
public class CaptureActivityIntents { /** * Get the height of the scanning rectangle in pixels from { @ code Intent } . * @ param intent Target intent . It can be { @ code null } . * @ return Height of scanning rectangle in pixels if specified , or zero otherwise . */ public static int getHeightOfScanningRectangleInPxOrZero ( Intent intent ) { } }
if ( intent == null ) return 0 ; return intent . getIntExtra ( Intents . Scan . HEIGHT , 0 ) ;
public class DefaultDOManager {

    /**
     * Manages the INGEST process which includes validation of the ingest XML
     * file, deserialization of the XML into a Digital Object instance, setting
     * of properties on the object by the system (dates and states), PID
     * validation or generation, object registry functions, getting a writer for
     * the digital object, and ultimately writing the object to persistent
     * storage via the writer.
     *
     * @param cachedObjectRequired must be false; a cached context cannot supply a writer
     * @param context the invocation context (may carry a recovery PID)
     * @param in the input stream that is the XML ingest file for a digital object
     * @param format the format of the XML ingest file (e.g., FOXML, Fedora METS)
     * @param encoding the character encoding of the XML ingest file (e.g., UTF-8)
     * @param pid "new" if the system should generate a new PID for the object,
     *            otherwise the value of the additional pid parameter for ingests
     *            (may be null or any valid pid)
     */
    @Override
    public DOWriter getIngestWriter( boolean cachedObjectRequired, Context context, InputStream in, String format, String encoding, String pid ) throws ServerException {
        logger.debug( "Entered getIngestWriter" );
        DOWriter w = null;
        BasicDigitalObject obj = null;
        File tempFile = null;
        if ( cachedObjectRequired ) {
            throw new InvalidContextException( "A DOWriter is unavailable in a cached context." );
        } else {
            try {
                // CURRENT TIME:
                // Get the current time to use for created dates on object
                // and object components (if they are not already there).
                Date nowUTC = Server.getCurrentDate( context );
                // TEMP STORAGE:
                // write ingest input stream to a temporary file
                tempFile = File.createTempFile( "fedora-ingest-temp", ".xml" );
                logger.debug( "Creating temporary file for ingest: {}", tempFile.toString() );
                StreamUtility.pipeStream( in, new FileOutputStream( tempFile ), 4096 );
                // VALIDATION:
                // perform initial validation of the ingest submission file
                logger.debug( "Validation (ingest phase)" );
                m_validator.validate( tempFile, format, m_ingestValidationLevel, DOValidator.PHASE_INGEST );
                // DESERIALIZE:
                // deserialize the ingest input stream into a digital object instance
                obj = new BasicDigitalObject();
                obj.setNew( true );
                logger.debug( "Deserializing from format: {}", format );
                m_translator.deserialize( new FileInputStream( tempFile ), obj, format, encoding, DOTranslationUtility.DESERIALIZE_INSTANCE );
                // SET OBJECT PROPERTIES:
                logger.debug( "Setting object/component states and create dates if unset" );
                // set object state to "A" (Active) if not already set
                if ( obj.getState() == null || obj.getState().isEmpty() ) {
                    obj.setState( "A" );
                }
                // set object create date to UTC if not already set
                if ( obj.getCreateDate() == null ) {
                    obj.setCreateDate( nowUTC );
                }
                // set object last modified date to UTC
                obj.setLastModDate( nowUTC );
                // SET DATASTREAM PROPERTIES...
                Iterator<String> dsIter = obj.datastreamIdIterator();
                while ( dsIter.hasNext() ) {
                    for ( Datastream ds : obj.datastreams( dsIter.next() ) ) {
                        // Set create date to UTC if not already set
                        if ( ds.DSCreateDT == null ) {
                            ds.DSCreateDT = nowUTC;
                        }
                        // Set state to "A" (Active) if not already set
                        if ( ds.DSState == null || ds.DSState.isEmpty() ) {
                            ds.DSState = "A";
                        }
                        ds.DSChecksumType = Datastream.validateChecksumType( ds.DSChecksumType );
                    }
                }
                // SET MIMETYPE AND FORMAT_URIS FOR LEGACY OBJECTS' DATASTREAMS
                if ( FOXML1_0.uri.equals( format ) || FOXML1_0_LEGACY.equals( format ) || METS_EXT1_0.uri.equals( format ) || METS_EXT1_0_LEGACY.equals( format ) ) {
                    DigitalObjectUtil.updateLegacyDatastreams( obj );
                }
                // If the PID was supplied as additional parameter (see REST API),
                // make sure it doesn't conflict with the (optional) PID of the digital object
                if ( pid != null && pid.length() > 0 && !pid.equals( "new" ) ) {
                    if ( obj.getPid() != null && obj.getPid().length() > 0 ) {
                        if ( !pid.equals( obj.getPid() ) ) {
                            throw new GeneralException( "The PID of the digital object and the PID provided as parameter are different. Digital object: " + obj.getPid() + " parameter: " + pid );
                        }
                    } else {
                        obj.setPid( pid );
                    }
                }
                // PID VALIDATION:
                // validate and normalized the provided pid, if any
                if ( obj.getPid() != null && obj.getPid().length() > 0 ) {
                    obj.setPid( Server.getPID( obj.getPid() ).toString() );
                }
                // PID GENERATION:
                // have the system generate a PID if one was not provided
                if ( obj.getPid() != null && obj.getPid().indexOf( ":" ) != -1 && ( m_retainPIDs == null || m_retainPIDs.contains( obj.getPid().split( ":" )[0] ) ) ) {
                    logger.debug( "Stream contained PID with retainable namespace-id; will use PID from stream" );
                    try {
                        m_pidGenerator.neverGeneratePID( obj.getPid() );
                    } catch ( IOException e ) {
                        throw new GeneralException( "Error calling pidGenerator.neverGeneratePID(): " + e.getMessage() );
                    }
                } else {
                    // NOTE(review): pid may be null here (the javadoc allows it), in which
                    // case pid.equals("new") would NPE -- presumably callers always pass
                    // "new" or a valid pid when the stream PID isn't retainable; TODO confirm.
                    if ( pid.equals( "new" ) ) {
                        logger.debug( "Client wants a new PID" );
                        // yes... so do that, then set it in the obj.
                        String p = null;
                        try {
                            // If the context contains a recovery PID, use that.
                            // Otherwise, generate a new PID as usual.
                            if ( context instanceof RecoveryContext ) {
                                RecoveryContext rContext = ( RecoveryContext ) context;
                                p = rContext.getRecoveryValue( Constants.RECOVERY.PID.attributeId );
                            }
                            if ( p == null ) {
                                p = m_pidGenerator.generatePID( m_pidNamespace ).toString();
                            } else {
                                logger.debug( "Using new PID from recovery context" );
                                // Reserve the recovered PID so the generator never reissues it.
                                m_pidGenerator.neverGeneratePID( p );
                            }
                        } catch ( Exception e ) {
                            throw new GeneralException( "Error generating PID", e );
                        }
                        logger.info( "Generated new PID: {}", p );
                        obj.setPid( p );
                    } else {
                        logger.debug( "Client wants to use existing PID." );
                    }
                }
                logger.debug( "New object PID is {}", obj.getPid() );
                // WRITE LOCK:
                // ensure no one else can modify the object now
                getWriteLock( obj.getPid() );
                // CHECK REGISTRY:
                // ensure the object doesn't already exist
                if ( objectExists( obj.getPid() ) ) {
                    releaseWriteLock( obj.getPid() );
                    throw new ObjectExistsException( "The PID '" + obj.getPid() + "' already exists in the registry; the object can't be re-created." );
                }
                // GET DIGITAL OBJECT WRITER:
                // get an object writer configured with the DEFAULT export format
                logger.debug( "Getting new writer with default export format: {}", m_defaultExportFormat );
                logger.debug( "Instantiating a SimpleDOWriter" );
                w = new SimpleDOWriter( context, this, m_translator, m_defaultExportFormat, m_storageCharacterEncoding, obj );
                // DEFAULT DATASTREAMS:
                populateDC( context, obj, w, nowUTC );
                // DATASTREAM VALIDATION
                ValidationUtility.validateReservedDatastreams( w );
                // REGISTRY:
                // at this point the object is valid, so make a record
                // of it in the digital object registry
                registerObject( obj );
                return w;
            } catch ( IOException ioe ) {
                // NOTE(review): the lock release is guarded by w != null, so a failure
                // between getWriteLock(...) and the SimpleDOWriter construction (e.g. in
                // the ObjectExistsException path above, which releases explicitly) would
                // otherwise leak the lock -- presumably intentional; TODO confirm.
                if ( w != null ) {
                    releaseWriteLock( obj.getPid() );
                }
                throw new GeneralException( "Error reading/writing temporary " + "ingest file", ioe );
            } catch ( Exception e ) {
                if ( w != null ) {
                    releaseWriteLock( obj.getPid() );
                }
                if ( e instanceof ServerException ) {
                    ServerException se = ( ServerException ) e;
                    throw se;
                }
                throw new GeneralException( "Ingest failed: " + e.getClass().getName(), e );
            } finally {
                // Best-effort cleanup of the temporary ingest file.
                if ( tempFile != null ) {
                    logger.debug( "Finally, removing temp file" );
                    try {
                        tempFile.delete();
                    } catch ( Exception e ) {
                        // don't worry if it doesn't exist
                    }
                }
            }
        }
    }
}
public class RestletUtilMemoryRealm {

    /**
     * Sets the modifiable list of root groups. This method clears the current list and adds all
     * entries in the parameter list.
     *
     * @param rootGroups A list of root groups.
     */
    public void setRootGroups( final List<Group> rootGroups ) {
        // Synchronize on the backing list itself so the clear + addAll appears as one
        // atomic replacement to anyone else locking on the same list.
        synchronized ( this.getRootGroups() ) {
            // Identity check: if the caller passed our own backing list, clearing it
            // first would wipe the very elements we are about to re-add.
            if ( rootGroups != this.getRootGroups() ) {
                this.getRootGroups().clear();
                if ( rootGroups != null ) {
                    this.getRootGroups().addAll( rootGroups );
                }
            }
        }
    }
}
public class Signature {

    /**
     * Generates a fresh signing key pair.
     *
     * <p>NOTE(review): the original description ("Signs the message using the secret key...")
     * does not match this method; it only creates a key pair.
     *
     * @return a newly created key pair whose public and secret keys have been populated
     *         by {@code crypto_sign_keypair}
     */
    public static KeyPair keyPair() {
        KeyPair kp = new KeyPair();
        // The boolean flag's meaning (seeded vs. random generation?) is not visible
        // here -- TODO confirm against crypto_sign_keypair's signature.
        crypto_sign_keypair( kp.getPublicKey(), kp.getSecretKey(), false );
        return kp;
    }
}
public class VerificationOverTimeImpl {

    /**
     * Verify the given ongoing verification data, and confirm that it satisfies the delegate
     * verification mode before the full duration has passed.
     *
     * <p>In practice, this polls the delegate verification mode until it is satisfied. If it is
     * not satisfied once the full duration has passed, the last error returned by the delegate
     * verification mode will be thrown here in turn. This may be thrown early if the delegate is
     * unsatisfied and the verification mode is known to never recover from this situation
     * (e.g. {@link AtMost}).
     *
     * <p>If it is satisfied before the full duration has passed, behaviour is dependent on the
     * returnOnSuccess parameter given in the constructor. If true, this verification mode is
     * immediately satisfied once the delegate is. If false, this verification mode is not
     * satisfied until the delegate is satisfied and the full time has passed.
     *
     * @throws MockitoAssertionError if the delegate verification mode does not succeed before the timeout
     */
    public void verify( VerificationData data ) {
        AssertionError error = null;
        timer.start();
        // Poll the delegate until the window closes (or, with returnOnSuccess, until
        // the first success).
        while ( timer.isCounting() ) {
            try {
                delegate.verify( data );
                if ( returnOnSuccess ) {
                    return;
                } else {
                    // Currently satisfied: forget earlier failures, but keep polling for
                    // the remainder of the window (success must hold at the end).
                    error = null;
                }
            } catch ( MockitoAssertionError e ) {
                // handleVerifyException may rethrow immediately for modes that can
                // never recover (e.g. AtMost).
                error = handleVerifyException( e );
            } catch ( AssertionError e ) {
                error = handleVerifyException( e );
            }
        }
        // Window elapsed: rethrow the last failure if the delegate ended unsatisfied.
        if ( error != null ) {
            throw error;
        }
    }
}
public class ProjectExplorer {

    /**
     * Launch the application.
     *
     * @param args command line arguments.
     */
    public static void main( String[] args ) {
        // All Swing work must happen on the Event Dispatch Thread.
        EventQueue.invokeLater( new Runnable() {
            @Override
            public void run() {
                try {
                    // Apply the platform look-and-feel before any component is created.
                    UIManager.setLookAndFeel( UIManager.getSystemLookAndFeelClassName() );
                    ProjectExplorer window = new ProjectExplorer();
                    window.m_frame.setVisible( true );
                } catch ( Exception e ) {
                    // Startup failures have no UI to report into yet; dump to stderr.
                    e.printStackTrace();
                }
            }
        } );
    }
}
public class XExtensionConverter { /** * / * ( non - Javadoc ) * @ see com . thoughtworks . xstream . converters . Converter # marshal ( java . lang . Object , com . thoughtworks . xstream . io . HierarchicalStreamWriter , com . thoughtworks . xstream . converters . MarshallingContext ) */ public void marshal ( Object obj , HierarchicalStreamWriter writer , MarshallingContext context ) { } }
XExtension extension = ( XExtension ) obj ; writer . addAttribute ( "uri" , extension . getUri ( ) . toString ( ) ) ;
public class TrieNode {

    /**
     * The amount of words reachable from this node is cached.<br>
     * This should be called after an outside change had occurred and the amount was possibly changed.
     */
    private void calcNumWords() {
        // Memoized: skip the subtree walk while the cached count is still valid.
        if ( numWordsCalculated ) {
            return;
        }
        int words = 0;
        // The number of words reachable from this node is considered to be the number of words
        // reachable from its children, plus 1 if the node itself is a word.
        if ( value.isPresent() ) {
            words++;
        }
        for ( TrieNode<T> child : children.values() ) {
            // Recompute each child first so its cached numWords is up to date.
            child.calcNumWords();
            words += child.numWords;
        }
        numWords = words;
        numWordsCalculated = true;
    }
}
public class LBoolToLongFunctionBuilder {

    /**
     * One of ways of creating builder. This is possibly the least verbose way where compiler
     * should be able to guess the generic parameters.
     *
     * @param buildingFunction consumer that configures the freshly created builder
     * @return the function produced by the configured builder
     */
    @Nonnull
    public static LBoolToLongFunction boolToLongFunctionFrom( Consumer<LBoolToLongFunctionBuilder> buildingFunction ) {
        LBoolToLongFunctionBuilder builder = new LBoolToLongFunctionBuilder();
        // Let the caller register its cases/handlers on the builder before building.
        buildingFunction.accept( builder );
        return builder.build();
    }
}
public class UnitRequest {

    /**
     * Get the set. Note you must be sure with the generic type, or a class cast exception will
     * be thrown.
     *
     * @param key the argument name
     * @param <T> the generic type
     * @return the set representing the argument.
     */
    @SuppressWarnings( "unchecked" )
    public <T> Set<T> getSet( String key ) {
        // Unchecked by design: the element type T is the caller's promise; only the
        // Set container itself is converted/verified here.
        return Reflection.toType( get( key ), Set.class );
    }
}
public class PackageRevision {

    /**
     * Adds additional data related to the package revision.
     *
     * @param key for additional data
     * @param value for additional data
     * @throws InvalidPackageRevisionDataException if the key is null or empty
     */
    public void addData( String key, String value ) throws InvalidPackageRevisionDataException {
        // Reject null/empty keys before touching the data map.
        validateDataKey( key );
        data.put( key, value );
    }
}
public class DefaultGroovyMethods {

    /**
     * A helper method to allow lists to work with subscript operators.
     * <pre class="groovyTestCase">
     * def list = ["a", true]
     * {@code list[1..<1] = 5}
     * assert list == ["a", 5, true]
     * </pre>
     *
     * @param self a List
     * @param range the (in this case empty) subset of the list to set
     * @param value the values to put at the given sublist or a Collection of values
     * @since 1.0
     */
    public static void putAt( List self, EmptyRange range, Object value ) {
        RangeInfo info = subListBorders( self.size(), range );
        // The sublist view over an EmptyRange is zero-length, so clear() is a no-op and
        // the adds below become an insertion at the range's position in the parent list.
        List sublist = self.subList( info.from, info.to );
        sublist.clear();
        if ( value instanceof Collection ) {
            Collection col = ( Collection ) value;
            if ( col.isEmpty() ) return;
            sublist.addAll( col );
        } else {
            sublist.add( value );
        }
    }
}
public class TEEJBInvocationInfoBase {

    /**
     * Returns the given string, substituting the supplied default when it is null.
     *
     * @param str the string to convert, possibly null
     * @param NullDefaultStr the replacement used when {@code str} is null
     * @return {@code str} if non-null, otherwise {@code NullDefaultStr}
     */
    protected static String turnNullString2EmptyString( String str, String NullDefaultStr ) {
        if ( str != null ) {
            return str;
        }
        return NullDefaultStr;
    }
}
public class NumberSimplex {

    /**
     * Prints a line to the given stream, silently ignoring a null stream.
     *
     * @param out destination stream, may be null
     * @param line text to print
     */
    private void nullPrintln( PrintStream out, String line ) {
        if ( out == null ) {
            return;
        }
        out.println( line );
    }
}
public class S3StorageProvider {

    /**
     * {@inheritDoc}
     *
     * <p>Stores the given properties as the bucket's tagging configuration, preserving the
     * space creation date and encoding '@' as '+' so email-style usernames survive in tags.
     */
    protected void doSetSpaceProperties( String spaceId, Map<String, String> spaceProperties ) {
        log.debug( "setSpaceProperties(" + spaceId + ")" );
        // Will throw if bucket does not exist
        String bucketName = getBucketName( spaceId );
        Map<String, String> originalProperties;
        try {
            originalProperties = getAllSpaceProperties( spaceId );
        } catch ( NotFoundException e ) {
            // Likely adding a new space, so no existing properties yet.
            originalProperties = new HashMap<>();
        }
        // Set creation date: prefer the previously stored value, then the caller-supplied
        // value, then fall back to the bucket's actual creation date.
        String creationDate = originalProperties.get( PROPERTIES_SPACE_CREATED );
        if ( creationDate == null ) {
            creationDate = spaceProperties.get( PROPERTIES_SPACE_CREATED );
            if ( creationDate == null ) {
                creationDate = getBucketCreationDate( bucketName );
            }
        }
        spaceProperties.put( PROPERTIES_SPACE_CREATED, creationDate );
        // Handle @ symbol (change to +), to allow for email usernames in ACLs
        spaceProperties = replaceInMapValues( spaceProperties, "@", "+" );
        // Store properties; this replaces the bucket's entire tag set.
        BucketTaggingConfiguration tagConfig = new BucketTaggingConfiguration().withTagSets( new TagSet( spaceProperties ) );
        s3Client.setBucketTaggingConfiguration( bucketName, tagConfig );
    }
}
public class RuleOptionsImpl {

    /**
     * <!-- begin-user-doc -->
     * EMF-generated reflective setter: dispatches on the feature ID for features declared
     * on RuleOptions and defers everything else to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void eSet( int featureID, Object newValue ) {
        switch ( featureID ) {
            case SimpleAntlrPackage.RULE_OPTIONS__OPTIONS:
                setOptions( ( Options ) newValue );
                return;
            case SimpleAntlrPackage.RULE_OPTIONS__ELEMENT:
                setElement( ( RuleElement ) newValue );
                return;
        }
        // Unknown feature: let the superclass handle (or reject) it.
        super.eSet( featureID, newValue );
    }
}
public class FlatteningDeserializer {

    /**
     * Gets a module wrapping this serializer as an adapter for the Jackson ObjectMapper.
     *
     * @param mapper the object mapper for default deserializations
     * @return a simple module to be plugged onto Jackson ObjectMapper.
     */
    public static SimpleModule getModule( final ObjectMapper mapper ) {
        SimpleModule module = new SimpleModule();
        module.setDeserializerModifier( new BeanDeserializerModifier() {
            @Override
            public JsonDeserializer<?> modifyDeserializer( DeserializationConfig config, BeanDescription beanDesc, JsonDeserializer<?> deserializer ) {
                if ( BeanDeserializer.class.isAssignableFrom( deserializer.getClass() ) ) {
                    // Apply flattening deserializer on all POJO types.
                    return new FlatteningDeserializer( beanDesc.getBeanClass(), deserializer, mapper );
                } else {
                    // Non-bean deserializers (primitives, collections, ...) stay untouched.
                    return deserializer;
                }
            }
        } );
        return module;
    }
}
public class CmsMessageBundleEditorModel {

    /**
     * Initializes the locales that can be selected via the language switcher in the bundle editor.
     *
     * @return the locales for which keys can be edited.
     * @throws IllegalArgumentException for an unhandled bundle type
     */
    private Collection<Locale> initLocales() {
        Collection<Locale> locales = null;
        switch ( m_bundleType ) {
            case DESCRIPTOR:
                // Descriptor bundles are edited in exactly one fixed locale.
                locales = new ArrayList<Locale>( 1 );
                locales.add( Descriptor.LOCALE );
                break;
            case XML:
            case PROPERTY:
                // Content bundles may be edited in every locale configured for the resource.
                locales = OpenCms.getLocaleManager().getAvailableLocales( m_cms, m_resource );
                break;
            default:
                throw new IllegalArgumentException();
        }
        return locales;
    }
}
public class ScanCollectionDefault { /** * If the msLevel doesn ' t exist in this ScanCollectionDefault , returns null * @ return null , if msLevel doesn ' t exist . Actual number of scans otherwise . */ @ Override public Integer getScanCountAtMsLevel ( int msLevel ) { } }
TreeMap < Integer , IScan > msLevelScanMap = getMapMsLevel2index ( ) . get ( msLevel ) . getNum2scan ( ) ; if ( msLevelScanMap != null ) { return msLevelScanMap . size ( ) ; } return null ;
public class MessageHandler {

    /**
     * Appends a line of text to the log file.
     *
     * @param str the text to write; logLineBreak is appended after it
     */
    static void printToLogFile( String str ) {
        // try-with-resources flushes and closes the writer even on failure;
        // FileWriter(path, true) opens the log in append mode.
        try ( BufferedWriter bw = new BufferedWriter( new FileWriter( getLogPath(), true ) ) ) {
            bw.write( str + logLineBreak );
        } catch ( Throwable t ) {
            // Logging must never crash the caller; delegate the failure to showError
            // instead of propagating.
            showError( t );
        }
    }
}
public class GoogleCredentialsAccessTokenSupplier {

    /**
     * Attempt to load Google Credentials with specified scopes.
     * <ol>
     * <li>First check to see if the environment variable HELIOS_GOOGLE_CREDENTIALS is set
     * and points to a readable file</li>
     * <li>Otherwise check if Google Application Default Credentials (ADC) can be loaded</li>
     * </ol>
     * <p>Note that we use a special environment variable of our own in addition to any environment
     * variable that the ADC loading uses (GOOGLE_APPLICATION_CREDENTIALS) in case there is a need
     * for the user to use the latter env var for some other purpose.
     *
     * @param scopes the OAuth scopes to attach; when empty the raw credentials are returned
     * @return Return a {@link GoogleCredentials}
     * @throws IOException if reading the credentials file or loading ADC fails
     */
    private static GoogleCredentials getCredentialsWithScopes( final List<String> scopes ) throws IOException {
        GoogleCredentials credentials = null;
        // first check whether the environment variable is set
        final String googleCredentialsPath = System.getenv( "HELIOS_GOOGLE_CREDENTIALS" );
        if ( googleCredentialsPath != null ) {
            final File file = new File( googleCredentialsPath );
            // NOTE(review): a path that is set but does not exist falls through silently
            // to ADC rather than failing loudly -- presumably intentional; TODO confirm.
            if ( file.exists() ) {
                try ( final FileInputStream s = new FileInputStream( file ) ) {
                    credentials = GoogleCredentials.fromStream( s );
                    LOG.info( "Using Google Credentials from file: " + file.getAbsolutePath() );
                }
            }
        }
        // fallback to application default credentials
        if ( credentials == null ) {
            credentials = GoogleCredentials.getApplicationDefault();
            LOG.info( "Using Google Application Default Credentials" );
        }
        // Only wrap with scopes when some were requested.
        return scopes.isEmpty() ? credentials : credentials.createScoped( scopes );
    }
}
public class DocumentRootImpl {

    /**
     * <!-- begin-user-doc -->
     * EMF-generated setter: stores the value in the mixed feature map under the
     * TRUNCATED_NORMAL_DISTRIBUTION feature, replacing any previous entry.
     * <!-- end-user-doc -->
     * @generated
     */
    public void setTruncatedNormalDistribution( TruncatedNormalDistributionType newTruncatedNormalDistribution ) {
        ( ( FeatureMap.Internal ) getMixed() ).set( BpsimPackage.Literals.DOCUMENT_ROOT__TRUNCATED_NORMAL_DISTRIBUTION, newTruncatedNormalDistribution );
    }
}
public class LocalFileResource {

    /**
     * {@inheritDoc}
     */
    @Override
    public Iterator<String> getChildren() {
        // Return empty list if the wrapped file is null, if it isn't an existing
        // directory, or if we don't have a root. We will not resolve resources (or
        // traverse parent/child) if we aren't associated with a root.
        // NOTE(review): the comment mentions a null wrapped file, but the code relies on
        // type == Type.FILE to short-circuit before wrappedFile.isDirectory() is called --
        // presumably type is FILE whenever wrappedFile is null; TODO confirm, otherwise
        // this can NPE.
        if ( type == Type.FILE || !wrappedFile.isDirectory() || resourceRoot == null )
            return ResourceUtils.EMPTY_STRING_LIST.iterator();
        return ResourceUtils.getChildren( this, wrappedFile );
    }
}
public class StructuredQueryBuilder {

    /**
     * Matches the container specified by the constraint whose geospatial region appears within
     * one of the criteria regions.
     *
     * @param constraintName the constraint definition
     * @param operator the geospatial operator to be applied with the regions in the index and
     *        the criteria regions
     * @param regions the possible regions containing the point
     * @return the StructuredQueryDefinition for the geospatial constraint query
     */
    public StructuredQueryDefinition geospatialRegionConstraint( String constraintName, GeospatialOperator operator, Region... regions ) {
        // Validate the regions up front so a bad argument fails here, not at query time.
        checkRegions( regions );
        return new GeospatialRegionConstraintQuery( constraintName, operator, regions );
    }
}
public class BinaryString {

    /**
     * Concatenates input strings together into a single string using the separator.
     * A null input is skipped. For example, concat(",", "a", null, "c") would yield "a,c".
     */
    public static BinaryString concatWs( BinaryString separator, Iterable<BinaryString> inputs ) {
        // A null or empty separator degrades to plain concatenation.
        if ( null == separator || EMPTY_UTF8.equals( separator ) ) {
            return concat( inputs );
        }
        separator.ensureMaterialized();
        // First pass: measure. (inputs is traversed twice, so it must be re-iterable.)
        int numInputBytes = 0; // total number of bytes from the inputs
        int numInputs = 0; // number of non-null inputs
        for ( BinaryString input : inputs ) {
            if ( input != null ) {
                input.ensureMaterialized();
                numInputBytes += input.sizeInBytes;
                numInputs++;
            }
        }
        if ( numInputs == 0 ) {
            // Return an empty string if there is no input, or all the inputs are null.
            return EMPTY_UTF8;
        }
        // Allocate a new byte array, and copy the inputs one by one into it.
        // The size of the new array is the size of all inputs, plus the separators.
        final byte[] result = new byte[numInputBytes + ( numInputs - 1 ) * separator.sizeInBytes];
        int offset = 0;
        int j = 0; // number of non-null inputs copied so far
        for ( BinaryString input : inputs ) {
            if ( input != null ) {
                int len = input.sizeInBytes;
                SegmentsUtil.copyToBytes( input.segments, input.offset, result, offset, len );
                offset += len;
                j++;
                // Add separator if this is not the last (non-null) input.
                if ( j < numInputs ) {
                    SegmentsUtil.copyToBytes( separator.segments, separator.offset, result, offset, separator.sizeInBytes );
                    offset += separator.sizeInBytes;
                }
            }
        }
        return fromBytes( result );
    }
}
public class CouchbaseSchemaManager {

    /**
     * (non-Javadoc)
     * @see com.impetus.kundera.configure.schema.api.AbstractSchemaManager#update(java.util.List)
     */
    @Override
    protected void update( List<TableInfo> tableInfos ) {
        // Couchbase only needs the bucket (plus its id index) to exist; there is no
        // table-level schema to update, so tableInfos is not inspected.
        if ( !clusterManager.hasBucket( databaseName ) ) {
            addBucket( databaseName );
            createIdIndex( databaseName );
        }
    }
}
public class Command {

    /**
     * Extract an int array from a CORBA Any object.
     * Remember that the TANGO DevVarLongArray type is mapped to the java int array type.
     *
     * @param in The CORBA Any object
     * @return The extracted int array
     * @exception DevFailed If the Any object does not contain a data of the
     *            waited type.
     * Click <a href="../../tango_basic/idl_html/Tango.html#DevFailed">here</a> to read
     * <b>DevFailed</b> exception specification
     */
    public int[] extract_DevVarLongArray( Any in ) throws DevFailed {
        int[] data = null;
        try {
            data = DevVarLongArrayHelper.extract( in );
        } catch ( BAD_OPERATION ex ) {
            // Translate the CORBA type mismatch into a Tango DevFailed.
            throw_bad_type( "DevVarLongArray" );
        }
        return data;
    }
}
public class SampleWhackyColorFilter {

    /**
     * Creates a HUE adjustment ColorFilter +-180.
     * http://groups.google.com/group/android-developers/browse_thread/thread/9e215c83c3819953
     * http://gskinner.com/blog/archives/2007/12/colormatrix_cla.html
     *
     * @param value degrees to shift the hue.
     * @return a color filter applying the hue rotation
     */
    public static ColorFilter adjustHue( float value ) {
        ColorMatrix cm = new ColorMatrix();
        // Delegate to the overload that mutates the matrix in place.
        adjustHue( cm, value );
        return new ColorMatrixColorFilter( cm );
    }
}
public class UrlUtils { /** * returns the value of an url parameter . * @ param aUrl the url * @ param aParameterName the name of the url parameter * @ return the parameter value */ public static Optional < String > getUrlParameterValue ( final String aUrl , final String aParameterName ) throws UrlParseException { } }
if ( aUrl != null ) { Map < String , String > parameters = getUrlParameters ( aUrl ) ; if ( parameters . containsKey ( aParameterName ) ) { return Optional . of ( parameters . get ( aParameterName ) ) ; } } return Optional . empty ( ) ;
public class Mode {

    /**
     * Look for element actions specifically specified for this namespace. If the current mode
     * does not have actions for that namespace look at base modes. If the actions are defined
     * in a base mode we need to get a copy of those actions associated with this mode, so we
     * call changeCurrentMode on them.
     *
     * @param ns The namespace
     * @return A set of element actions.
     */
    private ActionSet getElementActionsExplicit( String ns ) {
        // Fast path: previously resolved (and cached) actions for this exact namespace.
        ActionSet actions = ( ActionSet ) elementMap.get( ns );
        if ( actions == null ) {
            // iterate namespace specifications.
            for ( Enumeration e = nssElementMap.keys(); e.hasMoreElements() && actions == null; ) {
                NamespaceSpecification nssI = ( NamespaceSpecification ) e.nextElement();
                // If a namespace specification covers the current namespace URI then we get those actions.
                if ( nssI.covers( ns ) ) {
                    actions = ( ActionSet ) nssElementMap.get( nssI );
                }
            }
            // Store them in the element Map for faster access next time.
            if ( actions != null ) {
                elementMap.put( ns, actions );
            }
        }
        // Look into the included modes
        if ( actions == null && includedModes != null ) {
            Iterator i = includedModes.iterator();
            while ( actions == null && i.hasNext() ) {
                Mode includedMode = ( Mode ) i.next();
                actions = includedMode.getElementActionsExplicit( ns );
            }
            // Inherited actions are rebound to this mode and cached locally.
            if ( actions != null ) {
                actions = actions.changeCurrentMode( this );
                elementMap.put( ns, actions );
            }
        }
        // No actions specified, look into the base mode.
        if ( actions == null && baseMode != null ) {
            actions = baseMode.getElementActionsExplicit( ns );
            if ( actions != null ) {
                actions = actions.changeCurrentMode( this );
                elementMap.put( ns, actions );
            }
        }
        // cancelNestedActions suppresses whatever was found; note this happens AFTER the
        // (possibly cancelled) actions have been cached in elementMap above.
        if ( actions != null && actions.getCancelNestedActions() ) {
            actions = null;
        }
        return actions;
    }
}
public class AuthorizationImpl { /** * Returns < code > IAuthorizationPrincipals < / code > that have < code > IPermissions < / code > for the * given owner , activity and target . * @ return IAuthorizationPrincipal [ ] * @ param owner * @ param activity * @ param target */ public IAuthorizationPrincipal [ ] getAuthorizedPrincipals ( String owner , String activity , String target ) throws AuthorizationException { } }
IPermission [ ] permissions = getPermissionsForOwner ( owner , activity , target ) ; return getPrincipalsFromPermissions ( permissions ) ;
public class StreamEx { /** * Drains the stream content into the supplied collection . * This is a < a href = " package - summary . html # StreamOps " > terminal < / a > * operation . * The stream content is added into the collection using either * { @ link Collection # add ( Object ) } or { @ link Collection # addAll ( Collection ) } * method . * @ param < C > type of the resulting collection * @ param collection a mutable collection to add new elements into * @ return the supplied collection , updated from this stream * @ since 0.6.3 */ public < C extends Collection < ? super T > > C into ( C collection ) { } }
if ( isParallel ( ) ) { @ SuppressWarnings ( "unchecked" ) List < T > list = Arrays . asList ( ( T [ ] ) toArray ( ) ) ; collection . addAll ( list ) ; } else { Spliterator < T > spltr = spliterator ( ) ; if ( collection instanceof ArrayList ) { long size = spltr . getExactSizeIfKnown ( ) ; if ( size >= 0 && size < Integer . MAX_VALUE - collection . size ( ) ) ( ( ArrayList < ? > ) collection ) . ensureCapacity ( ( int ) ( collection . size ( ) + size ) ) ; } spltr . forEachRemaining ( collection :: add ) ; } return collection ;
public class FunDapter { /** * Use this method to enable filtering in the adapter . * @ param filter - a filter implementation for your adapter . */ public void initFilter ( FunDapterFilter < T > filter ) { } }
if ( filter == null ) throw new IllegalArgumentException ( "Cannot pass a null filter to FunDapter" ) ; this . funDapterFilter = filter ; mFilter = new Filter ( ) { @ Override protected void publishResults ( CharSequence constraint , FilterResults results ) { @ SuppressWarnings ( "unchecked" ) List < T > list = ( List < T > ) results . values ; if ( results . count == 0 ) { resetData ( ) ; } else { mDataItems = list ; } notifyDataSetChanged ( ) ; } @ Override protected FilterResults performFiltering ( CharSequence constraint ) { FilterResults results = new FilterResults ( ) ; if ( constraint == null || constraint . length ( ) == 0 ) { // No constraint - no point in filtering . results . values = mOrigDataItems ; results . count = mOrigDataItems . size ( ) ; } else { // Perform the filtering operation List < T > filter = funDapterFilter . filter ( constraint . toString ( ) , mOrigDataItems ) ; results . count = filter . size ( ) ; results . values = filter ; } return results ; } } ;
public class GlobalRequestProcessorStats { /** * Updates stats . * @ param aRequestCount request count * @ param aMaxTime max time * @ param aBytesReceived bytes received * @ param aBytesSent bytes sent * @ param aProcessingTime processing time * @ param aErrorCount error count */ public void update ( long aRequestCount , long aMaxTime , long aBytesReceived , long aBytesSent , long aProcessingTime , long aErrorCount ) { } }
requestCount . setValueAsLong ( aRequestCount ) ; maxTime . setValueAsLong ( aMaxTime ) ; bytesReceived . setValueAsLong ( aBytesReceived ) ; bytesSent . setValueAsLong ( aBytesSent ) ; processingTime . setValueAsLong ( aProcessingTime ) ; errorCount . setValueAsLong ( aErrorCount ) ;
public class StringUtils { /** * Escapes all necessary characters in the CharSequence so that it can be used * in an XML doc . * @ param input the CharSequence to escape . * @ return the string with appropriate characters escaped . */ private static CharSequence escapeForXml ( final CharSequence input , final XmlEscapeMode xmlEscapeMode ) { } }
if ( input == null ) { return null ; } final int len = input . length ( ) ; final StringBuilder out = new StringBuilder ( ( int ) ( len * 1.3 ) ) ; CharSequence toAppend ; char ch ; int last = 0 ; int i = 0 ; while ( i < len ) { toAppend = null ; ch = input . charAt ( i ) ; switch ( xmlEscapeMode ) { case safe : switch ( ch ) { case '<' : toAppend = LT_ENCODE ; break ; case '>' : toAppend = GT_ENCODE ; break ; case '&' : toAppend = AMP_ENCODE ; break ; case '"' : toAppend = QUOTE_ENCODE ; break ; case '\'' : toAppend = APOS_ENCODE ; break ; default : break ; } break ; case forAttribute : // No need to escape ' > ' for attributes . switch ( ch ) { case '<' : toAppend = LT_ENCODE ; break ; case '&' : toAppend = AMP_ENCODE ; break ; case '"' : toAppend = QUOTE_ENCODE ; break ; case '\'' : toAppend = APOS_ENCODE ; break ; default : break ; } break ; case forAttributeApos : // No need to escape ' > ' and ' " ' for attributes using ' \ ' ' as quote . switch ( ch ) { case '<' : toAppend = LT_ENCODE ; break ; case '&' : toAppend = AMP_ENCODE ; break ; case '\'' : toAppend = APOS_ENCODE ; break ; default : break ; } break ; case forText : // No need to escape ' " ' , ' \ ' ' , and ' > ' for text . switch ( ch ) { case '<' : toAppend = LT_ENCODE ; break ; case '&' : toAppend = AMP_ENCODE ; break ; default : break ; } break ; } if ( toAppend != null ) { if ( i > last ) { out . append ( input , last , i ) ; } out . append ( toAppend ) ; last = ++ i ; } else { i ++ ; } } if ( last == 0 ) { return input ; } if ( i > last ) { out . append ( input , last , i ) ; } return out ;
public class SesameGraphBuilder { /** * / * ( non - Javadoc ) * @ see org . openprovenance . prov . rdf . GraphBuilder # createDataProperty ( org . openprovenance . prov . model . QualifiedName , org . openprovenance . prov . model . QualifiedName , java . lang . Object ) */ @ Override public org . openrdf . model . Statement createDataProperty ( QualifiedName subject , QualifiedName pred , LiteralImpl literalImpl ) { } }
return createDataProperty ( qualifiedNameToResource ( subject ) , pred , literalImpl ) ;
public class BoneCPConfig { /** * Loads the given class , respecting the given classloader . * @ param clazz class to load * @ return Loaded class * @ throws ClassNotFoundException */ protected Class < ? > loadClass ( String clazz ) throws ClassNotFoundException { } }
if ( this . classLoader == null ) { return Class . forName ( clazz ) ; } return Class . forName ( clazz , true , this . classLoader ) ;
public class PoolManager { /** * Quiesce connections * @ pre quiesceIfPossible has been called . * @ throws ResourceException * @ concurrency concurrent */ private void quiesce ( ) throws ResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( this , tc , "quiesce" , gConfigProps . cfName ) ; } // Remove parked connection if it exists if ( ( gConfigProps . isSmartHandleSupport ( ) == false ) ) { freePool [ 0 ] . removeParkedConnection ( ) ; } if ( parkedMCWrapper != null ) { parkedMCWrapper . clearMCWrapper ( ) ; } pmQuiesced = true ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( this , tc , "quiesce" , gConfigProps . cfName ) ; }
public class CertificateDetailMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CertificateDetail certificateDetail , ProtocolMarshaller protocolMarshaller ) { } }
if ( certificateDetail == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( certificateDetail . getCertificateArn ( ) , CERTIFICATEARN_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getDomainName ( ) , DOMAINNAME_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getSubjectAlternativeNames ( ) , SUBJECTALTERNATIVENAMES_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getDomainValidationOptions ( ) , DOMAINVALIDATIONOPTIONS_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getSerial ( ) , SERIAL_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getSubject ( ) , SUBJECT_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getIssuer ( ) , ISSUER_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getCreatedAt ( ) , CREATEDAT_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getIssuedAt ( ) , ISSUEDAT_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getImportedAt ( ) , IMPORTEDAT_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getRevokedAt ( ) , REVOKEDAT_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getRevocationReason ( ) , REVOCATIONREASON_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getNotBefore ( ) , NOTBEFORE_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getNotAfter ( ) , NOTAFTER_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getKeyAlgorithm ( ) , KEYALGORITHM_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getSignatureAlgorithm ( ) , SIGNATUREALGORITHM_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getInUseBy ( ) , INUSEBY_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getFailureReason ( ) , FAILUREREASON_BINDING ) ; protocolMarshaller . 
marshall ( certificateDetail . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getRenewalSummary ( ) , RENEWALSUMMARY_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getKeyUsages ( ) , KEYUSAGES_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getExtendedKeyUsages ( ) , EXTENDEDKEYUSAGES_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getCertificateAuthorityArn ( ) , CERTIFICATEAUTHORITYARN_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getRenewalEligibility ( ) , RENEWALELIGIBILITY_BINDING ) ; protocolMarshaller . marshall ( certificateDetail . getOptions ( ) , OPTIONS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link ConversionType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link ConversionType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "Conversion" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_GeneralConversion" ) public JAXBElement < ConversionType > createConversion ( ConversionType value ) { } }
return new JAXBElement < ConversionType > ( _Conversion_QNAME , ConversionType . class , null , value ) ;
public class ExtensionHookView { /** * Adds the given { @ link DisplayedMessageChangedListener } to the view hook , to be later added to the * the { @ link org . parosproxy . paros . view . View } s { @ link org . zaproxy . zap . extension . httppanel . HttpPanelRequest } ChangeListeners . * @ see org . zaproxy . zap . extension . httppanel . HttpPanelRequest # addDisplayedMessageChangedListener ( DisplayedMessageChangedListener ) * @ param messageChangedListener the listener for the request panel . * @ since TODO add version */ public void addRequestPanelDisplayedMessageChangedListener ( DisplayedMessageChangedListener messageChangedListener ) { } }
if ( requestPanelDisplayedMessageChangedListener == null ) { requestPanelDisplayedMessageChangedListener = createList ( ) ; } requestPanelDisplayedMessageChangedListener . add ( messageChangedListener ) ;
public class HazardCurve { /** * Create a hazard curve from given times and given zero rates using given interpolation and extrapolation methods . * The discount factor is determined by * < code > * givenSurvivalProbabilities [ timeIndex ] = givenSurvivalProbabilities [ timeIndex - 1 ] * Math . exp ( - givenHazardRates [ timeIndex ] * ( times [ timeIndex ] - times [ timeIndex - 1 ] ) ) ; * < / code > * @ param name The name of this discount curve . * @ param referenceDate The reference date for this curve , i . e . , the date which defined t = 0. * @ param times Array of times as doubles . * @ param givenHazardRates Array of corresponding survival probabilities . * @ param isParameter Array of booleans specifying whether this point is served " as as parameter " , e . g . , whether it is calibrates ( e . g . using CalibratedCurves ) . * @ param interpolationMethod The interpolation method used for the curve . * @ param extrapolationMethod The extrapolation method used for the curve . * @ param interpolationEntity The entity interpolated / extrapolated . * @ return A new discount factor object . */ public static HazardCurve createHazardCurveFromHazardRate ( String name , Date referenceDate , double [ ] times , double [ ] givenHazardRates , boolean [ ] isParameter , InterpolationMethod interpolationMethod , ExtrapolationMethod extrapolationMethod , InterpolationEntity interpolationEntity ) { } }
LocalDate referenceDataAsLocalDate = Instant . ofEpochMilli ( referenceDate . getTime ( ) ) . atZone ( ZoneId . systemDefault ( ) ) . toLocalDate ( ) ; return createHazardCurveFromHazardRate ( name , referenceDataAsLocalDate , times , givenHazardRates , isParameter , interpolationMethod , extrapolationMethod , interpolationEntity ) ;
public class BigtableAsyncAdmin { /** * { @ inheritDoc } */ @ Override public CompletableFuture < Void > deleteTable ( TableName tableName ) { } }
return toCompletableFuture ( bigtableTableAdminClient . deleteTableAsync ( tableName . getNameAsString ( ) ) ) . thenAccept ( r -> disabledTables . remove ( tableName ) ) ;
public class CmsContentNotification { /** * Creates the mail to be sent to the responsible user . < p > * @ return the mail to be sent to the responsible user */ @ Override protected String generateHtmlMsg ( ) { } }
// set the messages m_messages = Messages . get ( ) . getBundle ( getLocale ( ) ) ; StringBuffer htmlMsg = new StringBuffer ( ) ; htmlMsg . append ( "<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" width=\"100%\">" ) ; htmlMsg . append ( "<tr><td colspan=\"5\"><br/>" ) ; GregorianCalendar tomorrow = new GregorianCalendar ( TimeZone . getDefault ( ) , CmsLocaleManager . getDefaultLocale ( ) ) ; tomorrow . add ( Calendar . DAY_OF_YEAR , 1 ) ; List < CmsExtendedNotificationCause > outdatedResources = new ArrayList < CmsExtendedNotificationCause > ( ) ; List < CmsExtendedNotificationCause > resourcesNextDay = new ArrayList < CmsExtendedNotificationCause > ( ) ; List < CmsExtendedNotificationCause > resourcesNextWeek = new ArrayList < CmsExtendedNotificationCause > ( ) ; // split all resources into three lists : the resources that expire , will be released or get outdated // within the next 24h , within the next week and the resources unchanged since a long time Iterator < CmsExtendedNotificationCause > notificationCauses = m_notificationCauses . iterator ( ) ; while ( notificationCauses . hasNext ( ) ) { CmsExtendedNotificationCause notificationCause = notificationCauses . next ( ) ; if ( notificationCause . getCause ( ) == CmsExtendedNotificationCause . RESOURCE_OUTDATED ) { outdatedResources . add ( notificationCause ) ; } else if ( notificationCause . getDate ( ) . before ( tomorrow . getTime ( ) ) ) { resourcesNextDay . add ( notificationCause ) ; } else { resourcesNextWeek . add ( notificationCause ) ; } } Collections . sort ( resourcesNextDay ) ; Collections . sort ( resourcesNextWeek ) ; Collections . sort ( outdatedResources ) ; appendResourceList ( htmlMsg , resourcesNextDay , m_messages . key ( Messages . GUI_WITHIN_NEXT_DAY_0 ) ) ; appendResourceList ( htmlMsg , resourcesNextWeek , m_messages . key ( Messages . GUI_WITHIN_NEXT_WEEK_0 ) ) ; appendResourceList ( htmlMsg , outdatedResources , m_messages . key ( Messages . 
GUI_FILES_NOT_UPDATED_1 , String . valueOf ( OpenCms . getSystemInfo ( ) . getNotificationTime ( ) ) ) ) ; htmlMsg . append ( "</td></tr></table>" ) ; String result = htmlMsg . toString ( ) ; return result ;
public class SQLMultiScopeRecoveryLog { /** * Drives the execution of the cached up database work . * @ exception SQLException thrown if a SQLException is * encountered when accessing the * Database . */ private void executeBatchStatements ( Connection conn ) throws SQLException { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "executeBatchStatements" , new java . lang . Object [ ] { conn , this } ) ; PreparedStatement insertStatement = null ; PreparedStatement updateStatement = null ; PreparedStatement removeStatement = null ; try { // Prepare the statements if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Prepare the INSERT statement for " + _inserts + " inserts" ) ; if ( _inserts > 0 ) { String insertString = "INSERT INTO " + _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix + " (SERVER_NAME, SERVICE_ID, RU_ID, RUSECTION_ID, RUSECTION_DATA_INDEX, DATA)" + " VALUES (?,?,?,?,?,?)" ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "INSERT string - " + insertString ) ; insertStatement = conn . prepareStatement ( insertString ) ; insertStatement . setString ( 1 , _serverName ) ; insertStatement . setShort ( 2 , ( short ) _recoveryAgent . clientIdentifier ( ) ) ; } if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Prepare the UPDATE statement for " + _updates + " updates" ) ; if ( _updates > 0 ) { String updateString = "UPDATE " + _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix + " SET DATA = ? WHERE " + "SERVER_NAME = ? AND SERVICE_ID = ? AND RU_ID = ? AND RUSECTION_ID = ? AND RUSECTION_DATA_INDEX = 0" ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "UPDATE string - " + updateString ) ; updateStatement = conn . prepareStatement ( updateString ) ; updateStatement . setString ( 2 , _serverName ) ; updateStatement . setShort ( 3 , ( short ) _recoveryAgent . clientIdentifier ( ) ) ; } if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Prepare the DELETE statement for " + _removes + " removes" ) ; if ( _removes > 0 ) { String removeString = "DELETE FROM " + _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix + " WHERE " + "SERVER_NAME = ? AND SERVICE_ID = ? AND RU_ID = ? " ; if ( tc . isDebugEnabled ( ) ) Tr . debug ( tc , "DELETE string - " + removeString ) ; removeStatement = conn . 
prepareStatement ( removeString ) ; removeStatement . setString ( 1 , _serverName ) ; removeStatement . setShort ( 2 , ( short ) _recoveryAgent . clientIdentifier ( ) ) ; } // Batch the INSERT statements if ( _inserts > 0 ) { for ( ruForReplay element : _cachedInserts ) { insertStatement . setLong ( 3 , element . getRuId ( ) ) ; insertStatement . setLong ( 4 , element . getSectionId ( ) ) ; insertStatement . setShort ( 5 , ( short ) element . getIndex ( ) ) ; insertStatement . setBytes ( 6 , element . getData ( ) ) ; insertStatement . addBatch ( ) ; } } // Batch the UPDATE statements if ( _updates > 0 ) { for ( ruForReplay element : _cachedUpdates ) { updateStatement . setLong ( 4 , element . getRuId ( ) ) ; updateStatement . setLong ( 5 , element . getSectionId ( ) ) ; updateStatement . setBytes ( 1 , element . getData ( ) ) ; updateStatement . addBatch ( ) ; } } // Batch the DELETE statements if ( _removes > 0 ) { for ( ruForReplay element : _cachedRemoves ) { removeStatement . setLong ( 3 , element . getRuId ( ) ) ; removeStatement . addBatch ( ) ; } } // Execute the statements if ( _inserts > 0 ) { int [ ] numUpdates = insertStatement . executeBatch ( ) ; if ( tc . isDebugEnabled ( ) ) { for ( int i = 0 ; i < numUpdates . length ; i ++ ) { if ( numUpdates [ i ] == Statement . SUCCESS_NO_INFO ) Tr . debug ( tc , "Execution " + i + ": unknown number of rows updated" ) ; else Tr . debug ( tc , "Execution " + i + "successful: " + numUpdates [ i ] + " rows updated" ) ; } } } if ( tc . isEventEnabled ( ) ) Tr . event ( tc , "sql tranlog: batch inserts: " + _inserts ) ; if ( _updates > 0 ) { int [ ] numUpdates = updateStatement . executeBatch ( ) ; if ( tc . isDebugEnabled ( ) ) { for ( int i = 0 ; i < numUpdates . length ; i ++ ) { if ( numUpdates [ i ] == Statement . SUCCESS_NO_INFO ) Tr . debug ( tc , "Execution " + i + ": unknown number of rows updated" ) ; else Tr . 
debug ( tc , "Execution " + i + "successful: " + numUpdates [ i ] + " rows updated" ) ; } } } if ( tc . isEventEnabled ( ) ) Tr . event ( tc , "sql tranlog: batch updates: " + _updates ) ; if ( _removes > 0 ) { int [ ] numUpdates = removeStatement . executeBatch ( ) ; if ( tc . isDebugEnabled ( ) ) { for ( int i = 0 ; i < numUpdates . length ; i ++ ) { if ( numUpdates [ i ] == Statement . SUCCESS_NO_INFO ) Tr . debug ( tc , "Execution " + i + ": unknown number of rows updated" ) ; else Tr . debug ( tc , "Execution " + i + "successful: " + numUpdates [ i ] + " rows updated" ) ; } } } if ( tc . isEventEnabled ( ) ) Tr . event ( tc , "sql tranlog: batch deletes: " + _removes + ", for obj: " + this ) ; } finally { if ( insertStatement != null && ! insertStatement . isClosed ( ) ) insertStatement . close ( ) ; if ( updateStatement != null && ! updateStatement . isClosed ( ) ) updateStatement . close ( ) ; if ( removeStatement != null && ! removeStatement . isClosed ( ) ) removeStatement . close ( ) ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "executeBatchStatements" ) ;
public class ListPlatformApplicationsResult { /** * Platform applications returned when calling ListPlatformApplications action . * @ param platformApplications * Platform applications returned when calling ListPlatformApplications action . */ public void setPlatformApplications ( java . util . Collection < PlatformApplication > platformApplications ) { } }
if ( platformApplications == null ) { this . platformApplications = null ; return ; } this . platformApplications = new com . amazonaws . internal . SdkInternalList < PlatformApplication > ( platformApplications ) ;
public class Http2StreamSinkChannel { /** * This method should be called before sending . It will return the amount of * data that can be sent , taking into account the stream and connection flow * control windows , and the toSend parameter . * It will decrement the flow control windows by the amount that can be sent , * so this method should only be called as a frame is being queued . * @ return The number of bytes that can be sent */ protected int grabFlowControlBytes ( int toSend ) { } }
synchronized ( flowControlLock ) { if ( toSend == 0 ) { return 0 ; } int newWindowSize = this . getChannel ( ) . getInitialSendWindowSize ( ) ; int settingsDelta = newWindowSize - this . initialWindowSize ; // first adjust for any settings frame updates this . initialWindowSize = newWindowSize ; this . flowControlWindow += settingsDelta ; int min = Math . min ( toSend , this . flowControlWindow ) ; int actualBytes = this . getChannel ( ) . grabFlowControlBytes ( min ) ; this . flowControlWindow -= actualBytes ; return actualBytes ; }
public class ArchTaskExecutor { /** * Returns an instance of the task executor . * @ return The singleton ArchTaskExecutor . */ public static ArchTaskExecutor getInstance ( ) { } }
if ( sInstance != null ) { return sInstance ; } synchronized ( ArchTaskExecutor . class ) { if ( sInstance == null ) { sInstance = new ArchTaskExecutor ( ) ; } } return sInstance ;
public class AbstractBigtableAdmin { /** * { @ inheritDoc } */ @ Override public void createTable ( HTableDescriptor desc , byte [ ] startKey , byte [ ] endKey , int numRegions ) throws IOException { } }
createTable ( desc , createSplitKeys ( startKey , endKey , numRegions ) ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link CoordinateSystemAxisRefType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link CoordinateSystemAxisRefType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "coordinateSystemAxisRef" ) public JAXBElement < CoordinateSystemAxisRefType > createCoordinateSystemAxisRef ( CoordinateSystemAxisRefType value ) { } }
return new JAXBElement < CoordinateSystemAxisRefType > ( _CoordinateSystemAxisRef_QNAME , CoordinateSystemAxisRefType . class , null , value ) ;
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link AbstractReferenceSystemType } { @ code > } * @ param value * Java instance representing xml element ' s value . * @ return * the new instance of { @ link JAXBElement } { @ code < } { @ link AbstractReferenceSystemType } { @ code > } */ @ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "_CRS" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "_ReferenceSystem" ) public JAXBElement < AbstractReferenceSystemType > create_CRS ( AbstractReferenceSystemType value ) { } }
return new JAXBElement < AbstractReferenceSystemType > ( __CRS_QNAME , AbstractReferenceSystemType . class , null , value ) ;
public class ProjectTreeController { /** * Add calendars to the tree . * @ param parentNode parent tree node * @ param file calendar container */ private void addCalendars ( MpxjTreeNode parentNode , ProjectFile file ) { } }
for ( ProjectCalendar calendar : file . getCalendars ( ) ) { addCalendar ( parentNode , calendar ) ; }
public class StreamState { /** * / * This will flush remaining packets into a page ( returning nonzero ) , * even if there is not enough data to trigger a flush normally * ( undersized page ) . If there are no packets or partial packets to * flush , ogg _ stream _ flush returns 0 . Note that ogg _ stream _ flush will * try to flush a normal sized page like ogg _ stream _ pageout ; a call to * ogg _ stream _ flush does not gurantee that all packets have flushed . * Only a return value of 0 from ogg _ stream _ flush indicates all packet * data is flushed into pages . * ogg _ stream _ page will flush the last page in a stream even if it ' s * undersized ; you almost certainly want to use ogg _ stream _ pageout * ( and * not * ogg _ stream _ flush ) unless you need to flush an undersized * page in the middle of a stream for some reason . */ public int flush ( Page og ) { } }
int i ; int vals = 0 ; int maxvals = ( lacing_fill > 255 ? 255 : lacing_fill ) ; int bytes = 0 ; int acc = 0 ; long granule_pos = granule_vals [ 0 ] ; if ( maxvals == 0 ) return ( 0 ) ; /* construct a page */ /* decide how many segments to include */ /* If this is the initial header case , the first page must only include the initial header packet */ if ( b_o_s == 0 ) { /* ' initial header page ' case */ granule_pos = 0 ; for ( vals = 0 ; vals < maxvals ; vals ++ ) { if ( ( lacing_vals [ vals ] & 0x0ff ) < 255 ) { vals ++ ; break ; } } } else { for ( vals = 0 ; vals < maxvals ; vals ++ ) { if ( acc > 4096 ) break ; acc += ( lacing_vals [ vals ] & 0x0ff ) ; granule_pos = granule_vals [ vals ] ; } } /* construct the header in temp storage */ System . arraycopy ( "OggS" . getBytes ( ) , 0 , header , 0 , 4 ) ; /* stream structure version */ header [ 4 ] = 0x00 ; /* continued packet flag ? */ header [ 5 ] = 0x00 ; if ( ( lacing_vals [ 0 ] & 0x100 ) == 0 ) header [ 5 ] |= 0x01 ; /* first page flag ? */ if ( b_o_s == 0 ) header [ 5 ] |= 0x02 ; /* last page flag ? 
*/ if ( e_o_s != 0 && lacing_fill == vals ) header [ 5 ] |= 0x04 ; b_o_s = 1 ; /* 64 bits of PCM position */ for ( i = 6 ; i < 14 ; i ++ ) { header [ i ] = ( byte ) granule_pos ; granule_pos >>>= 8 ; } /* 32 bits of stream serial number */ { int _serialno = serialno ; for ( i = 14 ; i < 18 ; i ++ ) { header [ i ] = ( byte ) _serialno ; _serialno >>>= 8 ; } } /* 32 bits of page counter ( we have both counter and page header because this val can roll over ) */ if ( pageno == - 1 ) pageno = 0 ; /* because someone called stream _ reset ; this would be a strange thing to do in an encode stream , but it has plausible uses */ { int _pageno = pageno ++ ; for ( i = 18 ; i < 22 ; i ++ ) { header [ i ] = ( byte ) _pageno ; _pageno >>>= 8 ; } } /* zero for computation ; filled in later */ header [ 22 ] = 0 ; header [ 23 ] = 0 ; header [ 24 ] = 0 ; header [ 25 ] = 0 ; /* segment table */ header [ 26 ] = ( byte ) vals ; for ( i = 0 ; i < vals ; i ++ ) { header [ i + 27 ] = ( byte ) lacing_vals [ i ] ; bytes += ( header [ i + 27 ] & 0xff ) ; } /* set pointers in the ogg _ page struct */ og . header_base = header ; og . header = 0 ; og . header_len /* = header _ fill */ = vals + 27 ; og . body_base = body_data ; og . body = body_returned ; og . body_len = bytes ; /* advance the lacing data and set the body _ returned pointer */ lacing_fill -= vals ; System . arraycopy ( lacing_vals , vals , lacing_vals , 0 , lacing_fill * 4 ) ; System . arraycopy ( granule_vals , vals , granule_vals , 0 , lacing_fill * 8 ) ; body_returned += bytes ; /* calculate the checksum */ og . checksum ( ) ; /* done */ return ( 1 ) ;
public class FunctionType { /** * Restricted package - accessible version of { @ link # addSubType } , which ensures subtypes are not * duplicated . Generally subtypes are added internally and are guaranteed not to be duplicated , * but this has the possibility of missing unresolved supertypes ( typically from externs ) . To * handle that case , { @ link PrototypeObjectType } also adds subclasses after resolution . This * method only adds a subclass to the list if it didn ' t already add itself to its superclass in * the earlier pass . Ideally , " subclass " here would only refer to classes , but there ' s an edge * case where interfaces have the { @ code Object } constructor added as its " superclass " . */ final void addSubClassAfterResolution ( FunctionType subClass ) { } }
checkArgument ( this == subClass . getSuperClassConstructor ( ) ) ; if ( ! subClass . wasAddedToExtendedConstructorSubtypes ) { addSubType ( subClass ) ; }
public class CmsContentService { /** * Returns the RDF annotations required for in line editing . < p > * @ param parentValue the parent XML content value * @ param childNames the child attribute names separated by ' | ' * @ return the RDFA */ public static String getRdfaAttributes ( I_CmsXmlContentValue parentValue , String childNames ) { } }
String id = CmsContentDefinition . uuidToEntityId ( parentValue . getDocument ( ) . getFile ( ) . getStructureId ( ) , parentValue . getLocale ( ) . toString ( ) ) + "/" + parentValue . getPath ( ) ; String path = "" ; String [ ] children = childNames . split ( "\\|" ) ; for ( int i = 0 ; i < children . length ; i ++ ) { I_CmsXmlSchemaType schemaType = parentValue . getContentDefinition ( ) . getSchemaType ( parentValue . getName ( ) + "/" + children [ i ] ) ; if ( schemaType != null ) { if ( i > 0 ) { path += " " ; } String typePath = parentValue . getPath ( ) ; path += "/" + removePathIndexes ( typePath ) + ":" ; path += getTypeUri ( schemaType . getContentDefinition ( ) ) + "/" + children [ i ] ; } } return String . format ( RDFA_ATTRIBUTES , id , path ) ;
public class MapUtils { /** * Counts the number of entries ( key - value pairs ) in the { @ link Map } accepted by the { @ link Filter } . * @ param < K > Class type of the key . * @ param < V > Class type of the value . * @ param map { @ link Map } to evaluate . * @ param filter { @ link Filter } used to determine the number of entries in the { @ link Map } accepted by * the { @ link Filter } . * @ return an integer value indicating the number of entries in the { @ link Map } accepted by the { @ link Filter } . * @ throws IllegalArgumentException if { @ link Filter } is null . * @ see org . cp . elements . lang . Filter * @ see java . util . Map */ public static < K , V > long count ( Map < K , V > map , Filter < Map . Entry < K , V > > filter ) { } }
Assert . notNull ( filter , "Filter is required" ) ; return nullSafeMap ( map ) . entrySet ( ) . stream ( ) . filter ( filter :: accept ) . count ( ) ;
public class JobServiceClient { /** * Deletes a list of [ Job ] [ google . cloud . talent . v4beta1 . Job ] s by filter . * < p > Sample code : * < pre > < code > * try ( JobServiceClient jobServiceClient = JobServiceClient . create ( ) ) { * TenantOrProjectName parent = TenantName . of ( " [ PROJECT ] " , " [ TENANT ] " ) ; * String filter = " " ; * jobServiceClient . batchDeleteJobs ( parent . toString ( ) , filter ) ; * < / code > < / pre > * @ param parent Required . * < p > The resource name of the tenant under which the job is created . * < p > The format is " projects / { project _ id } / tenants / { tenant _ id } " , for example , * " projects / api - test - project / tenant / foo " . * < p > Tenant id is optional and the default tenant is used if unspecified , for example , * " projects / api - test - project " . * @ param filter Required . * < p > The filter string specifies the jobs to be deleted . * < p > Supported operator : = , AND * < p > The fields eligible for filtering are : * < p > & # 42 ; ` companyName ` ( Required ) & # 42 ; ` requisitionId ` ( Required ) * < p > Sample Query : companyName = " projects / api - test - project / companies / 123 " AND requisitionId * = " req - 1" * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final void batchDeleteJobs ( String parent , String filter ) { } }
BatchDeleteJobsRequest request = BatchDeleteJobsRequest . newBuilder ( ) . setParent ( parent ) . setFilter ( filter ) . build ( ) ; batchDeleteJobs ( request ) ;
public class ULocale {

    /**
     * Returns a well-formed IETF BCP 47 language tag representing this locale.
     *
     * <p>If this <code>ULocale</code> has a language, script, country, or variant that does not
     * satisfy the IETF BCP 47 language tag syntax requirements, this method handles these fields
     * as described below:
     *
     * <p><b>Language:</b> If language is empty, or not well-formed (for example "a" or "e2"),
     * it will be emitted as "und" (Undetermined).
     *
     * <p><b>Script:</b> If script is not well-formed (for example "12" or "Latin"), it will be
     * omitted.
     *
     * <p><b>Country:</b> If country is not well-formed (for example "12" or "USA"), it will be
     * omitted.
     *
     * <p><b>Variant:</b> If variant <b>is</b> well-formed, each sub-segment (delimited by '-' or
     * '_') is emitted as a subtag. Otherwise:
     * <ul>
     * <li>if all sub-segments match <code>[0-9a-zA-Z]{1,8}</code> (for example "WIN" or
     * "Oracle_JDK_Standard_Edition"), the first ill-formed sub-segment and all following will be
     * appended to the private use subtag. The first appended subtag will be "lvariant", followed
     * by the sub-segments in order, separated by hyphen. For example, "x-lvariant-WIN",
     * "Oracle-x-lvariant-JDK-Standard-Edition".
     * <li>if any sub-segment does not match <code>[0-9a-zA-Z]{1,8}</code>, the variant will be
     * truncated and the problematic sub-segment and all following sub-segments will be omitted.
     * If the remainder is non-empty, it will be emitted as a private use subtag as above (even if
     * the remainder turns out to be well-formed). For example, "Solaris_isjustthecoolestthing" is
     * emitted as "x-lvariant-Solaris", not as "solaris".
     * </li>
     * </ul>
     *
     * <p><b>Note:</b> Although the language tag created by this method is well-formed (satisfies
     * the syntax requirements defined by the IETF BCP 47 specification), it is not necessarily a
     * valid BCP 47 language tag. For example,
     * <pre>
     *   new Locale("xx", "YY").toLanguageTag();</pre>
     * will return "xx-YY", but the language subtag "xx" and the region subtag "YY" are invalid
     * because they are not registered in the IANA Language Subtag Registry.
     *
     * @return a BCP47 language tag representing the locale
     * @see #forLanguageTag(String)
     */
    public String toLanguageTag() {
        BaseLocale base = base();
        LocaleExtensions exts = extensions();

        if (base.getVariant().equalsIgnoreCase("POSIX")) {
            // special handling for variant POSIX: strip the variant from the base
            // locale and carry it as the Unicode locale keyword "va" instead
            base = BaseLocale.getInstance(base.getLanguage(), base.getScript(), base.getRegion(), "");
            if (exts.getUnicodeLocaleType("va") == null) {
                // add va-posix (only when the caller has not already set "va")
                InternalLocaleBuilder ilocbld = new InternalLocaleBuilder();
                try {
                    ilocbld.setLocale(BaseLocale.ROOT, exts);
                    ilocbld.setUnicodeLocaleKeyword("va", "posix");
                    exts = ilocbld.getLocaleExtensions();
                } catch (LocaleSyntaxException e) {
                    // this should not happen: inputs here are already validated
                    throw new RuntimeException(e);
                }
            }
        }

        LanguageTag tag = LanguageTag.parseLocale(base, exts);

        // Assemble the tag in BCP 47 subtag order — language, script, region,
        // variants, extensions, private use — canonicalizing each subtag and
        // joining with LanguageTag.SEP. Statement order is significant.
        StringBuilder buf = new StringBuilder();
        String subtag = tag.getLanguage();
        if (subtag.length() > 0) {
            buf.append(LanguageTag.canonicalizeLanguage(subtag));
        }

        subtag = tag.getScript();
        if (subtag.length() > 0) {
            buf.append(LanguageTag.SEP);
            buf.append(LanguageTag.canonicalizeScript(subtag));
        }

        subtag = tag.getRegion();
        if (subtag.length() > 0) {
            buf.append(LanguageTag.SEP);
            buf.append(LanguageTag.canonicalizeRegion(subtag));
        }

        List<String> subtags = tag.getVariants();
        for (String s : subtags) {
            buf.append(LanguageTag.SEP);
            buf.append(LanguageTag.canonicalizeVariant(s));
        }

        subtags = tag.getExtensions();
        for (String s : subtags) {
            buf.append(LanguageTag.SEP);
            buf.append(LanguageTag.canonicalizeExtension(s));
        }

        subtag = tag.getPrivateuse();
        if (subtag.length() > 0) {
            // private use comes last; only emit a separator when something precedes it
            if (buf.length() > 0) {
                buf.append(LanguageTag.SEP);
            }
            buf.append(LanguageTag.PRIVATEUSE).append(LanguageTag.SEP);
            buf.append(LanguageTag.canonicalizePrivateuse(subtag));
        }

        return buf.toString();
    }
}