signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcTendonAnchorType ( ) { } } | if ( ifcTendonAnchorTypeEClass == null ) { ifcTendonAnchorTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 707 ) ; } return ifcTendonAnchorTypeEClass ; |
public class TaskManagementFunctionRequestParser { /** * { @ inheritDoc } */
@ Override protected final void deserializeBytes1to3 ( final int line ) throws InternetSCSIException { } } | functionCode = FunctionCode . valueOf ( ( byte ) ( line & Constants . SECOND_BYTE_MASK ) ) ; Utils . isReserved ( line & Constants . LAST_TWO_BYTES_MASK ) ; |
public class AnchorageSimulator { /** * Starts simulating anchorige .
* @ param locationSource
* @ param period Update rate in millis .
* @ param isDaemon Sets timer thread . In single thread this should be false .
* This parament is used only if external Timer was not provided !
* @ throws IOException */
public void simulate ( LocationSource locationSource , long period , boolean isDaemon ) throws IOException { } } | this . locationSource = locationSource ; dis = new DataInputStream ( new BufferedInputStream ( url . openStream ( ) ) ) ; if ( timer == null ) { timer = new Timer ( "AnchorageSimulator" , isDaemon ) ; } timer . scheduleAtFixedRate ( this , 0 , period ) ; |
public class ProteinModificationXmlReader { /** * Read protein modifications from XML file and register them .
* @ param isXml { @ link InputStream } of the XML file .
* @ throws IOException if failed to read the XML file .
* @ throws ParserConfigurationException if parse errors occur .
* @ throws SAXException the { @ link DocumentBuilder } cannot be created . */
public static void registerProteinModificationFromXml ( InputStream isXml ) throws IOException , ParserConfigurationException , SAXException { } } | if ( isXml == null ) { throw new IllegalArgumentException ( "Null argument." ) ; } DocumentBuilderFactory factory = DocumentBuilderFactory . newInstance ( ) ; DocumentBuilder builder = factory . newDocumentBuilder ( ) ; Document doc = builder . parse ( isXml ) ; NodeList modNodes = doc . getElementsByTagName ( "Entry" ) ; int modSize = modNodes . getLength ( ) ; List < Node > nodes ; for ( int iMod = 0 ; iMod < modSize ; iMod ++ ) { Node modNode = modNodes . item ( iMod ) ; Map < String , List < Node > > infoNodes = getChildNodes ( modNode ) ; // ID
nodes = infoNodes . get ( "Id" ) ; if ( nodes == null || nodes . size ( ) != 1 ) { throw new RuntimeException ( "Each modification must have exact " + "one <Id> field." ) ; } String id = nodes . get ( 0 ) . getTextContent ( ) ; // modification category
nodes = infoNodes . get ( "Category" ) ; if ( nodes == null || nodes . size ( ) != 1 ) { throw new RuntimeException ( "Each modification must have exact " + "one <Category> field. See Modification " + id + "." ) ; } ModificationCategory cat = ModificationCategory . getByLabel ( nodes . get ( 0 ) . getTextContent ( ) ) ; if ( cat == null ) { throw new RuntimeException ( nodes . get ( 0 ) . getTextContent ( ) + " is not defined as an modification category." + " See Modification " + id + "." ) ; } // occurrence type
nodes = infoNodes . get ( "Occurrence" ) ; if ( nodes == null || nodes . size ( ) != 1 ) { throw new RuntimeException ( "Each modification must have exact " + "one <Occurrence> field. See Modification " + id + "." ) ; } ModificationOccurrenceType occType = ModificationOccurrenceType . getByLabel ( nodes . get ( 0 ) . getTextContent ( ) ) ; if ( occType == null ) { throw new RuntimeException ( nodes . get ( 0 ) . getTextContent ( ) + " is not defined as an modification occurence type." + " See Modification " + id + "." ) ; } // condition
ModificationCondition condition = null ; { nodes = infoNodes . get ( "Condition" ) ; if ( nodes == null || nodes . size ( ) != 1 ) { throw new RuntimeException ( "Each modification must have exact " + "one <Condition> field. See Modification " + id + "." ) ; } Node compsNode = nodes . get ( 0 ) ; // keep track of the labels of component indices
Map < String , Integer > mapLabelComp = new HashMap < String , Integer > ( ) ; Map < String , List < Node > > compInfoNodes = getChildNodes ( compsNode ) ; // components
List < Node > compNodes = compInfoNodes . get ( "Component" ) ; int sizeComp = compNodes . size ( ) ; List < Component > comps = new ArrayList < Component > ( sizeComp ) ; for ( int iComp = 0 ; iComp < sizeComp ; iComp ++ ) { Node compNode = compNodes . get ( iComp ) ; // comp label
NamedNodeMap compNodeAttrs = compNode . getAttributes ( ) ; Node labelNode = compNodeAttrs . getNamedItem ( "component" ) ; if ( labelNode == null ) { throw new RuntimeException ( "Each component must have a label." + " See Modification " + id + "." ) ; } String label = labelNode . getTextContent ( ) ; if ( mapLabelComp . containsKey ( label ) ) { throw new RuntimeException ( "Each component must have a unique label." + " See Modification " + id + "." ) ; } // comp PDBCC ID
Set < String > compIds = new HashSet < String > ( ) ; List < Node > compIdNodes = getChildNodes ( compNode ) . get ( "Id" ) ; if ( compIdNodes != null ) { for ( Node compIdNode : compIdNodes ) { NamedNodeMap compIdNodeAttr = compIdNode . getAttributes ( ) ; Node compIdSource = compIdNodeAttr . getNamedItem ( "source" ) ; if ( compIdSource != null && compIdSource . getTextContent ( ) . equals ( "PDBCC" ) ) { String strComps = compIdNode . getTextContent ( ) ; if ( strComps . isEmpty ( ) ) { throw new RuntimeException ( "Empty component." + " See Modification " + id + "." ) ; } compIds . addAll ( Arrays . asList ( strComps . split ( "," ) ) ) ; } } } if ( compIds . isEmpty ( ) ) { throw new RuntimeException ( "Each component must have a PDBCC ID." + " See Modification " + id + "." ) ; } // terminal
boolean nTerminal = false ; boolean cTerminal = false ; List < Node > compTermNode = getChildNodes ( compNode ) . get ( "Terminal" ) ; if ( compTermNode != null ) { if ( compTermNode . size ( ) != 1 ) { throw new RuntimeException ( "Only one <Terminal> condition is allowed for " + "each component. See Modification " + id + "." ) ; } String nc = compTermNode . get ( 0 ) . getTextContent ( ) ; if ( nc . equals ( "N" ) ) { nTerminal = true ; } else if ( nc . equals ( "C" ) ) { cTerminal = true ; } else { throw new RuntimeException ( "Only N or C is allowed for <Terminal>." + " See Modification " + id + "." ) ; } } // register
Component comp = Component . of ( compIds , nTerminal , cTerminal ) ; comps . add ( comp ) ; mapLabelComp . put ( label , comps . size ( ) - 1 ) ; } // bonds
List < Node > bondNodes = compInfoNodes . get ( "Bond" ) ; List < ModificationLinkage > linkages = null ; if ( bondNodes != null ) { int sizeBonds = bondNodes . size ( ) ; linkages = new ArrayList < ModificationLinkage > ( sizeBonds ) ; for ( int iBond = 0 ; iBond < sizeBonds ; iBond ++ ) { Node bondNode = bondNodes . get ( iBond ) ; Map < String , List < Node > > bondChildNodes = getChildNodes ( bondNode ) ; if ( bondChildNodes == null ) { throw new RuntimeException ( "Each bond must contain two atoms" + " See Modification " + id + "." ) ; } List < Node > atomNodes = bondChildNodes . get ( "Atom" ) ; if ( atomNodes == null || atomNodes . size ( ) != 2 ) { throw new RuntimeException ( "Each bond must contain two atoms" + " See Modification " + id + "." ) ; } // atom 1
NamedNodeMap atomNodeAttrs = atomNodes . get ( 0 ) . getAttributes ( ) ; Node compNode = atomNodeAttrs . getNamedItem ( "component" ) ; if ( compNode == null ) { throw new RuntimeException ( "Each atom must on a component." + " See Modification " + id + "." ) ; } String labelComp1 = compNode . getTextContent ( ) ; int iComp1 = mapLabelComp . get ( labelComp1 ) ; Node labelNode = atomNodeAttrs . getNamedItem ( "atom" ) ; String labelAtom1 = labelNode == null ? null : labelNode . getTextContent ( ) ; String atom1 = atomNodes . get ( 0 ) . getTextContent ( ) ; if ( atom1 . isEmpty ( ) ) { throw new RuntimeException ( "Each atom must have a name. Please use wildcard * if unknown." + " See Modification " + id + "." ) ; } List < String > potentialAtoms1 = Arrays . asList ( atom1 . split ( "," ) ) ; // atom 2
atomNodeAttrs = atomNodes . get ( 1 ) . getAttributes ( ) ; compNode = atomNodeAttrs . getNamedItem ( "component" ) ; if ( compNode == null ) { throw new RuntimeException ( "Each atom must on a component." + " See Modification " + id + "." ) ; } String labelComp2 = compNode . getTextContent ( ) ; int iComp2 = mapLabelComp . get ( labelComp2 ) ; labelNode = atomNodeAttrs . getNamedItem ( "atom" ) ; String labelAtom2 = labelNode == null ? null : labelNode . getTextContent ( ) ; String atom2 = atomNodes . get ( 1 ) . getTextContent ( ) ; if ( atom2 . isEmpty ( ) ) { throw new RuntimeException ( "Each atom must have a name. Please use wildcard * if unknown." + " See Modification " + id + "." ) ; } List < String > potentialAtoms2 = Arrays . asList ( atom2 . split ( "," ) ) ; // add linkage
ModificationLinkage linkage = new ModificationLinkage ( comps , iComp1 , potentialAtoms1 , labelAtom1 , iComp2 , potentialAtoms2 , labelAtom2 ) ; linkages . add ( linkage ) ; } } condition = new ModificationConditionImpl ( comps , linkages ) ; } // end of condition
ProteinModificationImpl . Builder modBuilder = new ProteinModificationImpl . Builder ( id , cat , occType , condition ) ; // description
nodes = infoNodes . get ( "Description" ) ; if ( nodes != null && ! nodes . isEmpty ( ) ) { modBuilder . setDescription ( nodes . get ( 0 ) . getTextContent ( ) ) ; } // cross references
nodes = infoNodes . get ( "CrossReference" ) ; if ( nodes != null ) { for ( Node node : nodes ) { Map < String , List < Node > > xrefInfoNodes = getChildNodes ( node ) ; // source
List < Node > xrefNode = xrefInfoNodes . get ( "Source" ) ; if ( xrefNode == null || xrefNode . size ( ) != 1 ) { throw new RuntimeException ( "Error in XML file: " + "a cross reference must contain exactly one <Source> field." + " See Modification " + id + "." ) ; } String xrefDb = xrefNode . get ( 0 ) . getTextContent ( ) ; // id
xrefNode = xrefInfoNodes . get ( "Id" ) ; if ( xrefNode == null || xrefNode . size ( ) != 1 ) { throw new RuntimeException ( "Error in XML file: " + "a cross reference must contain exactly one <Id> field." + " See Modification " + id + "." ) ; } String xrefId = xrefNode . get ( 0 ) . getTextContent ( ) ; // name
String xrefName = null ; xrefNode = xrefInfoNodes . get ( "Name" ) ; if ( xrefNode != null && ! xrefNode . isEmpty ( ) ) { xrefName = xrefNode . get ( 0 ) . getTextContent ( ) ; } if ( xrefDb . equals ( "PDBCC" ) ) { modBuilder . setPdbccId ( xrefId ) . setPdbccName ( xrefName ) ; } else if ( xrefDb . equals ( "RESID" ) ) { modBuilder . setResidId ( xrefId ) . setResidName ( xrefName ) ; } else if ( xrefDb . equals ( "PSI-MOD" ) ) { modBuilder . setPsimodId ( xrefId ) . setPsimodName ( xrefName ) ; } } } // end of cross references
// formula
nodes = infoNodes . get ( "Formula" ) ; if ( nodes != null && ! nodes . isEmpty ( ) ) { modBuilder . setFormula ( nodes . get ( 0 ) . getTextContent ( ) ) ; } // keywords
nodes = infoNodes . get ( "Keyword" ) ; if ( nodes != null && ! nodes . isEmpty ( ) ) { for ( Node node : nodes ) { modBuilder . addKeyword ( node . getTextContent ( ) ) ; } } ProteinModificationRegistry . register ( modBuilder . build ( ) ) ; } |
public class DistributedCache { /** * Clear the entire contents of the cache and delete the backing files . This
* should only be used when the server is reinitializing , because the users
* are going to lose their files . */
public static void purgeCache ( Configuration conf , MRAsyncDiskService service ) throws IOException { } } | synchronized ( cachedArchives ) { LocalFileSystem localFs = FileSystem . getLocal ( conf ) ; for ( Map . Entry < String , CacheStatus > f : cachedArchives . entrySet ( ) ) { try { deleteLocalPath ( service , localFs , f . getValue ( ) . localizedLoadPath ) ; } catch ( IOException ie ) { LOG . debug ( "Error cleaning up cache" , ie ) ; } } cachedArchives . clear ( ) ; } |
public class GUIObjectDetails { /** * Method to validate the keys against the { @ link HtmlSeLionElementSet } or { @ link IOSSeLionElementSet } as per the
* { @ link TestPlatform }
* @ param keysToValidate
* the keys from the Page Yaml input
* @ param dataFileName
* The file name containing the keys
* @ param currentPlatform
* the platform specified in the Page Yaml input */
public static void validateKeysInDataFile ( List < String > keysToValidate , String dataFileName , TestPlatform currentPlatform ) { } } | for ( String currentKey : keysToValidate ) { // For case : Invalid element inside a container , the key inside a container is split using delimiter .
// It will be assigned to the currentKey to proceed with the validation .
if ( currentKey . contains ( DELIMITER ) ) { String [ ] keyInContainer = currentKey . split ( DELIMITER ) ; // assigning the key to the current key to proceed with the validation
currentKey = keyInContainer [ 1 ] ; } if ( ! validForWebPlatform ( currentPlatform , currentKey ) || ! validForMobilePlatforms ( currentPlatform , currentKey ) ) { throw new IllegalArgumentException ( String . format ( "Detected an invalid key [%s] in data file %s for Platform %s" , currentKey , dataFileName , currentPlatform . getPlatformName ( ) ) ) ; } } |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link PrimeMeridianType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link PrimeMeridianType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "PrimeMeridian" , substitutionHeadNamespace = "http://www.opengis.net/gml" , substitutionHeadName = "Definition" ) public JAXBElement < PrimeMeridianType > createPrimeMeridian ( PrimeMeridianType value ) { } } | return new JAXBElement < PrimeMeridianType > ( _PrimeMeridian_QNAME , PrimeMeridianType . class , null , value ) ; |
public class DefaultPojoBindingFactory { @ Override @ SuppressWarnings ( "unchecked" ) public synchronized < P > PojoBinding < P > createPojoBinding ( Class < P > pojoType ) { } } | DefaultPojoBinding < P > binding = ( DefaultPojoBinding < P > ) bindingMap . get ( pojoType ) ; if ( binding == null ) { binding = new DefaultPojoBinding < > ( pojoType ) ; bindingMap . put ( pojoType , binding ) ; List < Facet > keyFacets = injectFacetBindings ( binding , pojoType ) ; injectKeyBinding ( binding , pojoType , keyFacets ) ; } return binding ; |
public class StopWatch { /** * Stop ( or re - stop ) stopwatch
* @ return */
public synchronized void stop ( ) { } } | if ( state != State . STOPPED ) { nanoTimeStop = System . nanoTime ( ) ; state = State . STOPPED ; } |
public class ResolvedType { /** * Method that will try to find type parameterization this type
* has for specified super type
* @ return List of type parameters for specified supertype ( which may
* be empty , if supertype is not a parametric type ) ; null if specified
* type is not a super type of this type */
public List < ResolvedType > typeParametersFor ( Class < ? > erasedSupertype ) { } } | ResolvedType type = findSupertype ( erasedSupertype ) ; if ( type != null ) { return type . typeParams ( ) ; } return null ; |
public class SignUpRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( SignUpRequest signUpRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( signUpRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( signUpRequest . getClientId ( ) , CLIENTID_BINDING ) ; protocolMarshaller . marshall ( signUpRequest . getSecretHash ( ) , SECRETHASH_BINDING ) ; protocolMarshaller . marshall ( signUpRequest . getUsername ( ) , USERNAME_BINDING ) ; protocolMarshaller . marshall ( signUpRequest . getPassword ( ) , PASSWORD_BINDING ) ; protocolMarshaller . marshall ( signUpRequest . getUserAttributes ( ) , USERATTRIBUTES_BINDING ) ; protocolMarshaller . marshall ( signUpRequest . getValidationData ( ) , VALIDATIONDATA_BINDING ) ; protocolMarshaller . marshall ( signUpRequest . getAnalyticsMetadata ( ) , ANALYTICSMETADATA_BINDING ) ; protocolMarshaller . marshall ( signUpRequest . getUserContextData ( ) , USERCONTEXTDATA_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ICalComponent { /** * Replaces all sub - components of a given class with the given component .
* @ param component the component
* @ return the replaced sub - components ( this list is immutable ) */
public List < ICalComponent > setComponent ( ICalComponent component ) { } } | return components . replace ( component . getClass ( ) , component ) ; |
public class Accounts { /** * This method allows you to check if a person already has a Moip account , by it ' s tax document or e - mail .
* The tax document must be write with punctuation , for example : 123.456.789-00.
* @ param argument
* { @ code String } the person ' s tax document or e - mail .
* @ param setup
* { @ code Setup } the setup object .
* @ return { @ code Map < String , Object > } */
public Map < String , Object > checkExistence ( String argument , Setup setup ) { } } | this . requestMaker = new RequestMaker ( setup ) ; String argumentType ; if ( isTaxDocument ( argument ) ) argumentType = "tax_document" ; else argumentType = "email" ; RequestProperties props = new RequestPropertiesBuilder ( ) . method ( "GET" ) . endpoint ( String . format ( "%s/exists?%s=%s" , ENDPOINT , argumentType , argument ) ) . type ( Accounts . class ) . contentType ( CONTENT_TYPE ) . build ( ) ; return this . requestMaker . doRequest ( props ) ; |
public class BaseVarExpander { /** * Expand a variable reference
* @ param data multi context data
* @ param viewMap factory of ViewTraverse type
* @ param step step text
* @ param group
* @ param key
* @ param node
* @ param < T >
* @ return */
public static < T extends ViewTraverse < T > > String expandVariable ( final MultiDataContext < T , DataContext > data , final T currentContext , final BiFunction < Integer , String , T > viewMap , final String step , final String group , final String key , final String node ) { } } | Integer t = null ; if ( null != step ) { try { t = Integer . parseInt ( step ) ; } catch ( NumberFormatException e ) { return null ; } } T view = viewMap . apply ( t , node ) ; T mergedview = view . merge ( currentContext ) . getView ( ) ; return data . resolve ( mergedview , view , group , key , null ) ; |
public class IIRFilter { /** * This will perform the filter on the samples
* @ param ringBuffer
* @ param preAmpedResultBuffer
* @ param start
* @ param length
* @ since 12.01.2012 */
public int doFilter ( final float [ ] ringBuffer , final int start , final int length , final int useBands ) { } } | final float internalPreAmp = 1f / useBands ; final float rest = 1.0f - internalPreAmp ; final int end = start + length ; int index = start ; while ( index < end ) { for ( int c = 0 ; c < channels ; c ++ ) { final int sampleIndex = ( index ++ ) % sampleBufferSize ; float sample = 0 ; // Run the difference equation
final float preAmpedSample = ringBuffer [ sampleIndex ] * preAmp * internalPreAmp ; for ( int f = 0 ; f < useBands ; f ++ ) { IIRFilterBase filter = filters [ f ] ; sample += filter . performFilterCalculation ( preAmpedSample , c , iIndex , jIndex , kIndex ) * filter . amplitudeAdj ; } sample += ( ringBuffer [ sampleIndex ] * rest ) ; ringBuffer [ sampleIndex ] = ( sample > 1.0f ) ? 1.0f : ( ( sample < - 1.0f ) ? - 1.0f : sample ) ; } // Do indices maintenance
iIndex = ( iIndex + 1 ) % IIRFilterBase . HISTORYSIZE ; jIndex = ( jIndex + 1 ) % IIRFilterBase . HISTORYSIZE ; kIndex = ( kIndex + 1 ) % IIRFilterBase . HISTORYSIZE ; } return length ; |
public class OjbMetadataTransferable { /** * Returns whether or not the specified data flavor is supported for
* this object .
* @ param flavor the requested flavor for the data
* @ return boolean indicating whether or not the data flavor is supported */
public boolean isDataFlavorSupported ( DataFlavor flavor ) { } } | return java . util . Arrays . asList ( _flavors ) . contains ( flavor ) ; |
public class WatchDir { /** * Process a single key queued to the watcher
* @ param delayQueue */
void processEvent ( DelayQueue < Delayed > delayQueue ) { } } | // wait for key to be signaled
WatchKey key ; try { key = watcher . poll ( 250 , TimeUnit . MILLISECONDS ) ; } catch ( InterruptedException | ClosedWatchServiceException x ) { return ; } if ( key == null ) { return ; } Path dir = keys . get ( key ) ; if ( dir == null ) { return ; } for ( WatchEvent < ? > event : key . pollEvents ( ) ) { WatchEvent . Kind < ? > kind = event . kind ( ) ; if ( kind == OVERFLOW ) { continue ; } // Context for directory entry event is the file name of entry
@ SuppressWarnings ( "unchecked" ) WatchEvent < Path > ev = ( WatchEvent < Path > ) event ; Path name = ev . context ( ) ; Path child = dir . resolve ( name ) ; if ( ignore . contains ( child ) ) { return ; } if ( ! ignorePattern . stream ( ) . anyMatch ( m -> child . getFileSystem ( ) . getPathMatcher ( m ) . matches ( child . getFileName ( ) ) ) ) { delayQueue . add ( new WatchDirDelay ( ) ) ; } // if directory is created , and watching recursively , then
// register it and its sub - directories
if ( kind == ENTRY_CREATE ) { try { if ( Files . isDirectory ( child , NOFOLLOW_LINKS ) ) { registerAll ( child ) ; } } catch ( IOException x ) { } } } // reset key and remove from set if directory no longer accessible
boolean valid = key . reset ( ) ; if ( ! valid ) { keys . remove ( key ) ; // all directories are inaccessible
if ( keys . isEmpty ( ) ) { return ; } } |
public class SyncGroupsInner { /** * Gets a collection of sync group logs .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; SyncGroupLogPropertiesInner & gt ; object */
public Observable < Page < SyncGroupLogPropertiesInner > > listLogsNextAsync ( final String nextPageLink ) { } } | return listLogsNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < SyncGroupLogPropertiesInner > > , Page < SyncGroupLogPropertiesInner > > ( ) { @ Override public Page < SyncGroupLogPropertiesInner > call ( ServiceResponse < Page < SyncGroupLogPropertiesInner > > response ) { return response . body ( ) ; } } ) ; |
public class ScrollHelper { /** * Will perform a detection whether the element is in the View Port Scope or not , providing
* the { @ link # containerElement } as the wrapper or container of the target element .
* Note : By default if you didn ' t provide a container , the html or body element should be
* applied as the target ' s container .
* @ param element The element you are checking if it ' s inside the viewport scope . */
public boolean isInViewPort ( Element element ) { } } | double elementTop = $ ( element ) . offset ( ) . top ; double elementBottom = elementTop + $ ( element ) . outerHeight ( ) ; JQueryElement target = getContainerElement ( ) ; double viewportTop = target . scrollTop ( ) ; if ( target . asElement ( ) != getDefaultContainer ( ) ) { viewportTop = target . offset ( ) . top ; } double viewportBottom = viewportTop + target . height ( ) ; return elementBottom > viewportTop && elementTop < viewportBottom ; |
public class Graphs { /** * Returns { @ code true } if { @ code graphA } and { @ code graphB } have the same elements ( including
* edge values ) and the same relationships between elements , as exposed via the { @ link ValueGraph }
* interface .
* < p > Thus , two value graphs A and B are equivalent if both are null or < b > all < / b > of the
* following are true :
* < ul >
* < li > A and B have equal { @ link Graph # isDirected ( ) directedness } .
* < li > A and B have equal { @ link Graph # nodes ( ) node sets } .
* < li > A and B have equal { @ link Graph # edges ( ) edge sets } .
* < li > Each edge in A has a { @ link ValueGraph # edgeValue ( Object , Object ) value } equal to the { @ link
* ValueGraph # edgeValue ( Object , Object ) value } of the corresponding edge in B .
* < / ul >
* < p > Graph properties besides { @ link Graph # isDirected ( ) directedness } do < b > not < / b > affect
* equivalence . For example , two graphs may be considered equivalent even if one allows self - loops
* and the other doesn ' t . Additionally , the order in which nodes or edges are added to the graph ,
* and the order in which they are iterated over , are irrelevant . */
public static boolean equivalent ( @ Nullable ValueGraph < ? , ? > graphA , @ Nullable ValueGraph < ? , ? > graphB ) { } } | if ( graphA == graphB ) { return true ; } if ( graphA == null || graphB == null ) { return false ; } if ( graphA . isDirected ( ) != graphB . isDirected ( ) || ! graphA . nodes ( ) . equals ( graphB . nodes ( ) ) || ! graphA . edges ( ) . equals ( graphB . edges ( ) ) ) { return false ; } for ( EndpointPair < ? > edge : graphA . edges ( ) ) { if ( ! graphA . edgeValue ( edge . nodeU ( ) , edge . nodeV ( ) ) . equals ( graphB . edgeValue ( edge . nodeU ( ) , edge . nodeV ( ) ) ) ) { return false ; } } return true ; |
public class SSTableRewriter { /** * cleanup all our temporary readers and swap in our new ones */
private void replaceWithFinishedReaders ( List < SSTableReader > finished ) { } } | if ( isOffline ) { for ( SSTableReader reader : discard ) { if ( reader . getCurrentReplacement ( ) == reader ) reader . markObsolete ( ) ; reader . selfRef ( ) . release ( ) ; } } else { dataTracker . replaceEarlyOpenedFiles ( discard , finished ) ; dataTracker . unmarkCompacting ( discard ) ; } discard . clear ( ) ; |
public class HttpChannelPool { /** * Adds a { @ link Channel } to this pool . */
private void addToPool ( SessionProtocol actualProtocol , PoolKey key , PooledChannel pooledChannel ) { } } | assert eventLoop . inEventLoop ( ) : Thread . currentThread ( ) . getName ( ) ; getOrCreatePool ( actualProtocol , key ) . addLast ( pooledChannel ) ; |
public class AbstractMappableValidator { /** * Disconnects the specified result handler from all rules .
* @ param resultHandler Result handler to be unmapped . */
private void unmapResultHandlerFromAllRules ( final RH resultHandler ) { } } | if ( resultHandler != null ) { for ( final List < RH > mappedResultHandlers : rulesToResultHandlers . values ( ) ) { mappedResultHandlers . remove ( resultHandler ) ; } } |
public class DefaultWhenFileSystem { /** * Returns the path representing the file that the symbolic link specified by { @ code link } points to , asynchronously .
* @ param link the link
* @ return a promise for the path */
@ Override public Promise < String > readSymlink ( String link ) { } } | return adapter . toPromise ( handler -> vertx . fileSystem ( ) . readSymlink ( link , handler ) ) ; |
public class ElementCollectionImpl { /** * If not already created , a new < code > map - key - class < / code > element with the given value will be created .
* Otherwise , the existing < code > map - key - class < / code > element will be returned .
* @ return a new or existing instance of < code > MapKeyClass < ElementCollection < T > > < / code > */
public MapKeyClass < ElementCollection < T > > getOrCreateMapKeyClass ( ) { } } | Node node = childNode . getOrCreate ( "map-key-class" ) ; MapKeyClass < ElementCollection < T > > mapKeyClass = new MapKeyClassImpl < ElementCollection < T > > ( this , "map-key-class" , childNode , node ) ; return mapKeyClass ; |
public class WindowsPreferencesBuilder { /** * Returns this java string as a null - terminated byte array */
private static byte [ ] stringToByteArray ( String str ) { } } | byte [ ] result = new byte [ str . length ( ) + 1 ] ; for ( int i = 0 ; i < str . length ( ) ; i ++ ) { result [ i ] = ( byte ) str . charAt ( i ) ; } result [ str . length ( ) ] = 0 ; return result ; |
public class AttributeHelper { /** * Extract data from DeviceAttribute to a Long
* @ param deviceAttributeRead
* the DeviceAttribute attribute to read
* @ return Long , the result in Long format
* @ throws DevFailed */
public static Long extractToLong ( final DeviceAttribute deviceAttributeRead ) throws DevFailed { } } | final Object value = AttributeHelper . extract ( deviceAttributeRead ) ; Long argout = null ; if ( value instanceof Short ) { argout = Long . valueOf ( ( ( Short ) value ) . longValue ( ) ) ; } else if ( value instanceof String ) { try { argout = Long . valueOf ( ( String ) value ) ; } catch ( final Exception e ) { Except . throw_exception ( "TANGO_WRONG_DATA_ERROR" , "output type " + value + " is not a numerical" , "AttributeHelper.extractToLong(deviceAttributeWritten)" ) ; } } else if ( value instanceof Integer ) { argout = Long . valueOf ( ( ( Integer ) value ) . longValue ( ) ) ; } else if ( value instanceof Long ) { argout = Long . valueOf ( ( ( Long ) value ) . longValue ( ) ) ; } else if ( value instanceof Float ) { argout = Long . valueOf ( ( ( Float ) value ) . longValue ( ) ) ; } else if ( value instanceof Boolean ) { if ( ( ( Boolean ) value ) . booleanValue ( ) ) { argout = Long . valueOf ( 1 ) ; } else { argout = Long . valueOf ( 0 ) ; } } else if ( value instanceof Double ) { argout = Long . valueOf ( ( ( Double ) value ) . longValue ( ) ) ; } else if ( value instanceof DevState ) { argout = Long . valueOf ( Integer . valueOf ( ( ( DevState ) value ) . value ( ) ) . longValue ( ) ) ; } else { Except . throw_exception ( "TANGO_WRONG_DATA_ERROR" , "output type " + value . getClass ( ) + " not supported" , "AttributeHelper.extractToLong(Object value,deviceAttributeWritten)" ) ; } return argout ; |
public class Op { /** * Creates an < i > operation expression < / i > on the specified target object .
* @ param target the target object on which the expression will execute
* @ return an operator , ready for chaining */
public static < T > Level0ListOperator < List < T > , T > on ( final List < T > target ) { } } | return onList ( target ) ; |
public class PageFlowUtils { /** * Create a fully - rewritten URI given a path and parameters .
* < p > Calls the rewriter service using a type of { @ link URLType # ACTION } . < / p >
* @ param servletContext the current ServletContext .
* @ param request the current HttpServletRequest .
* @ param response the current HttpServletResponse .
* @ param path the path to process into a fully - rewritten URI .
* @ param params the additional parameters to include in the URI query .
* @ param fragment the fragment ( anchor or location ) for this URI .
* @ param forXML flag indicating that the query of the uri should be written
* using the & quot ; & amp ; amp ; & quot ; entity , rather than the character , ' & amp ; ' .
* @ return a fully - rewritten URI for the given action .
* @ throws URISyntaxException if there ' s a problem converting the action URI ( derived
* from processing the given action name ) into a MutableURI . */
public static String getRewrittenHrefURI ( ServletContext servletContext , HttpServletRequest request , HttpServletResponse response , String path , Map params , String fragment , boolean forXML ) throws URISyntaxException { } } | return rewriteResourceOrHrefURL ( servletContext , request , response , path , params , fragment , forXML , URLType . ACTION ) ; |
public class HashUtil { /** * 对输入字符串进行sha1散列 , 带salt达到更高的安全性 . */
public static byte [ ] sha1 ( @ NotNull byte [ ] input , @ Nullable byte [ ] salt ) { } } | return digest ( input , get ( SHA_1_DIGEST ) , salt , 1 ) ; |
public class vpnsessionaction { /** * Use this API to fetch vpnsessionaction resource of given name . */
public static vpnsessionaction get ( nitro_service service , String name ) throws Exception { } } | vpnsessionaction obj = new vpnsessionaction ( ) ; obj . set_name ( name ) ; vpnsessionaction response = ( vpnsessionaction ) obj . get_resource ( service ) ; return response ; |
public class ParameterValue {
    /**
     * Adds environmental variables for the builds to the given map.
     * This provides a means for a parameter to pass the parameter
     * values to the build to be performed.
     * When this method is invoked, the map already contains the
     * current "planned export" list. The implementation is
     * expected to add more values to this map (or do nothing).
     * Formerly, environment variables would be by convention all upper case.
     * (This is so that a Windows/Unix heterogeneous environment
     * won't get inconsistent result depending on which platform to
     * execute.) But now see {@link EnvVars} why upper casing is a bad idea.
     *
     * @param env never null.
     * @param build The build for which this parameter is being used. Never null.
     * @deprecated as of 1.344
     *     Use {@link #buildEnvironment(Run, EnvVars)} instead.
     */
    @Deprecated
    public void buildEnvVars(AbstractBuild<?, ?> build, Map<String, String> env) {
        // Only delegate when the map really is an EnvVars; a plain Map cannot be forwarded
        // to the newer overloads, so in that case this remains a no-op.
        if (env instanceof EnvVars) {
            if (Util.isOverridden(ParameterValue.class, getClass(), "buildEnvironment", Run.class, EnvVars.class)) {
                // if the subtype already derives buildEnvironment, then delegate to it
                buildEnvironment(build, (EnvVars) env);
            } else if (Util.isOverridden(ParameterValue.class, getClass(), "buildEnvVars", AbstractBuild.class, EnvVars.class)) {
                // Fall back to the subtype's deprecated EnvVars overload, if it has one.
                buildEnvVars(build, (EnvVars) env);
            }
        }
        // otherwise no-op by default
    }
}
public class DescriptorList { /** * Creates a new instance of a { @ link Describable }
* from the structured form submission data posted
* by a radio button group .
* @ param parent JSON , which contains the configuration entry for the radio list
* @ param name Name of the configuration entry for the radio list
* @ return new instance or { @ code null } if none was selected in the radio list
* @ throws FormException Data submission error */
@ CheckForNull public T newInstanceFromRadioList ( JSONObject parent , String name ) throws FormException { } } | return newInstanceFromRadioList ( parent . getJSONObject ( name ) ) ; |
public class DescribeAccountModificationsResult { /** * The list of modifications to the configuration of BYOL .
* @ return The list of modifications to the configuration of BYOL . */
public java . util . List < AccountModification > getAccountModifications ( ) { } } | if ( accountModifications == null ) { accountModifications = new com . amazonaws . internal . SdkInternalList < AccountModification > ( ) ; } return accountModifications ; |
public class KeyField { /** * Get the field that this KeyField points to .
* @ param areaDesc KeyArea type File / Temp / Start / End .
* @ return The field . */
public BaseField getField ( int iAreaDesc ) { } } | switch ( iAreaDesc ) { default : case DBConstants . FILE_KEY_AREA : return m_field ; case DBConstants . TEMP_KEY_AREA : if ( m_fieldTempParam == null ) { try { m_fieldTempParam = ( BaseField ) m_field . clone ( ) ; // Buffer areas
m_fieldTempParam . setFieldName ( new String ( m_field . getFieldName ( false , false ) + "Temp" ) ) ; m_fieldTempParam . setNullable ( true ) ; } catch ( CloneNotSupportedException ex ) { m_fieldTempParam = null ; } } return m_fieldTempParam ; case DBConstants . START_SELECT_KEY : if ( m_fieldStartParam == null ) { try { m_fieldStartParam = ( BaseField ) m_field . clone ( ) ; // Buffer areas
m_fieldStartParam . setFieldName ( new String ( m_field . getFieldName ( false , false ) + "Start" ) ) ; m_fieldStartParam . setNullable ( true ) ; } catch ( CloneNotSupportedException ex ) { m_fieldStartParam = null ; } } return m_fieldStartParam ; case DBConstants . END_SELECT_KEY : if ( m_fieldEndParam == null ) { try { m_fieldEndParam = ( BaseField ) m_field . clone ( ) ; m_fieldEndParam . setFieldName ( new String ( m_field . getFieldName ( false , false ) + "End" ) ) ; m_fieldEndParam . setNullable ( true ) ; } catch ( CloneNotSupportedException ex ) { m_fieldEndParam = null ; } } return m_fieldEndParam ; } |
public class ConcurrentGrouper { /** * Merge dictionaries of { @ link Grouper . KeySerde } s of { @ link Grouper } s . The result dictionary contains unique string
* keys .
* @ return merged dictionary if its size does not exceed max dictionary size . Otherwise null . */
@ Nullable private List < String > tryMergeDictionary ( ) { } } | final Set < String > mergedDictionary = new HashSet < > ( ) ; long totalDictionarySize = 0L ; for ( SpillingGrouper < KeyType > grouper : groupers ) { final List < String > dictionary = grouper . mergeAndGetDictionary ( ) ; for ( String key : dictionary ) { if ( mergedDictionary . add ( key ) ) { totalDictionarySize += RowBasedGrouperHelper . estimateStringKeySize ( key ) ; if ( totalDictionarySize > maxDictionarySizeForCombiner ) { return null ; } } } } return ImmutableList . copyOf ( mergedDictionary ) ; |
public class RemediationParameterValueMarshaller {
    /**
     * Marshall the given parameter object.
     * Writes the resource-value and static-value members through the protocol
     * marshaller using their pre-built bindings.
     *
     * @param remediationParameterValue the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write into
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(RemediationParameterValue remediationParameterValue, ProtocolMarshaller protocolMarshaller) {
        if (remediationParameterValue == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(remediationParameterValue.getResourceValue(), RESOURCEVALUE_BINDING);
            protocolMarshaller.marshall(remediationParameterValue.getStaticValue(), STATICVALUE_BINDING);
        } catch (Exception e) {
            // Wrap everything in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class MethodInfoCache { /** * Put a method into the MethodCache .
* @ param beanName the name of the bean
* @ param clazz the class of the bean
* @ param method the method
* @ param context the Spring application context */
public void put ( String beanName , Class < ? > clazz , Method method , ApplicationContext context ) { } } | MethodInfo info = new MethodInfo ( clazz , context , beanName , method ) ; this . cache . put ( new Key ( beanName , method . getName ( ) ) , info ) ; |
public class MtasSolrComponentPrefix { /** * ( non - Javadoc )
* @ see
* mtas . solr . handler . component . util . MtasSolrComponent # create ( mtas . codec . util .
* CodecComponent . BasicComponent , java . lang . Boolean ) */
public SimpleOrderedMap < Object > create ( ComponentPrefix prefix , Boolean encode ) throws IOException { } } | SimpleOrderedMap < Object > mtasPrefixResponse = new SimpleOrderedMap < > ( ) ; mtasPrefixResponse . add ( "key" , prefix . key ) ; if ( encode ) { mtasPrefixResponse . add ( "_encoded_singlePosition" , MtasSolrResultUtil . encode ( prefix . singlePositionList ) ) ; mtasPrefixResponse . add ( "_encoded_multiplePosition" , MtasSolrResultUtil . encode ( prefix . multiplePositionList ) ) ; mtasPrefixResponse . add ( "_encoded_setPosition" , MtasSolrResultUtil . encode ( prefix . setPositionList ) ) ; mtasPrefixResponse . add ( "_encoded_intersecting" , MtasSolrResultUtil . encode ( prefix . intersectingList ) ) ; } else { mtasPrefixResponse . add ( "singlePosition" , prefix . singlePositionList ) ; mtasPrefixResponse . add ( "multiplePosition" , prefix . multiplePositionList ) ; mtasPrefixResponse . add ( "setPosition" , prefix . setPositionList ) ; mtasPrefixResponse . add ( "intersecting" , prefix . intersectingList ) ; } return mtasPrefixResponse ; |
public class DateUtils { /** * Returns a String representation of a { @ code date } object
* @ param date
* as Date
* @ return String representation of a { @ code date } object . The String is in the form { @ code 2010-02-28T16:11:08} */
public static String toLongFormatFromDate ( Date date ) { } } | if ( date == null ) { return null ; } return strategy . formatFor ( FACEBOOK_LONG_DATE_FORMAT_WITHOUT_TIMEZONE ) . format ( date ) ; |
public class DataObjectFontDescriptorImpl {
    /**
     * Sets the encoding id and, when listeners are attached, fires an EMF SET
     * notification carrying the old and new values.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setEncID(Integer newEncID) {
        Integer oldEncID = encID;
        encID = newEncID;
        // Standard EMF generated pattern: notify only when adapters are registered.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.DATA_OBJECT_FONT_DESCRIPTOR__ENC_ID, oldEncID, encID));
    }
}
public class BaseCalendar { /** * Returns the end of the given day < code > { @ link Calendar } < / code > . This
* calculation will take the < code > BaseCalendar < / code > time zone into account
* if it is not < code > null < / code > .
* @ param timeInMillis
* a time containing the desired date for the end - of - day time .
* @ return A < code > { @ link Calendar } < / code > set to the end of the given day . */
protected Calendar getEndOfDayJavaCalendar ( final long timeInMillis ) { } } | final Calendar endOfDay = createJavaCalendar ( timeInMillis ) ; endOfDay . set ( Calendar . HOUR_OF_DAY , 23 ) ; endOfDay . set ( Calendar . MINUTE , 59 ) ; endOfDay . set ( Calendar . SECOND , 59 ) ; endOfDay . set ( Calendar . MILLISECOND , 999 ) ; return endOfDay ; |
public class LocaleUtility {
    /**
     * A helper function to convert a string of the form aa_BB_CC to a
     * {@link Locale} object. (Why isn't this in Locale?)
     *
     * @param name a name such as "en", "en_US" or "en_US_POSIX"; everything after
     *             the second underscore becomes the variant
     * @return the corresponding Locale, with missing pieces defaulting to ""
     */
    public static Locale getLocaleFromName(String name) {
        // Split into at most three pieces; the limit keeps any further
        // underscores inside the variant, matching the manual indexOf parse.
        final String[] parts = name.split("_", 3);
        final String language = parts[0];
        final String country = parts.length > 1 ? parts[1] : "";
        final String variant = parts.length > 2 ? parts[2] : "";
        return new Locale(language, country, variant);
    }
}
public class CharsetEncoder {
    /**
     * Changes this encoder's action for unmappable-character errors.
     * <p>This method invokes the {@link #implOnUnmappableCharacter
     * implOnUnmappableCharacter} method, passing the new action.</p>
     *
     * @param newAction The new action; must not be <tt>null</tt>
     * @return This encoder
     * @throws IllegalArgumentException If the precondition on the parameter does not hold
     */
    public final CharsetEncoder onUnmappableCharacter(CodingErrorAction newAction) {
        if (newAction == null)
            throw new IllegalArgumentException("Null action");
        // Record the action first, then give subclasses a chance to react.
        unmappableCharacterAction = newAction;
        implOnUnmappableCharacter(newAction);
        return this;
    }
}
public class CmsAppWorkplaceUi {
    /**
     * Navigates to the current URI fragment, falling back to the user's
     * configured start view and finally to the home screen.<p>
     */
    private void navigateToFragment() {
        String fragment = getPage().getUriFragment();
        if (fragment != null) {
            getNavigator().navigateTo(fragment);
        } else {
            CmsObject cms = getCmsObject();
            String target = CmsLoginHelper.getStartView(cms);
            if (target != null) {
                if (target.startsWith("#")) {
                    // Fragment-style start view: strip the '#' and navigate in place.
                    getNavigator().navigateTo(target.substring(1));
                } else {
                    // Plain path: leave the UI via a full page redirect through the link manager.
                    Page.getCurrent().setLocation(OpenCms.getLinkManager().substituteLink(cms, target));
                }
            } else {
                showHome();
            }
        }
    }
}
public class FilePath { /** * Creates a zip file from this directory by using the specified filter ,
* and sends the result to the given output stream .
* @ param filter
* Must be serializable since it may be executed remotely . Can be null to add all files .
* @ since 1.315 */
public void zip ( OutputStream os , FileFilter filter ) throws IOException , InterruptedException { } } | archive ( ArchiverFactory . ZIP , os , filter ) ; |
public class HQMessaging { /** * HornetQ 2.3 has single - arity destroy functions , 2.4 has double - arity */
private void invokeDestroy ( String method , String name ) { } } | Class clazz = this . jmsServerManager ( ) . getClass ( ) ; Method destroy = null ; for ( Method each : clazz . getMethods ( ) ) { if ( method . equals ( each . getName ( ) ) ) { destroy = each ; break ; } } if ( destroy == null ) { throw new IllegalStateException ( String . format ( "Class %s has no %s method" , clazz , method ) ) ; } try { if ( destroy . getParameterTypes ( ) . length == 1 ) { destroy . invoke ( this . jmsServerManager ( ) , name ) ; } else { destroy . invoke ( this . jmsServerManager ( ) , name , true ) ; } } catch ( IllegalAccessException | InvocationTargetException e ) { throw new RuntimeException ( "Failed to destroy destination " + name , e ) ; } |
public class AbstractMatcher { /** * Obtain all the matching resources that have a MatchTyoe with the URIs of { @ code origin } of the type provided ( inclusive ) or less .
* @ param origins URIs to match
* @ param maxType the maximum MatchType we want to obtain
* @ return a { @ link com . google . common . collect . Table } with the result of the matching indexed by origin URI and then destination URI . */
@ Override public Table < URI , URI , MatchResult > listMatchesAtMostOfType ( Set < URI > origins , MatchType maxType ) { } } | return listMatchesWithinRange ( origins , this . matchTypesSupported . getLowest ( ) , maxType ) ; |
public class Ec2MachineConfigurator { /** * Associates an elastic IP with the VM .
* @ return true if there is nothing more to do about elastic IP configuration , false otherwise */
private boolean associateElasticIp ( ) { } } | String elasticIp = this . targetProperties . get ( Ec2Constants . ELASTIC_IP ) ; if ( ! Utils . isEmptyOrWhitespaces ( elasticIp ) ) { this . logger . fine ( "Associating an elastic IP with the instance. IP = " + elasticIp ) ; AssociateAddressRequest associateAddressRequest = new AssociateAddressRequest ( this . machineId , elasticIp ) ; this . ec2Api . associateAddress ( associateAddressRequest ) ; } return true ; |
public class TagletWriterImpl { /** * { @ inheritDoc } */
public Content commentTagsToOutput ( DocTree holderTag , Element holder , List < ? extends DocTree > tags , boolean isFirstSentence ) { } } | return htmlWriter . commentTagsToContent ( holderTag , holder , tags , isFirstSentence ) ; |
public class CliParser { /** * Adds the advanced command line options to the given options collection .
* These are split out for purposes of being able to display two different
* help messages .
* @ param options a collection of command line arguments */
@ SuppressWarnings ( "static-access" ) private void addAdvancedOptions ( final Options options ) { } } | final Option cveBase = Option . builder ( ) . argName ( "url" ) . hasArg ( ) . longOpt ( ARGUMENT . CVE_BASE_URL ) . desc ( "Base URL for each year’s CVE files (json.gz), the %d will be replaced with the year. " ) . build ( ) ; final Option cveModified = Option . builder ( ) . argName ( "url" ) . hasArg ( ) . longOpt ( ARGUMENT . CVE_MODIFIED_URL ) . desc ( "URL for the modified CVE (json.gz)." ) . build ( ) ; final Option updateOnly = Option . builder ( ) . longOpt ( ARGUMENT . UPDATE_ONLY ) . desc ( "Only update the local NVD data cache; no scan will be executed." ) . build ( ) ; final Option data = Option . builder ( ARGUMENT . DATA_DIRECTORY_SHORT ) . argName ( "path" ) . hasArg ( ) . longOpt ( ARGUMENT . DATA_DIRECTORY ) . desc ( "The location of the H2 Database file. This option should generally not be set." ) . build ( ) ; final Option nexusUrl = Option . builder ( ) . argName ( "url" ) . hasArg ( ) . longOpt ( ARGUMENT . NEXUS_URL ) . desc ( "The url to the Nexus Server's REST API Endpoint (http://domain/nexus/service/local). " + "If not set the Nexus Analyzer will be disabled." ) . build ( ) ; final Option nexusUsername = Option . builder ( ) . argName ( "username" ) . hasArg ( ) . longOpt ( ARGUMENT . NEXUS_USERNAME ) . desc ( "The username to authenticate to the Nexus Server's REST API Endpoint. " + "If not set the Nexus Analyzer will use an unauthenticated connection." ) . build ( ) ; final Option nexusPassword = Option . builder ( ) . argName ( "password" ) . hasArg ( ) . longOpt ( ARGUMENT . NEXUS_PASSWORD ) . desc ( "The password to authenticate to the Nexus Server's REST API Endpoint. " + "If not set the Nexus Analyzer will use an unauthenticated connection." ) . build ( ) ; final Option nexusUsesProxy = Option . builder ( ) . argName ( "true/false" ) . hasArg ( ) . longOpt ( ARGUMENT . NEXUS_USES_PROXY ) . 
desc ( "Whether or not the configured proxy should be used when connecting to Nexus." ) . build ( ) ; final Option additionalZipExtensions = Option . builder ( ) . argName ( "extensions" ) . hasArg ( ) . longOpt ( ARGUMENT . ADDITIONAL_ZIP_EXTENSIONS ) . desc ( "A comma separated list of additional extensions to be scanned as ZIP files " + "(ZIP, EAR, WAR are already treated as zip files)" ) . build ( ) ; final Option pathToCore = Option . builder ( ) . argName ( "path" ) . hasArg ( ) . longOpt ( ARGUMENT . PATH_TO_CORE ) . desc ( "The path to dotnet core." ) . build ( ) ; final Option pathToBundleAudit = Option . builder ( ) . argName ( "path" ) . hasArg ( ) . longOpt ( ARGUMENT . PATH_TO_BUNDLE_AUDIT ) . desc ( "The path to bundle-audit for Gem bundle analysis." ) . build ( ) ; final Option connectionTimeout = Option . builder ( ARGUMENT . CONNECTION_TIMEOUT_SHORT ) . argName ( "timeout" ) . hasArg ( ) . longOpt ( ARGUMENT . CONNECTION_TIMEOUT ) . desc ( "The connection timeout (in milliseconds) to use when downloading resources." ) . build ( ) ; final Option proxyServer = Option . builder ( ) . argName ( "server" ) . hasArg ( ) . longOpt ( ARGUMENT . PROXY_SERVER ) . desc ( "The proxy server to use when downloading resources." ) . build ( ) ; final Option proxyPort = Option . builder ( ) . argName ( "port" ) . hasArg ( ) . longOpt ( ARGUMENT . PROXY_PORT ) . desc ( "The proxy port to use when downloading resources." ) . build ( ) ; final Option proxyUsername = Option . builder ( ) . argName ( "user" ) . hasArg ( ) . longOpt ( ARGUMENT . PROXY_USERNAME ) . desc ( "The proxy username to use when downloading resources." ) . build ( ) ; final Option proxyPassword = Option . builder ( ) . argName ( "pass" ) . hasArg ( ) . longOpt ( ARGUMENT . PROXY_PASSWORD ) . desc ( "The proxy password to use when downloading resources." ) . build ( ) ; final Option connectionString = Option . builder ( ) . argName ( "connStr" ) . hasArg ( ) . longOpt ( ARGUMENT . 
CONNECTION_STRING ) . desc ( "The connection string to the database." ) . build ( ) ; final Option dbUser = Option . builder ( ) . argName ( "user" ) . hasArg ( ) . longOpt ( ARGUMENT . DB_NAME ) . desc ( "The username used to connect to the database." ) . build ( ) ; final Option dbPassword = Option . builder ( ) . argName ( "password" ) . hasArg ( ) . longOpt ( ARGUMENT . DB_PASSWORD ) . desc ( "The password for connecting to the database." ) . build ( ) ; final Option dbDriver = Option . builder ( ) . argName ( "driver" ) . hasArg ( ) . longOpt ( ARGUMENT . DB_DRIVER ) . desc ( "The database driver name." ) . build ( ) ; final Option dbDriverPath = Option . builder ( ) . argName ( "path" ) . hasArg ( ) . longOpt ( ARGUMENT . DB_DRIVER_PATH ) . desc ( "The path to the database driver; note, this does not need to be set unless the JAR is outside of the classpath." ) . build ( ) ; final Option disableJarAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_JAR ) . desc ( "Disable the Jar Analyzer." ) . build ( ) ; final Option disableArchiveAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_ARCHIVE ) . desc ( "Disable the Archive Analyzer." ) . build ( ) ; final Option disableNuspecAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_NUSPEC ) . desc ( "Disable the Nuspec Analyzer." ) . build ( ) ; final Option disableNugetconfAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_NUGETCONF ) . desc ( "Disable the Nuget packages.config Analyzer." ) . build ( ) ; final Option disableAssemblyAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_ASSEMBLY ) . desc ( "Disable the .NET Assembly Analyzer." ) . build ( ) ; final Option disablePythonDistributionAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_PY_DIST ) . desc ( "Disable the Python Distribution Analyzer." ) . build ( ) ; final Option disablePythonPackageAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_PY_PKG ) . 
desc ( "Disable the Python Package Analyzer." ) . build ( ) ; final Option disableComposerAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_COMPOSER ) . desc ( "Disable the PHP Composer Analyzer." ) . build ( ) ; final Option disableAutoconfAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_AUTOCONF ) . desc ( "Disable the Autoconf Analyzer." ) . build ( ) ; final Option disableOpenSSLAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_OPENSSL ) . desc ( "Disable the OpenSSL Analyzer." ) . build ( ) ; final Option disableCmakeAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_CMAKE ) . desc ( "Disable the Cmake Analyzer." ) . build ( ) ; final Option cocoapodsAnalyzerEnabled = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_COCOAPODS ) . desc ( "Disable the CocoaPods Analyzer." ) . build ( ) ; final Option swiftPackageManagerAnalyzerEnabled = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_SWIFT ) . desc ( "Disable the swift package Analyzer." ) . build ( ) ; final Option disableCentralAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_CENTRAL ) . desc ( "Disable the Central Analyzer. If this analyzer is disabled it is likely you also want to disable " + "the Nexus Analyzer." ) . build ( ) ; final Option disableNexusAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_NEXUS ) . desc ( "Disable the Nexus Analyzer." ) . build ( ) ; final Option disableOssIndexAnalyzer = Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_OSSINDEX ) . desc ( "Disable the Sonatype OSS Index Analyzer." ) . build ( ) ; final Option purge = Option . builder ( ) . longOpt ( ARGUMENT . PURGE_NVD ) . desc ( "Purges the local NVD data cache" ) . build ( ) ; final Option retireJsFilters = Option . builder ( ) . argName ( "pattern" ) . hasArg ( ) . longOpt ( ARGUMENT . RETIREJS_FILTERS ) . 
desc ( "Specify Retire JS content filter used to exclude files from analysis based on their content; most commonly used " + "to exclude based on your applications own copyright line. This option can be specified multiple times." ) . build ( ) ; options . addOption ( updateOnly ) . addOption ( cveBase ) . addOption ( cveModified ) . addOption ( proxyPort ) . addOption ( proxyServer ) . addOption ( proxyUsername ) . addOption ( proxyPassword ) . addOption ( connectionTimeout ) . addOption ( connectionString ) . addOption ( dbUser ) . addOption ( data ) . addOption ( dbPassword ) . addOption ( dbDriver ) . addOption ( dbDriverPath ) . addOption ( disableJarAnalyzer ) . addOption ( disableArchiveAnalyzer ) . addOption ( disableAssemblyAnalyzer ) . addOption ( pathToBundleAudit ) . addOption ( disablePythonDistributionAnalyzer ) . addOption ( disableCmakeAnalyzer ) . addOption ( disablePythonPackageAnalyzer ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_RUBYGEMS ) . desc ( "Disable the Ruby Gemspec Analyzer." ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_BUNDLE_AUDIT ) . desc ( "Disable the Ruby Bundler-Audit Analyzer." ) . build ( ) ) . addOption ( disableAutoconfAnalyzer ) . addOption ( disableComposerAnalyzer ) . addOption ( disableOpenSSLAnalyzer ) . addOption ( disableNuspecAnalyzer ) . addOption ( disableNugetconfAnalyzer ) . addOption ( disableCentralAnalyzer ) . addOption ( disableNexusAnalyzer ) . addOption ( disableOssIndexAnalyzer ) . addOption ( cocoapodsAnalyzerEnabled ) . addOption ( swiftPackageManagerAnalyzerEnabled ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_NODE_JS ) . desc ( "Disable the Node.js Package Analyzer." ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_NODE_AUDIT ) . desc ( "Disable the Node Audit Analyzer." ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . DISABLE_RETIRE_JS ) . 
desc ( "Disable the RetireJS Analyzer." ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . RETIREJS_URL ) . desc ( "The Retire JS Respository URL" ) . argName ( "url" ) . hasArg ( true ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . RETIREJS_FILTER_NON_VULNERABLE ) . desc ( "Specifies that the Retire JS Analyzer should filter out non-vulnerable JS files from the report." ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . ARTIFACTORY_ENABLED ) . desc ( "Whether the Artifactory Analyzer should be enabled." ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . ARTIFACTORY_PARALLEL_ANALYSIS ) . desc ( "Whether the Artifactory Analyzer should use parallel analysis." ) . argName ( "true/false" ) . hasArg ( true ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . ARTIFACTORY_USES_PROXY ) . desc ( "Whether the Artifactory Analyzer should use the proxy." ) . argName ( "true/false" ) . hasArg ( true ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . ARTIFACTORY_USERNAME ) . desc ( "The Artifactory username for authentication." ) . argName ( "username" ) . hasArg ( true ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . ARTIFACTORY_API_TOKEN ) . desc ( "The Artifactory API token." ) . argName ( "token" ) . hasArg ( true ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . ARTIFACTORY_BEARER_TOKEN ) . desc ( "The Artifactory bearer token." ) . argName ( "token" ) . hasArg ( true ) . build ( ) ) . addOption ( Option . builder ( ) . longOpt ( ARGUMENT . ARTIFACTORY_URL ) . desc ( "The Artifactory URL." ) . argName ( "url" ) . hasArg ( true ) . build ( ) ) . addOption ( retireJsFilters ) . addOption ( nexusUrl ) . addOption ( nexusUsername ) . addOption ( nexusPassword ) . addOption ( nexusUsesProxy ) . addOption ( additionalZipExtensions ) . addOption ( pathToCore ) . addOption ( pathToBundleAudit ) . 
addOption ( purge ) ; |
public class StructuredQueryBuilder { /** * Matches a path specifying a geospatial region , which is indexed via
* geospatial region index , that has the relationship given by the operator
* with at least one of the criteria regions .
* @ param index the container for the geospatial regions
* @ param operator the geospatial operator to be applied with the regions in the
* index and the specified regions
* @ param regions the possible regions containing the region
* @ return the StructuredQueryDefinition for the geospatial query */
public StructuredQueryDefinition geospatial ( GeospatialRegionIndex index , GeospatialOperator operator , Region ... regions ) { } } | checkRegions ( regions ) ; return new GeospatialRegionQuery ( ( GeoRegionPathImpl ) index , operator , null , regions , null ) ; |
public class SessionDataManager {
    /**
     * {@inheritDoc}
     *
     * Lists child property data of the given node, timing the call when debug
     * logging is enabled. Note the debug flag is deliberately re-checked in the
     * finally block so log level changes mid-call cannot pair an entry line
     * with a missing exit line (or vice versa).
     */
    public List<PropertyData> listChildPropertiesData(NodeData parent) throws RepositoryException {
        long start = 0;
        if (LOG.isDebugEnabled()) {
            start = System.currentTimeMillis();
            LOG.debug("listChildPropertiesData(" + parent.getQPath().getAsString() + ") >>>>>");
        }
        try {
            // mergeProps combines persisted and transient state for this session.
            return (List<PropertyData>) mergeProps(parent, true, transactionableManager);
        } finally {
            if (LOG.isDebugEnabled()) {
                LOG.debug("listChildPropertiesData(" + parent.getQPath().getAsString() + ") <<<<< " + ((System.currentTimeMillis() - start) / 1000d) + "sec");
            }
        }
    }
}
public class DesignClockSkin {
    /****** Graphics ******/
    @Override
    public void updateTime(final ZonedDateTime TIME) {
        // Hand angle in degrees: 0.5°/minute over a 12-hour dial, with second and
        // millisecond fractions (1/60 and 1/60000 of a minute) for smooth motion.
        double rotationAngle = 0.5 * (60 * TIME.getHour() + TIME.getMinute() + TIME.getSecond() * 0.0166666667 + TIME.get(ChronoField.MILLI_OF_SECOND) * 0.0000166667);
        // Position on a circle around the skin's center; the +180 flips to the
        // opposite side of the dial — presumably so the canvas trails the hand; confirm.
        double rotationX = size * 0.5 + rotationRadius * Math.sin(Math.toRadians(rotationAngle + 180));
        double rotationY = size * 0.5 - rotationRadius * Math.cos(Math.toRadians(rotationAngle + 180));
        tickCanvas.relocate(rotationX - tickCanvas.getHeight() * 0.5, rotationY - tickCanvas.getHeight() * 0.5);
        needle.setRotate(rotationAngle);
        // Move the clip center along a smaller circle inside the tick canvas
        // (0.372 of the canvas height appears to be a tuned design constant).
        double canvasCenterX = tickCanvas.getWidth() * 0.5;
        double canvasCenterY = tickCanvas.getHeight() * 0.5;
        double radius = tickCanvas.getHeight() * 0.372;
        double rotX = canvasCenterX + radius * Math.sin(Math.toRadians(rotationAngle));
        double rotY = canvasCenterY - radius * Math.cos(Math.toRadians(rotationAngle));
        clip.setCenterX(rotX);
        clip.setCenterY(rotY);
    }
}
public class SipParser { /** * Helper method that counts the number of bytes that are considered part of
* the next token in the { @ link Buffer } .
* @ param buffer
* @ return a count of the number of bytes the next token contains or zero if
* no token is to be found within the buffer .
* @ throws IOException
* @ throws IndexOutOfBoundsException */
public static int getTokenCount ( final Buffer buffer ) throws IndexOutOfBoundsException , IOException { } } | boolean done = false ; int count = 0 ; buffer . markReaderIndex ( ) ; while ( buffer . hasReadableBytes ( ) && ! done ) { final byte b = buffer . readByte ( ) ; final boolean ok = isAlphaNum ( b ) || b == DASH || b == PERIOD || b == EXCLAMATIONPOINT || b == PERCENT || b == STAR || b == UNDERSCORE || b == PLUS || b == BACKTICK || b == TICK || b == TILDE ; if ( ok ) { ++ count ; } else { done = true ; } } buffer . resetReaderIndex ( ) ; return count ; |
public class UTCDateBox { /** * Converts a gwt Date in the timezone of the current browser to a time in
* UTC .
* @ return A Long corresponding to the number of milliseconds since January
* 1 , 1970 , 00:00:00 GMT or null if the specified Date is null . */
public static final Long date2utc ( Date date ) { } } | // use null for a null date
if ( date == null ) return null ; long time = date . getTime ( ) ; // remove the timezone offset
time -= timezoneOffsetMillis ( date ) ; return time ; |
public class CUjitInputType { /** * Returns the String identifying the given CUjitInputType
* @ param n The CUjitInputType
* @ return The String identifying the given CUjitInputType */
public static String stringFor ( int n ) { } } | switch ( n ) { case CU_JIT_INPUT_CUBIN : return "CU_JIT_INPUT_CUBIN" ; case CU_JIT_INPUT_PTX : return "CU_JIT_INPUT_PTX" ; case CU_JIT_INPUT_FATBINARY : return "CU_JIT_INPUT_FATBINARY" ; case CU_JIT_INPUT_OBJECT : return "CU_JIT_INPUT_OBJECT" ; case CU_JIT_INPUT_LIBRARY : return "CU_JIT_INPUT_LIBRARY" ; } return "INVALID CUjitInputType: " + n ; |
public class EncodingUtilImpl {
    /**
     * This method gets the singleton instance of this {@link EncodingUtilImpl}.<br>
     * <b>ATTENTION:</b><br>
     * Please prefer dependency-injection instead of using this method.
     *
     * @return the singleton instance.
     */
    public static EncodingUtil getInstance() {
        // Double-checked locking. NOTE(review): this is only safe under the Java memory
        // model if the `instance` field is declared volatile — confirm at its declaration.
        if (instance == null) {
            synchronized (EncodingUtilImpl.class) {
                if (instance == null) {
                    // Fully initialize the object before publishing it through the field.
                    EncodingUtilImpl util = new EncodingUtilImpl();
                    util.initialize();
                    instance = util;
                }
            }
        }
        return instance;
    }
}
public class AbstractConsumerManager { /** * Get the BaseDestinationHandler object to which this CD belongs
* @ return the BaseDestinationHandler object to which this CD belongs */
public BaseDestinationHandler getDestination ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getDestination" ) ; SibTr . exit ( tc , "getDestination" , _baseDestHandler ) ; } return _baseDestHandler ; |
public class CoronaJobTrackerRunner {
    /**
     * Delete all of the temporary map output files.
     * Logs completion for the current task, then removes every map output
     * associated with its task id.
     */
    @Override
    public void close() throws IOException {
        LOG.info(getTask() + " done; removing files.");
        mapOutputFile.removeAll(getTask().getTaskID());
    }
}
public class BsfUtils {
    /**
     * Selects the date pattern to use based on the given Locale if the input
     * format is null.
     *
     * @param locale Locale (may be the result of a call to selectLocale); only
     *        consulted when {@code format} is null
     * @param format input format String; returned unchanged when non-null
     * @return date pattern, e.g. dd/MM/yyyy
     */
    public static String selectDateFormat(Locale locale, String format) {
        // An explicit format always wins.
        if (format != null) {
            return format;
        }
        String pattern =
                ((SimpleDateFormat) DateFormat.getDateInstance(DateFormat.SHORT, locale)).toPattern();
        // DateFormat.SHORT yields two-digit years; upgrade the two common cases
        // to zero-padded, four-digit-year patterns.
        if ("M/d/yy".equals(pattern)) {
            return "MM/dd/yyyy";
        }
        if ("d/M/yy".equals(pattern)) {
            return "dd/MM/yyyy";
        }
        return pattern;
    }
}
public class RuleTreeRewriter {
    /**
     * Create a new rule based tree rewriter with the given parameters.
     *
     * @see TreeRewriteRule#compile(String)
     * @param rule the rewriter rule applied for this rewriter
     * @return a rewriter over String-valued trees using equality-based matching
     *         and identity value mapping
     * @throws IllegalArgumentException if the rewrite {@code rule} is invalid
     * @throws NullPointerException if the given {@code rule} is {@code null}
     */
    public static RuleTreeRewriter<String> compile(final String rule) {
        // Nodes are matched with Objects::equals; values pass through unchanged.
        return new RuleTreeRewriter<>(TreeRewriteRule.compile(rule), Objects::equals, Function.identity());
    }
}
public class HttpConnectorUtil { /** * Extract transport properties from transport configurations .
* @ param transportsConfiguration transportsConfiguration { @ link TransportsConfiguration } which transport
* properties should be extracted .
* @ return Map of transport properties . */
public static Map < String , Object > getTransportProperties ( TransportsConfiguration transportsConfiguration ) { } } | Map < String , Object > transportProperties = new HashMap < > ( ) ; Set < TransportProperty > transportPropertiesSet = transportsConfiguration . getTransportProperties ( ) ; if ( transportPropertiesSet != null && ! transportPropertiesSet . isEmpty ( ) ) { transportProperties = transportPropertiesSet . stream ( ) . collect ( Collectors . toMap ( TransportProperty :: getName , TransportProperty :: getValue ) ) ; } return transportProperties ; |
public class StorageAccountsInner {
    /**
     * Failover request can be triggered for a storage account in case of availability
     * issues. The failover occurs from the storage account's primary cluster to the
     * secondary cluster for RA-GRS accounts. The secondary cluster will become
     * primary after failover.
     *
     * @param resourceGroupName The name of the resource group within the user's subscription. The name is case insensitive.
     * @param accountName The name of the storage account within the specified resource group. Storage account names must be between 3 and 24 characters in length and use numbers and lower-case letters only.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> failoverAsync(String resourceGroupName, String accountName, final ServiceCallback<Void> serviceCallback) {
        // Thin async adapter: delegates to the service-response variant and wires the callback.
        return ServiceFuture.fromResponse(failoverWithServiceResponseAsync(resourceGroupName, accountName), serviceCallback);
    }
}
public class Value { /** * expects to be a json object named ' name ' ,
* so does not include name */
public JSONObject getJson ( ) throws JSONException { } } | JSONObject json = create ( ) ; if ( value != null ) json . put ( "value" , value ) ; if ( label != null ) json . put ( "label" , label ) ; if ( type != null ) json . put ( "type" , type ) ; if ( display != null ) json . put ( "display" , display . toString ( ) ) ; if ( sequence != 0 ) json . put ( "sequence" , sequence ) ; if ( indexKey != null ) json . put ( "indexKey" , indexKey ) ; return json ; |
public class CssAtRule {
    /**
     * {@inheritDoc}
     *
     * Writes this at-rule to the formatter. An {@code @charset} directive is
     * emitted at most once per output and, like {@code @import}, is redirected
     * to the header section of the generated CSS.
     */
    @Override
    public void appendTo(CssFormatter formatter) {
        if (css.startsWith("@charset")) {
            if (formatter.isCharsetDirective()) {
                return; // write charset only once
            }
            formatter.setCharsetDirective();
            // charset must appear at the very top of the generated CSS
            formatter = formatter.getHeader();
        } else if (css.startsWith("@import")) {
            // imports are likewise collected in the header section
            formatter = formatter.getHeader();
        }
        formatter.getOutput();
        SelectorUtils.appendToWithPlaceHolder(formatter, css, 1, false, this);
        formatter.newline();
    }
}
public class DescribeParserFactory { /** * Create describe parser instance .
* @ param dbType database type
* @ param shardingRule databases and tables sharding rule
* @ param lexerEngine lexical analysis engine .
* @ return describe parser instance */
public static AbstractDescribeParser newInstance ( final DatabaseType dbType , final ShardingRule shardingRule , final LexerEngine lexerEngine ) { } } | switch ( dbType ) { case H2 : case MySQL : return new MySQLDescribeParser ( shardingRule , lexerEngine ) ; default : throw new UnsupportedOperationException ( String . format ( "Cannot support database [%s]." , dbType ) ) ; } |
public class ServerControllerImpl {
    /**
     * Here's where Undertow is being rebuilt at {@link Undertow} level (not
     * {@link ServletContainer} level). This is where <em>global</em> objects
     * are configured (listeners, global filters, ...).
     */
    private void doConfigure() {
        Undertow.Builder builder = Undertow.builder();
        // if no configuration method change root handler, simple path -> HttpHandler will be used
        // where each HttpHandler is created in separate org.ops4j.pax.web.service.undertow.internal.Context
        HttpHandler rootHandler = path;
        // Decide how Undertow is configured: external XML, properties file, or PID.
        URL undertowResource = detectUndertowConfiguration();
        ConfigSource source = ConfigSource.kind(undertowResource);
        switch (source) {
            case XML:
                LOG.info("Using \"" + undertowResource + "\" to configure Undertow");
                rootHandler = configureUndertow(configuration, builder, rootHandler, undertowResource);
                break;
            case PROPERTIES:
                LOG.info("Using \"" + undertowResource + "\" to read additional configuration for Undertow");
                configureIdentityManager(undertowResource);
                // do not break - go to standard PID configuration
                // (intentional fall-through into the PID case)
            case PID:
                LOG.info("Using \"org.ops4j.pax.url.web\" PID to configure Undertow");
                rootHandler = configureUndertow(configuration, builder, rootHandler);
                break;
        }
        // Start every known servlet context with the shared session settings;
        // a failing context is logged but does not abort the others.
        for (Context context : contextMap.values()) {
            try {
                context.setSessionPersistenceManager(sessionPersistenceManager);
                context.setDefaultSessionTimeoutInMinutes(defaultSessionTimeoutInMinutes);
                context.start();
            } catch (Exception e) {
                LOG.error("Could not start the servlet context for context path ["
                        + context.getContextModel().getContextName() + "]", e);
            }
        }
        // Install the (possibly replaced) root handler and build the server instance.
        builder.setHandler(rootHandler);
        server = builder.build();
    }
}
public class RouteDispatcher { /** * Removes a Flash instance from the session , binds it to the RouteContext ,
* and creates a new Flash instance .
* @ param routeContext */
private void processFlash ( RouteContext routeContext ) { } } | Flash flash = null ; if ( routeContext . hasSession ( ) ) { // get flash from session
flash = routeContext . removeSession ( "flash" ) ; // put an empty flash ( outgoing flash ) in session ; defense against session . get ( " flash " )
routeContext . setSession ( "flash" , new Flash ( ) ) ; } if ( flash == null ) { flash = new Flash ( ) ; } // make current flash available to templates
routeContext . setLocal ( "flash" , flash ) ; |
public class IPv4Address { /** * Produces an array of blocks that are sequential that cover the same set of addresses as this .
* This array can be shorter than that produced by { @ link # spanWithPrefixBlocks ( ) } and is never longer .
* Unlike { @ link # spanWithSequentialBlocks ( IPAddress ) } this method only includes addresses that are a part of this subnet . */
@ Override public IPv4Address [ ] spanWithSequentialBlocks ( ) throws AddressConversionException { } } | if ( isSequential ( ) ) { return new IPv4Address [ ] { withoutPrefixLength ( ) } ; } @ SuppressWarnings ( "unchecked" ) ArrayList < IPv4Address > list = ( ArrayList < IPv4Address > ) spanWithBlocks ( false ) ; return list . toArray ( new IPv4Address [ list . size ( ) ] ) ; |
public class HtmlDocletWriter { /** * this is a hack to delay dealing with Annotations in the writers , the assumption
* is that all necessary checks have been made to get here . */
public void addReceiverAnnotationInfo ( ExecutableElement method , TypeMirror rcvrTypeMirror , List < ? extends AnnotationMirror > annotationMirrors , Content htmltree ) { } } | TypeMirror rcvrType = method . getReceiverType ( ) ; List < ? extends AnnotationMirror > annotationMirrors1 = rcvrType . getAnnotationMirrors ( ) ; addAnnotationInfo ( 0 , method , annotationMirrors1 , false , htmltree ) ; |
public class JniSocketImpl {
    /**
     * Returns a stream impl for the socket encapsulating the input and output
     * stream. The stream is created lazily on first use and (re)initialized on
     * every call.
     *
     * @throws IOException if the stream cannot be initialized
     */
    @Override
    public StreamImpl stream() throws IOException {
        if (_stream == null) {
            _stream = new JniStream(this);
        }
        // Re-bind the (new or cached) stream to the socket's current state.
        _stream.init();
        return _stream;
    }
}
public class UpdateLicenseSpecificationsForResourceRequest { /** * License configuration ARNs to be removed from a resource .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRemoveLicenseSpecifications ( java . util . Collection ) } or
* { @ link # withRemoveLicenseSpecifications ( java . util . Collection ) } if you want to override the existing values .
* @ param removeLicenseSpecifications
* License configuration ARNs to be removed from a resource .
* @ return Returns a reference to this object so that method calls can be chained together . */
public UpdateLicenseSpecificationsForResourceRequest withRemoveLicenseSpecifications ( LicenseSpecification ... removeLicenseSpecifications ) { } } | if ( this . removeLicenseSpecifications == null ) { setRemoveLicenseSpecifications ( new java . util . ArrayList < LicenseSpecification > ( removeLicenseSpecifications . length ) ) ; } for ( LicenseSpecification ele : removeLicenseSpecifications ) { this . removeLicenseSpecifications . add ( ele ) ; } return this ; |
public class DisplayOptions { @ Nonnull public static DisplayOptions _default ( @ Nonnull DisplayNoOpts _default ) { } } | DisplayOptions self = new DisplayOptions ( ) ; self . type = DisplayType . _default ; self . _default = _default ; return self ; |
public class PropertyToAttributePropertiesManager { /** * Convert a request attribute name to a portlet property name */
private String getPropertyName ( final String windowIdStr , final String fullAttributeName ) { } } | final String attributeName ; if ( this . nonNamespacedProperties . contains ( fullAttributeName ) ) { attributeName = fullAttributeName ; } else if ( fullAttributeName . startsWith ( windowIdStr ) ) { attributeName = fullAttributeName . substring ( windowIdStr . length ( ) ) ; } else { return null ; } final String mappedPropertyName = this . attributeToPropertyMappings . get ( attributeName ) ; if ( mappedPropertyName == null ) { logger . warn ( "Attribute {} found that matches the portlet window ID but it is not listed in the propertyMappings or nonNamespacedProperties and will not be returned to the portlet" , attributeName ) ; return null ; } return mappedPropertyName ; |
public class PackageManagerUtils { /** * Checks if the device has a location feature .
* @ param manager the package manager .
* @ return { @ code true } if the device has a location feature . */
@ TargetApi ( Build . VERSION_CODES . FROYO ) public static boolean hasLocationFeature ( PackageManager manager ) { } } | return manager . hasSystemFeature ( PackageManager . FEATURE_LOCATION ) ; |
public class CmsDetailPageConfigurationWriter { /** * Helper method for getting the locale from which to read the configuration data . < p >
* @ return the locale from which to read the configuration data
* @ throws CmsException if something goes wrong */
private Locale getLocale ( ) throws CmsException { } } | getDocument ( ) ; List < Locale > locales = m_document . getLocales ( ) ; if ( locales . contains ( Locale . ENGLISH ) || locales . isEmpty ( ) ) { return Locale . ENGLISH ; } return locales . get ( 0 ) ; |
public class Dataset { /** * Returns an array containing the timestamps of the elements in this
* dataset in proper sequence ( from first to last element ) . If the dataset
* fits in the specified array , it is returned therein . Otherwise , a new
* array is allocated with the size of this dataset .
* If the dataset fits in the specified array with room to spare ( i . e . , the
* array has more elements than the dataset ) , the element in the array
* immediately following the end of the dataset is set to null .
* @ param a the array into which the timestamps of the elements in this
* dataset are to be stored , if it is big enough ; otherwise , a new array
* is allocated for this purpose .
* @ return an array containing the timestamps of the elements in this dataset . */
public Timestamp [ ] toArray ( Timestamp [ ] a ) { } } | int n = data . size ( ) ; if ( a . length < n ) { a = new Timestamp [ n ] ; } for ( int i = 0 ; i < n ; i ++ ) { a [ i ] = data . get ( i ) . timestamp ; } for ( int i = n ; i < a . length ; i ++ ) { a [ i ] = null ; } return a ; |
public class MoreAnnotations { /** * Converts the given attribute to an enum value . */
public static < T extends Enum < T > > Optional < T > asEnumValue ( Class < T > clazz , Attribute a ) { } } | class Visitor extends SimpleAnnotationValueVisitor8 < T , Void > { @ Override public T visitEnumConstant ( VariableElement c , Void unused ) { return Enum . valueOf ( clazz , c . getSimpleName ( ) . toString ( ) ) ; } } return Optional . ofNullable ( a . accept ( new Visitor ( ) , null ) ) ; |
public class CmsDefaultWorkflowManager {
    /**
     * The implementation of the "publish" workflow action.<p>
     *
     * Checks the selected resources for broken relations on a worker thread.
     * If none are found within 10 seconds, the resources are published and a
     * success response is returned; if broken relations are found, a response
     * offering an optional "force publish" action is returned; if the check
     * times out, checking and publishing continue on a background thread and a
     * success response is returned immediately.
     *
     * @param userCms the user CMS context
     * @param options the publish options
     * @param resources the resources which the action should process
     * @return the workflow response
     * @throws CmsException if something goes wrong
     */
    protected CmsWorkflowResponse actionPublish(CmsObject userCms, CmsPublishOptions options, final List<CmsResource> resources) throws CmsException {
        final CmsPublish publish = new CmsPublish(userCms, options);
        // use FutureTask to get the broken links, because we can then use a different thread if it takes too long
        final FutureTask<List<CmsPublishResource>> brokenResourcesGetter = new FutureTask<List<CmsPublishResource>>(
            new Callable<List<CmsPublishResource>>() {

                public List<CmsPublishResource> call() throws Exception {
                    return publish.getBrokenResources(resources);
                }
            });
        Thread brokenResourcesThread = new Thread(brokenResourcesGetter);
        brokenResourcesThread.start();
        try {
            // wait at most 10 seconds for the relation check to complete
            List<CmsPublishResource> brokenResources = brokenResourcesGetter.get(10, TimeUnit.SECONDS);
            if (brokenResources.size() == 0) {
                // nothing broken: publish everything and report success
                publish.publishResources(resources);
                CmsWorkflowResponse response = new CmsWorkflowResponse(
                    true,
                    "",
                    new ArrayList<CmsPublishResource>(),
                    new ArrayList<CmsWorkflowAction>(),
                    null);
                return response;
            } else {
                String brokenResourcesLabel = getLabel(userCms, Messages.GUI_BROKEN_LINKS_0);
                // force-publish is offered when allowed by settings or for VFS managers
                boolean canForcePublish = OpenCms.getWorkplaceManager().getDefaultUserSettings().isAllowBrokenRelations()
                    || OpenCms.getRoleManager().hasRole(userCms, CmsRole.VFS_MANAGER);
                List<CmsWorkflowAction> actions = new ArrayList<CmsWorkflowAction>();
                if (canForcePublish) {
                    String forceLabel = getLabel(userCms, Messages.GUI_WORKFLOW_ACTION_FORCE_PUBLISH_0);
                    actions.add(new CmsWorkflowAction(ACTION_FORCE_PUBLISH, forceLabel, true, true));
                }
                CmsWorkflowResponse response = new CmsWorkflowResponse(
                    false,
                    brokenResourcesLabel,
                    brokenResources,
                    actions,
                    null);
                return response;
            }
        } catch (TimeoutException e) {
            // Things are taking too long, do them in a different thread and just return "OK" to the client
            Thread thread = new Thread() {

                @SuppressWarnings("synthetic-access")
                @Override
                public void run() {
                    LOG.info("Checking broken relations is taking too long, using a different thread for checking and publishing now.");
                    try {
                        // Make sure the computation is finished by calling get() without a timeout parameter
                        // We don't need the actual result of the get(), though; we just get the set of resource paths from the validator object
                        brokenResourcesGetter.get();
                        // publish only the resources whose relations would not break
                        List<CmsResource> resourcesToPublish = new ArrayList<CmsResource>(resources);
                        Iterator<CmsResource> resIter = resourcesToPublish.iterator();
                        while (resIter.hasNext()) {
                            CmsResource currentRes = resIter.next();
                            if (publish.getRelationValidator().keySet().contains(currentRes.getRootPath())) {
                                resIter.remove();
                                LOG.info("Excluding resource from publish list because relations would be broken: " + currentRes.getRootPath());
                            }
                        }
                        publish.publishResources(resourcesToPublish);
                    } catch (Exception ex) {
                        LOG.error(ex.getLocalizedMessage(), ex);
                    }
                }
            };
            thread.start();
            CmsWorkflowResponse response = new CmsWorkflowResponse(
                true,
                "",
                new ArrayList<CmsPublishResource>(),
                new ArrayList<CmsWorkflowAction>(),
                null);
            return response;
        } catch (InterruptedException e) {
            // shouldn't happen; log exception
            LOG.error(e.getLocalizedMessage());
            return null;
        } catch (ExecutionException e) {
            // shouldn't happen; log exception
            LOG.error(e.getLocalizedMessage());
            return null;
        }
    }
}
public class DocIdResponseHandler { /** * The token is required to be on the START _ ARRAY value for rows .
* @ param jp
* @ param result
* @ return The results found in the rows object */
private List < String > parseRows ( JsonParser jp , List < String > result ) throws IOException { } } | while ( jp . nextToken ( ) == JsonToken . START_OBJECT ) { while ( jp . nextToken ( ) == JsonToken . FIELD_NAME ) { String fieldName = jp . getCurrentName ( ) ; jp . nextToken ( ) ; if ( "id" . equals ( fieldName ) ) { result . add ( jp . getText ( ) ) ; } else { jp . skipChildren ( ) ; } } } return result ; |
public class AppFramework { /** * ApplicationContextAware interface to allow container to inject itself . Sets the active
* application context .
* @ param appContext The active application context . */
@ Override public void setApplicationContext ( ApplicationContext appContext ) throws BeansException { } } | if ( this . appContext != null ) { throw new ApplicationContextException ( "Attempt to reinitialize application context." ) ; } this . appContext = appContext ; |
public class ModifyVpcEndpointRequest { /** * ( Gateway endpoint ) One or more route table IDs to disassociate from the endpoint .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setRemoveRouteTableIds ( java . util . Collection ) } or { @ link # withRemoveRouteTableIds ( java . util . Collection ) }
* if you want to override the existing values .
* @ param removeRouteTableIds
* ( Gateway endpoint ) One or more route table IDs to disassociate from the endpoint .
* @ return Returns a reference to this object so that method calls can be chained together . */
public ModifyVpcEndpointRequest withRemoveRouteTableIds ( String ... removeRouteTableIds ) { } } | if ( this . removeRouteTableIds == null ) { setRemoveRouteTableIds ( new com . amazonaws . internal . SdkInternalList < String > ( removeRouteTableIds . length ) ) ; } for ( String ele : removeRouteTableIds ) { this . removeRouteTableIds . add ( ele ) ; } return this ; |
public class ExpressRouteCircuitsInner {
    /**
     * Gets information about the specified express route circuit.
     *
     * @param resourceGroupName The name of the resource group.
     * @param circuitName The name of express route circuit.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<ExpressRouteCircuitInner> getByResourceGroupAsync(String resourceGroupName, String circuitName, final ServiceCallback<ExpressRouteCircuitInner> serviceCallback) {
        // Thin async adapter: delegates to the service-response variant and wires the callback.
        return ServiceFuture.fromResponse(getByResourceGroupWithServiceResponseAsync(resourceGroupName, circuitName), serviceCallback);
    }
}
public class ValidationSession { /** * The clean method ensures that the object and its attached objects are up to date
* ie that any mapping is updated . This means the getters on the object are safe to use
* @ param object */
public void clean ( final ValidationObject object ) { } } | if ( m_enabled ) { m_validationEngine . clean ( object ) ; for ( ProxyField proxyField : m_history ) { proxyField . expire ( ) ; } } |
public class BidiOrder { /** * 7 ) resolving implicit embedding levels
* Rules I1 , I2. */
private void resolveImplicitLevels ( int start , int limit , byte level , byte sor , byte eor ) { } } | if ( ( level & 1 ) == 0 ) { // even level
for ( int i = start ; i < limit ; ++ i ) { byte t = resultTypes [ i ] ; // Rule I1.
if ( t == L ) { // no change
} else if ( t == R ) { resultLevels [ i ] += 1 ; } else { // t = = AN | | t = = EN
resultLevels [ i ] += 2 ; } } } else { // odd level
for ( int i = start ; i < limit ; ++ i ) { byte t = resultTypes [ i ] ; // Rule I2.
if ( t == R ) { // no change
} else { // t = = L | | t = = AN | | t = = EN
resultLevels [ i ] += 1 ; } } } |
public class JCublasNDArrayFactory {
    /**
     * Symmetric in place shuffle of an ndarray along a specified set of dimensions.
     * Each array in the list should have its own dimension at the same index of
     * the dimensions array. A single random permutation of TAD indices is applied
     * identically to every array (hence "symmetric").
     * NOTE(review): assumes all arrays have an equal number of TADs for their
     * respective dimensions — enforced below for rank &gt; 1 arrays.
     *
     * @param arrays the ndarrays to shuffle
     * @param rnd the random source used to build the permutation
     * @param dimensions the dimensions to do the shuffle along
     */
    @Override
    public void shuffle(List<INDArray> arrays, Random rnd, List<int[]> dimensions) {
        // no dimension - no shuffle
        if (dimensions == null || dimensions.size() == 0)
            throw new RuntimeException("Dimension can't be null or 0-length");
        if (arrays == null || arrays.size() == 0)
            throw new RuntimeException("No input arrays provided");
        // either one shared dimension set, or exactly one per array
        if (dimensions.size() > 1 && arrays.size() != dimensions.size())
            throw new IllegalStateException("Number of dimensions do not match number of arrays to shuffle");
        Nd4j.getExecutioner().push();
        // first we build TAD for input array and dimensions
        AtomicAllocator allocator = AtomicAllocator.getInstance();
        CudaContext context = null;
        // prepare device-side access for every array; the last context is reused below
        for (int x = 0; x < arrays.size(); x++) {
            context = allocator.getFlowController().prepareAction(arrays.get(x));
        }
        val zero = arrays.get(0);
        // number of elements in one TAD of the first array
        int tadLength = 1;
        if (zero.rank() > 1)
            for (int i = 0; i < dimensions.get(0).length; i++) {
                tadLength *= zero.shape()[dimensions.get(0)[i]];
            }
        val numTads = zero.length() / tadLength;
        // the shared random permutation applied to every array's TADs
        val map = ArrayUtil.buildInterleavedVector(rnd, (int) numTads);
        val shuffle = new CudaIntDataBuffer(map);
        val shuffleMap = allocator.getPointer(shuffle, context);
        val extras = new PointerPointer(null, // not used
                context.getOldStream(), allocator.getDeviceIdPointer());
        // per-array native addresses handed to the shuffle kernel
        long[] hPointers = new long[arrays.size()];
        long[] xPointers = new long[arrays.size()];
        long[] xShapes = new long[arrays.size()];
        long[] tadShapes = new long[arrays.size()];
        long[] tadOffsets = new long[arrays.size()];
        for (int i = 0; i < arrays.size(); i++) {
            val array = arrays.get(i);
            val x = AtomicAllocator.getInstance().getPointer(array, context);
            val xShapeInfo = AtomicAllocator.getInstance().getPointer(array.shapeInfoDataBuffer(), context);
            val tadManager = Nd4j.getExecutioner().getTADManager();
            int[] dimension = dimensions.size() > 1 ? dimensions.get(i) : dimensions.get(0);
            val tadBuffers = tadManager.getTADOnlyShapeInfo(array, dimension);
            // log.info("Original shape: {}; dimension: {}; TAD shape: {}", array.shapeInfoDataBuffer().asInt(), dimension, tadBuffers.getFirst().asInt());
            val tadShapeInfo = AtomicAllocator.getInstance().getPointer(tadBuffers.getFirst(), context);
            val offsets = tadBuffers.getSecond();
            // every array must expose the same TAD count for the shared permutation
            if (zero.rank() != 1 && offsets.length() != numTads)
                throw new ND4JIllegalStateException("Can't symmetrically shuffle arrays with non-equal number of TADs");
            val tadOffset = AtomicAllocator.getInstance().getPointer(offsets, context);
            hPointers[i] = AtomicAllocator.getInstance().getHostPointer(array.shapeInfoDataBuffer()).address();
            xPointers[i] = x.address();
            xShapes[i] = xShapeInfo.address();
            tadShapes[i] = tadShapeInfo.address();
            tadOffsets[i] = tadOffset.address();
        }
        // pack the collected addresses into device buffers for the native call
        val hostPointers = new LongPointer(hPointers);
        val hosthost = new PointerPointerWrapper(hostPointers);
        val tempX = new CudaDoubleDataBuffer(arrays.size());
        val tempShapes = new CudaDoubleDataBuffer(arrays.size());
        val tempTAD = new CudaDoubleDataBuffer(arrays.size());
        val tempOffsets = new CudaDoubleDataBuffer(arrays.size());
        AtomicAllocator.getInstance().memcpyBlocking(tempX, new LongPointer(xPointers), xPointers.length * 8, 0);
        AtomicAllocator.getInstance().memcpyBlocking(tempShapes, new LongPointer(xShapes), xPointers.length * 8, 0);
        AtomicAllocator.getInstance().memcpyBlocking(tempTAD, new LongPointer(tadShapes), xPointers.length * 8, 0);
        AtomicAllocator.getInstance().memcpyBlocking(tempOffsets, new LongPointer(tadOffsets), xPointers.length * 8, 0);
        // in-place shuffle: input and output pointer sets are the same buffers
        nativeOps.shuffle(extras, null, hosthost, new PointerPointer(allocator.getPointer(tempX, context)), new PointerPointer(allocator.getPointer(tempShapes, context)), null, null, new PointerPointer(allocator.getPointer(tempX, context)), new PointerPointer(allocator.getPointer(tempShapes, context)), arrays.size(), (IntPointer) shuffleMap, new PointerPointer(allocator.getPointer(tempTAD, context)), new PointerPointer(allocator.getPointer(tempOffsets, context)));
        for (int f = 0; f < arrays.size(); f++) {
            allocator.getFlowController().registerAction(context, arrays.get(f));
        }
        // just to keep reference (prevents premature GC of the staging buffers)
        shuffle.address();
        hostPointers.address();
        tempX.dataType();
        tempShapes.dataType();
        tempOffsets.dataType();
        tempTAD.dataType();
    }
}
public class SerdesManagerImpl { /** * Register a serializer of the given type .
* @ param serializer The serializer of T .
* @ param < T > The type of the object to be serialized .
* @ return The { @ link HandlerRegistration } object , capable of cancelling this HandlerRegistration
* to the { @ link SerdesManagerImpl } . */
@ SuppressWarnings ( "unchecked" ) public < T > HandlerRegistration register ( Serializer < T > serializer ) { } } | final HandlerRegistration reg = bindSerializerToType ( serializer , serializer . handledType ( ) ) ; if ( serializer instanceof HasImpl ) { Class [ ] impls = ( ( HasImpl ) serializer ) . implTypes ( ) ; final HandlerRegistration [ ] regs = new HandlerRegistration [ impls . length + 1 ] ; regs [ 0 ] = reg ; for ( int i = 0 ; i < impls . length ; i ++ ) { Class impl = impls [ i ] ; regs [ i + 1 ] = bindSerializerToType ( serializer , impl ) ; } return new HandlerRegistration ( ) { public void removeHandler ( ) { for ( HandlerRegistration reg : regs ) { reg . removeHandler ( ) ; } } } ; } return reg ; |
public class IndexedSet { /** * Gets the object from the set of objects with the specified field value .
* @ param indexDefinition the field index definition
* @ param value the field value
* @ param < V > the field type
* @ return the object or null if there is no such object */
public < V > T getFirstByField ( IndexDefinition < T , V > indexDefinition , V value ) { } } | FieldIndex < T , V > index = ( FieldIndex < T , V > ) mIndices . get ( indexDefinition ) ; if ( index == null ) { throw new IllegalStateException ( "the given index isn't defined for this IndexedSet" ) ; } return index . getFirst ( value ) ; |
public class AbstractBooleanList {
    /**
     * Appends the part of the specified list between <code>from</code> (inclusive)
     * and <code>to</code> (inclusive) to the receiver.
     *
     * @param other the list to be added to the receiver.
     * @param from the index of the first element to be appended (inclusive).
     * @param to the index of the last element to be appended (inclusive).
     * @exception IndexOutOfBoundsException index is out of range
     *            (<tt>other.size()&gt;0 && (from&lt;0 || from&gt;to || to&gt;=other.size())</tt>).
     */
    public void addAllOfFromTo(AbstractBooleanList other, int from, int to) {
        // Delegate to the bulk-insert hook, inserting at the current end of the list.
        beforeInsertAllOfFromTo(size, other, from, to);
    }
}
public class ConnectedIconsProvider { /** * Determines the connections available at this position for the specified < b > side < / b > .
* @ param world the world
* @ param pos the pos
* @ param facing the facing
* @ return the connections */
private int getConnections ( IBlockAccess world , BlockPos pos , EnumFacing facing ) { } } | Block block = world . getBlockState ( pos ) . getBlock ( ) ; int connection = 0 ; for ( int i = 0 ; i < 4 ; i ++ ) { if ( world . getBlockState ( pos . offset ( sides [ facing . getIndex ( ) ] [ i ] ) ) . getBlock ( ) == block ) connection |= ( 1 << i ) ; } return ~ connection & 15 ; |
public class HierarchicalContextRunner { /** * Initializes all dependencies for the { @ link HierarchicalContextRunner } .
* Note : Clients may override this method to provide other dependencies . */
protected void initialize ( ) { } } | final StatementExecutorFactory statementExecutorFactory = StatementExecutorFactory . getDefault ( ) ; final StatementBuilderFactory statementBuilderFactory = StatementBuilderFactory . getDefault ( ) ; methodResolver = new MethodResolver ( ) ; methodDescriber = new MethodDescriber ( ) ; methodRunner = new MethodExecutor ( methodDescriber , statementExecutorFactory . getExecutorForMethods ( ) , statementBuilderFactory . getBuildersForMethods ( ) ) ; contextResolver = new ContextResolver ( ) ; contextDescriber = new ContextDescriber ( contextResolver , methodResolver , methodDescriber ) ; contextRunner = new ContextExecutor ( contextDescriber ) ; statementExecutor = statementExecutorFactory . getExecutorForClasses ( ) ; statementBuilders = statementBuilderFactory . getBuildersForClasses ( ) ; |
public class Promises { /** * Returns a { @ link CompleteExceptionallyPromise } with { @ link StacklessException } ,
* since this method doesn ' t accept any { @ code Promise } s
* @ see # any ( Iterator ) */
@ Contract ( pure = true ) @ NotNull public static < T > Promise < T > any ( ) { } } | return Promise . ofException ( new StacklessException ( Promises . class , "All promises completed exceptionally" ) ) ; |
public class RSA {
    /**
     * Loads a private key from the given {@link KeyStore}.
     *
     * @param keyStore the (already loaded) key store to read from
     * @param alias the alias of the key entry
     * @param keyPass the password protecting the key entry; may be {@code null}
     *        for entries without a key password
     * @return the private key, or {@code null} if no key exists under {@code alias}
     * @throws RuntimeException wrapping the {@link GeneralSecurityException}
     *         (key store not initialized, missing algorithm, wrong password)
     *         raised while recovering the key; the original cause is preserved
     */
    public static PrivateKey loadPrivateKeyFromKeyStore(KeyStore keyStore, String alias, String keyPass) {
        try {
            // A null keyPass previously caused a wrapped NPE from toCharArray();
            // pass null through so password-less entries can be loaded too.
            char[] password = keyPass == null ? null : keyPass.toCharArray();
            return (PrivateKey) keyStore.getKey(alias, password);
        } catch (GeneralSecurityException e) {
            // Narrowed from catch(Exception): programming errors now propagate
            // unwrapped, while checked key-store failures are still wrapped
            // with their cause preserved (same RuntimeException contract).
            throw new RuntimeException(e);
        }
    }
}
// NOTE(review): ANTLR-generated parser rule (do not hand-edit logic — regenerate from
// InternalSARL.g instead). Parses: 'assert' <condition XExpression> ( ',' <STRING message> )?
// Flow visible below: create the SarlAssertExpression model element, match token 62
// ('assert'), parse the condition via ruleXExpression into feature "condition", then an
// optional comma+STRING gated by synpred2_InternalSARL into feature "message".
// Left byte-identical because generated backtracking code is order-sensitive.
public class InternalSARLParser { /** * InternalSARL . g : 7516:1 : ruleAssertExpression returns [ EObject current = null ] : ( ( ) otherlv _ 1 = ' assert ' ( ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START ) = > ( lv _ condition _ 2_0 = ruleXExpression ) ) ( ( ( ' , ' ) = > otherlv _ 3 = ' , ' ) ( ( lv _ message _ 4_0 = RULE _ STRING ) ) ) ? ) ; */
public final EObject ruleAssertExpression ( ) throws RecognitionException { } } | EObject current = null ; Token otherlv_1 = null ; Token otherlv_3 = null ; Token lv_message_4_0 = null ; EObject lv_condition_2_0 = null ; enterRule ( ) ; try { // InternalSARL . g : 7522:2 : ( ( ( ) otherlv _ 1 = ' assert ' ( ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START ) = > ( lv _ condition _ 2_0 = ruleXExpression ) ) ( ( ( ' , ' ) = > otherlv _ 3 = ' , ' ) ( ( lv _ message _ 4_0 = RULE _ STRING ) ) ) ? ) )
// InternalSARL . g : 7523:2 : ( ( ) otherlv _ 1 = ' assert ' ( ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START ) = > ( lv _ condition _ 2_0 = ruleXExpression ) ) ( ( ( ' , ' ) = > otherlv _ 3 = ' , ' ) ( ( lv _ message _ 4_0 = RULE _ STRING ) ) ) ? )
{ // InternalSARL . g : 7523:2 : ( ( ) otherlv _ 1 = ' assert ' ( ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START ) = > ( lv _ condition _ 2_0 = ruleXExpression ) ) ( ( ( ' , ' ) = > otherlv _ 3 = ' , ' ) ( ( lv _ message _ 4_0 = RULE _ STRING ) ) ) ? )
// InternalSARL . g : 7524:3 : ( ) otherlv _ 1 = ' assert ' ( ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START ) = > ( lv _ condition _ 2_0 = ruleXExpression ) ) ( ( ( ' , ' ) = > otherlv _ 3 = ' , ' ) ( ( lv _ message _ 4_0 = RULE _ STRING ) ) ) ?
{ // InternalSARL . g : 7524:3 : ( )
// InternalSARL . g : 7525:4:
// NOTE(review): when not backtracking, eagerly create the SarlAssertExpression element,
// then match the 'assert' keyword (token type 62) and attach it as a leaf node.
{ if ( state . backtracking == 0 ) { current = forceCreateModelElement ( grammarAccess . getAssertExpressionAccess ( ) . getSarlAssertExpressionAction_0 ( ) , current ) ; } } otherlv_1 = ( Token ) match ( input , 62 , FOLLOW_45 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getAssertExpressionAccess ( ) . getAssertKeyword_1 ( ) ) ; } // InternalSARL . g : 7535:3 : ( ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START ) = > ( lv _ condition _ 2_0 = ruleXExpression ) )
// InternalSARL . g : 7536:4 : ( ' abstract ' | ' annotation ' | ' class ' | ' create ' | ' def ' | ' dispatch ' | ' enum ' | ' extends ' | ' final ' | ' implements ' | ' import ' | ' interface ' | ' override ' | ' package ' | ' public ' | ' private ' | ' protected ' | ' static ' | ' throws ' | ' strictfp ' | ' native ' | ' volatile ' | ' synchronized ' | ' transient ' | ' AFTER ' | ' BEFORE ' | ' SEPARATOR ' | ' extension ' | ' ! ' | ' - ' | ' + ' | ' break ' | ' continue ' | ' assert ' | ' assume ' | ' new ' | ' { ' | ' switch ' | ' < ' | ' super ' | ' # ' | ' [ ' | ' false ' | ' true ' | ' null ' | ' typeof ' | ' if ' | ' for ' | ' while ' | ' do ' | ' throw ' | ' return ' | ' try ' | ' ( ' | RULE _ ID | RULE _ HEX | RULE _ INT | RULE _ DECIMAL | RULE _ STRING | RULE _ RICH _ TEXT | RULE _ RICH _ TEXT _ START ) = > ( lv _ condition _ 2_0 = ruleXExpression )
{ // InternalSARL . g : 7537:4 : ( lv _ condition _ 2_0 = ruleXExpression )
// InternalSARL . g : 7538:5 : lv _ condition _ 2_0 = ruleXExpression
// NOTE(review): parse the mandatory condition sub-expression and store it in the
// "condition" feature of the model element (created on demand for the parent rule).
{ if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getAssertExpressionAccess ( ) . getConditionXExpressionParserRuleCall_2_0 ( ) ) ; } pushFollow ( FOLLOW_72 ) ; lv_condition_2_0 = ruleXExpression ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getAssertExpressionRule ( ) ) ; } set ( current , "condition" , lv_condition_2_0 , "org.eclipse.xtext.xbase.Xbase.XExpression" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalSARL . g : 7555:3 : ( ( ( ' , ' ) = > otherlv _ 3 = ' , ' ) ( ( lv _ message _ 4_0 = RULE _ STRING ) ) ) ?
// NOTE(review): the optional message clause is only taken when the lookahead is a comma
// (token 32) AND the syntactic predicate synpred2_InternalSARL succeeds.
int alt215 = 2 ; int LA215_0 = input . LA ( 1 ) ; if ( ( LA215_0 == 32 ) ) { int LA215_1 = input . LA ( 2 ) ; if ( ( synpred2_InternalSARL ( ) ) ) { alt215 = 1 ; } } switch ( alt215 ) { case 1 : // InternalSARL . g : 7556:4 : ( ( ' , ' ) = > otherlv _ 3 = ' , ' ) ( ( lv _ message _ 4_0 = RULE _ STRING ) )
{ // InternalSARL . g : 7556:4 : ( ( ' , ' ) = > otherlv _ 3 = ' , ' )
// InternalSARL . g : 7557:5 : ( ' , ' ) = > otherlv _ 3 = ' , '
{ otherlv_3 = ( Token ) match ( input , 32 , FOLLOW_73 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_3 , grammarAccess . getAssertExpressionAccess ( ) . getCommaKeyword_3_0 ( ) ) ; } } // InternalSARL . g : 7563:4 : ( ( lv _ message _ 4_0 = RULE _ STRING ) )
// InternalSARL . g : 7564:5 : ( lv _ message _ 4_0 = RULE _ STRING )
{ // InternalSARL . g : 7564:5 : ( lv _ message _ 4_0 = RULE _ STRING )
// InternalSARL . g : 7565:6 : lv _ message _ 4_0 = RULE _ STRING
// NOTE(review): the STRING literal is stored in the "message" feature; on any
// RecognitionException the rule recovers, appends skipped tokens, and still returns current.
{ lv_message_4_0 = ( Token ) match ( input , RULE_STRING , FOLLOW_2 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( lv_message_4_0 , grammarAccess . getAssertExpressionAccess ( ) . getMessageSTRINGTerminalRuleCall_3_1_0 ( ) ) ; } if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getAssertExpressionRule ( ) ) ; } setWithLastConsumed ( current , "message" , lv_message_4_0 , "org.eclipse.xtext.xbase.Xtype.STRING" ) ; } } } } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ; |
public class ByteArray { /** * 将本对象的内容引用复制给array
* @ param array ByteArray */
public void directTo ( ByteArray array ) { } } | if ( array != null ) { array . content = this . content ; array . count = this . count ; } |
public class WeldCollections { /** * Utility method for working with maps . Unlike { @ link Map # putIfAbsent ( Object , Object ) } this method always returns the value that ends up store in the map
* which is either the old value ( if any was present ) or the new value ( if it was stored in the map ) .
* @ param map the map
* @ param key the key
* @ param value the value
* @ return the value that ends up store in the map which is either the old value ( if any was present ) or the new value ( if it was stored in the map ) */
public static < K , V > V putIfAbsent ( Map < K , V > map , K key , V value ) { } } | V old = map . putIfAbsent ( key , value ) ; if ( old != null ) { return old ; } return value ; |
public class Strings {
    /**
     * Converts a word sequence into a single camel-case sequence.
     *
     * @param text      a word sequence with the given separator
     * @param separator a word separator
     * @param strict    if true, all letters following the first are forced into
     *                  lower case in each word; otherwise they are kept as-is
     * @return a single camel-case word (separators removed, each word capitalized)
     */
    public static String toCamelCase(String text, char separator, boolean strict) {
        StringBuilder result = new StringBuilder(text.length());
        boolean startOfWord = true;
        for (int i = 0; i < text.length(); i++) {
            char c = text.charAt(i);
            if (c == separator) {
                // Separators are dropped; the next non-separator char starts a word.
                startOfWord = true;
            } else if (startOfWord) {
                result.append(Character.toUpperCase(c));
                startOfWord = false;
            } else {
                result.append(strict ? Character.toLowerCase(c) : c);
            }
        }
        return result.toString();
    }
}
public class IndianCalendar { /**
 * Converts a Julian Day number to the corresponding Gregorian calendar date.
 * <p>
 * Not needed for basic calendar functioning; helper used by the surrounding class.
 * NOTE(review): the original javadoc said the result is an "Indian date", but the
 * body computes a Gregorian (year, month, day) triple via the usual quadricentennial /
 * century / quadrennial decomposition of days since the Gregorian epoch (JD 1721425.5).
 * Relies on gregorianToJD and isGregorianLeap, which are declared elsewhere in this
 * class — TODO confirm their conventions (month is 1-based here).
 * @ param jd the Julian Day number to convert
 * @ return an int[3] of {year, month, day} in the Gregorian calendar
 */
private static int [ ] jdToGregorian ( double jd ) { } } | double JULIAN_EPOCH = 1721425.5 ; double wjd , depoch , quadricent , dqc , cent , dcent , quad , dquad , yindex , yearday , leapadj ; int year , month , day ; wjd = Math . floor ( jd - 0.5 ) + 0.5 ; depoch = wjd - JULIAN_EPOCH ; quadricent = Math . floor ( depoch / 146097 ) ; dqc = depoch % 146097 ; cent = Math . floor ( dqc / 36524 ) ; dcent = dqc % 36524 ; quad = Math . floor ( dcent / 1461 ) ; dquad = dcent % 1461 ; yindex = Math . floor ( dquad / 365 ) ; year = ( int ) ( ( quadricent * 400 ) + ( cent * 100 ) + ( quad * 4 ) + yindex ) ; if ( ! ( ( cent == 4 ) || ( yindex == 4 ) ) ) { year ++ ; } yearday = wjd - gregorianToJD ( year , 1 , 1 ) ; leapadj = ( ( wjd < gregorianToJD ( year , 3 , 1 ) ) ? 0 : ( isGregorianLeap ( year ) ? 1 : 2 ) ) ; month = ( int ) Math . floor ( ( ( ( yearday + leapadj ) * 12 ) + 373 ) / 367 ) ; day = ( int ) ( wjd - gregorianToJD ( year , month , 1 ) ) + 1 ; int [ ] julianDate = new int [ 3 ] ; julianDate [ 0 ] = year ; julianDate [ 1 ] = month ; julianDate [ 2 ] = day ; return julianDate ;
public class Syncer { /** * Gets the { @ link ISyncHandler } for the specified object .
* @ param < T > the generic type
* @ param caller the caller
* @ return the handler */
public < T > ISyncHandler < ? super T , ? extends ISyncableData > getHandler ( T caller ) { } } | @ SuppressWarnings ( "unchecked" ) ISyncHandler < ? super T , ? extends ISyncableData > handler = ( ISyncHandler < ? super T , ? extends ISyncableData > ) handlers . get ( caller . getClass ( ) ) ; if ( handler == null ) { MalisisCore . log . error ( "No ISyncHandler registered for type '{}'" , caller . getClass ( ) ) ; return null ; } return handler ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.