signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SelectorThread { /** * ( non - Javadoc ) * @ see java . lang . Runnable # run ( ) */ @ Override public void run ( ) { } }
if ( logger . isInfoEnabled ( ) ) { logger . info ( String . format ( "SelectorThread for Management=%s started." , this . management . getName ( ) ) ) ; } while ( this . started ) { try { FastList < ChangeRequest > pendingChanges = this . management . getPendingChanges ( ) ; // Process any pending changes synchronized ( pendingChanges ) { Iterator < ChangeRequest > changes = pendingChanges . iterator ( ) ; while ( changes . hasNext ( ) ) { ChangeRequest change = changes . next ( ) ; switch ( change . getType ( ) ) { case ChangeRequest . CHANGEOPS : pendingChanges . remove ( change ) ; SelectionKey key = change . getSocketChannel ( ) . keyFor ( this . selector ) ; key . interestOps ( change . getOps ( ) ) ; break ; case ChangeRequest . REGISTER : pendingChanges . remove ( change ) ; SelectionKey key1 = change . getSocketChannel ( ) . register ( this . selector , change . getOps ( ) ) ; key1 . attach ( change . getAssociation ( ) ) ; break ; case ChangeRequest . CONNECT : if ( ! change . getAssociation ( ) . isStarted ( ) ) { // if Association is stopped - remove pending connection requests pendingChanges . remove ( change ) ; } else { if ( change . getExecutionTime ( ) <= System . currentTimeMillis ( ) ) { pendingChanges . remove ( change ) ; change . getAssociation ( ) . initiateConnection ( ) ; } } break ; case ChangeRequest . CLOSE : pendingChanges . remove ( change ) ; change . getAssociation ( ) . close ( ) ; } } // end of while } // Wait for an event one of the registered channels this . selector . select ( 500 ) ; // System . out . println ( " Done selecting " + // this . selector . selectedKeys ( ) . size ( ) ) ; // Iterate over the set of keys for which events are available Iterator < SelectionKey > selectedKeys = this . selector . selectedKeys ( ) . iterator ( ) ; while ( selectedKeys . hasNext ( ) ) { SelectionKey key = selectedKeys . next ( ) ; selectedKeys . remove ( ) ; if ( ! key . 
isValid ( ) ) { continue ; } // Check what event is available and deal with it if ( key . isConnectable ( ) ) { this . finishConnection ( key ) ; } else if ( key . isAcceptable ( ) ) { this . accept ( key ) ; } else if ( key . isReadable ( ) ) { this . read ( key ) ; } else if ( key . isWritable ( ) ) { this . write ( key ) ; } } } catch ( Exception e ) { logger . error ( "Error while selecting the ready keys" , e ) ; } } try { this . selector . close ( ) ; } catch ( IOException e ) { logger . error ( String . format ( "Error while closing Selector for SCTP Management=%s" , this . management . getName ( ) ) ) ; } if ( logger . isInfoEnabled ( ) ) { logger . info ( String . format ( "SelectorThread for Management=%s stopped." , this . management . getName ( ) ) ) ; }
public class ReceivedMessageRequestInfo { /** * Return the completion time for this request . * - 1 means infinite */ public long getCompletionTime ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getCompletionTime" ) ; long completionTime = - 1 ; // only calculate if the timeout is not infinite long timeOut = getTimeout ( ) ; if ( timeOut != SIMPConstants . INFINITE_TIMEOUT ) { completionTime = getIssueTime ( ) + timeOut ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getCompletionTime" , new Long ( completionTime ) ) ; return completionTime ;
public class ElementImpl { /** * { @ inheritDoc } */ @ Override public boolean isDefaultNamespace ( String namespaceURI ) { } }
String namespace = this . getNamespaceURI ( ) ; String prefix = this . getPrefix ( ) ; if ( prefix == null || prefix . length ( ) == 0 ) { if ( namespaceURI == null ) { return ( namespace == namespaceURI ) ; } return namespaceURI . equals ( namespace ) ; } if ( this . hasAttributes ( ) ) { Attr attr = this . getAttributeNodeNS ( "http://www.w3.org/2000/xmlns/" , "xmlns" ) ; if ( attr != null ) { String value = attr . getNodeValue ( ) ; if ( namespaceURI == null ) { return ( namespace == value ) ; } return namespaceURI . equals ( value ) ; } } Node ancestor = getParentNode ( ) ; if ( ancestor != null ) { short type = ancestor . getNodeType ( ) ; if ( type == NodeKind . ELEM ) { return ancestor . isDefaultNamespace ( namespaceURI ) ; } // otherwise , current node is root already } return false ;
public class EventCheckActivity { /** * Method that executes the logic based on the work */ public void execute ( ) throws ActivityException { } }
EventWaitInstance received = registerWaitEvents ( false , true ) ; if ( received != null ) { setReturnCodeAndExitStatus ( received . getCompletionCode ( ) ) ; processMessage ( getExternalEventInstanceDetails ( received . getMessageDocumentId ( ) ) ) ; boolean toFinish = handleCompletionCode ( ) ; if ( toFinish && exitStatus == null ) exitStatus = WorkStatus . STATUS_COMPLETED ; } else { exitStatus = WorkStatus . STATUS_COMPLETED ; setReturnCode ( null ) ; }
public class PathManagerService { /** * Removes any { @ code Service < String > } for the given path . * @ param operationContext the operation context associated with the management operation making this request . Cannot be { @ code null } * @ param pathName the name of the relevant path . Cannot be { @ code null } */ final void removePathService ( final OperationContext operationContext , final String pathName ) { } }
final ServiceController < ? > serviceController = operationContext . getServiceRegistry ( true ) . getService ( AbstractPathService . pathNameOf ( pathName ) ) ; if ( serviceController != null ) { operationContext . removeService ( serviceController ) ; }
public class SerializerFactory { /** * Returns the serializer for a class . * @ param cl the class of the object that needs to be serialized . * @ return a serializer object for the serialization . */ public Serializer getSerializer ( Class cl ) throws HessianProtocolException { } }
Serializer serializer ; serializer = ( Serializer ) _cachedSerializerMap . get ( cl ) ; if ( serializer != null ) { return serializer ; } serializer = loadSerializer ( cl ) ; _cachedSerializerMap . put ( cl , serializer ) ; return serializer ;
public class Levenshtein { /** * Searches the given collection of strings and returns a collection of at * most < code > n < / code > strings that have the lowest Levenshtein distance * to a given string < code > t < / code > . The returned collection will be * sorted according to the distance with the string with the lowest * distance at the first position . * @ param < T > the type of the strings in the given collection * @ param ss the collection to search * @ param t the string to compare to * @ param n the maximum number of strings to return * @ param threshold a threshold for individual item distances . Only items * with a distance below this threshold will be included in the result . * @ return the strings with the lowest Levenshtein distance */ public static < T extends CharSequence > Collection < T > findMinimum ( Collection < T > ss , CharSequence t , int n , int threshold ) { } }
LinkedList < Item < T > > result = new LinkedList < > ( ) ; for ( T s : ss ) { int d = StringUtils . getLevenshteinDistance ( s , t ) ; if ( d < threshold ) { result . offer ( new Item < > ( s , d ) ) ; if ( result . size ( ) > n + 10 ) { // resort , but not too often Collections . sort ( result ) ; while ( result . size ( ) > n ) result . removeLast ( ) ; } } } Collections . sort ( result ) ; while ( result . size ( ) > n ) result . removeLast ( ) ; List < T > arr = new ArrayList < > ( n ) ; for ( Item < T > i : result ) { arr . add ( i . str ) ; } return arr ;
public class TransitionFactory { /** * Add a builder for a VM . * Every builder that supports the same transition will be replaced . * @ param b the builder to add */ public void add ( VMTransitionBuilder b ) { } }
Map < VMState , VMTransitionBuilder > m = vmAMB2 . get ( b . getDestinationState ( ) ) ; for ( VMState src : b . getSourceStates ( ) ) { m . put ( src , b ) ; }
public class NanoHTTPD { /** * Start the server . * @ throws IOException * if the socket is in use . */ public void start ( ) throws IOException { } }
myServerSocket = new ServerSocket ( ) ; myServerSocket . bind ( ( hostname != null ) ? new InetSocketAddress ( hostname , myPort ) : new InetSocketAddress ( myPort ) ) ; myThread = new Thread ( new Runnable ( ) { @ Override public void run ( ) { do { try { final Socket finalAccept = myServerSocket . accept ( ) ; registerConnection ( finalAccept ) ; finalAccept . setSoTimeout ( SOCKET_READ_TIMEOUT ) ; final InputStream inputStream = finalAccept . getInputStream ( ) ; asyncRunner . exec ( new Runnable ( ) { @ Override public void run ( ) { OutputStream outputStream = null ; try { outputStream = finalAccept . getOutputStream ( ) ; TempFileManager tempFileManager = tempFileManagerFactory . create ( ) ; HTTPSession session = new HTTPSession ( tempFileManager , inputStream , outputStream , finalAccept . getInetAddress ( ) ) ; while ( ! finalAccept . isClosed ( ) ) { session . execute ( ) ; } } catch ( Exception e ) { // When the socket is closed by the client , // we throw our own SocketException // to break the " keep alive " loop above . if ( ! ( e instanceof SocketException && "NanoHttpd Shutdown" . equals ( e . getMessage ( ) ) ) ) { e . printStackTrace ( ) ; } } finally { safeClose ( outputStream ) ; safeClose ( inputStream ) ; safeClose ( finalAccept ) ; unRegisterConnection ( finalAccept ) ; } } } ) ; } catch ( IOException e ) { } } while ( ! myServerSocket . isClosed ( ) ) ; } } ) ; myThread . setDaemon ( true ) ; myThread . setName ( "NanoHttpd Main Listener" ) ; myThread . start ( ) ;
public class KeyBundle { /** * The key identifier . * @ return identifier for the key */ public KeyIdentifier keyIdentifier ( ) { } }
if ( key ( ) == null || key ( ) . kid ( ) == null || key ( ) . kid ( ) . length ( ) == 0 ) { return null ; } return new KeyIdentifier ( key ( ) . kid ( ) ) ;
public class CharArrayList { /** * Unwraps an iterator into an array starting at a given offset for a given number of elements . * < br > This method iterates over the given type - specific iterator and stores the elements returned , up to a maximum of < code > length < / code > , in the given array starting at < code > offset < / code > . The * number of actually unwrapped elements is returned ( it may be less than < code > max < / code > if the iterator emits less than < code > max < / code > elements ) . * @ param i a type - specific iterator . * @ param array an array to contain the output of the iterator . * @ return the number of elements unwrapped . */ private static < K > int objectUnwrap ( final Iterator < ? extends K > i , final K array [ ] ) { } }
int j = array . length , offset = 0 ; while ( j -- != 0 && i . hasNext ( ) ) array [ offset ++ ] = i . next ( ) ; return array . length - j - 1 ;
public class PartialResponseWriter { /** * { @ inheritDoc } */ @ Override public void endDocument ( ) throws IOException { } }
if ( hasChanges ) { // Close the < insert > element , if any . // error close the last op if any endInsert ( ) ; _wrapped . endElement ( "changes" ) ; hasChanges = false ; } _wrapped . endElement ( "partial-response" ) ;
public class HelloWorldClient { /** * Say hello to server . */ public void greet ( String name ) { } }
logger . info ( "Will try to greet " + name + " ..." ) ; HelloRequest request = HelloRequest . newBuilder ( ) . setName ( name ) . build ( ) ; HelloReply response ; try { response = blockingStub . sayHello ( request ) ; } catch ( StatusRuntimeException e ) { logger . log ( Level . WARNING , "RPC failed: {0}" , e . getStatus ( ) ) ; return ; } logger . info ( "Greeting: " + response . getMessage ( ) ) ;
public class MultiLevelIterator { /** * Advance the iterator stack to the next item or return false if there are * none left . * If this function returns true , then there are { @ code depth } iterators in * the iterator stack and { @ code hasNext ( ) } is true for the last iterator . * If this function returns false , then there are no more items in the multi - * level iterator . The top - most iterator , and the tree under it , have been * exhausted . * @ return true if there is a next item , false if this iterator is exhausted */ private boolean advance ( ) { } }
// done when there are depth iterators and the last iterator has an item while ( iterators . size ( ) < depth || ! iterators . getLast ( ) . hasNext ( ) ) { // each iteration : add an iterator for the next level from the current // last iterator , or remove the last iterator because it is empty if ( iterators . getLast ( ) . hasNext ( ) ) { current . add ( iterators . getLast ( ) . next ( ) ) ; iterators . add ( getLevel ( current ) . iterator ( ) ) ; } else { iterators . removeLast ( ) ; if ( iterators . isEmpty ( ) ) { return false ; } else { current . removeLast ( ) ; } } } return true ;
public class EqualsRule { /** * { @ inheritDoc } */ public boolean evaluate ( final LoggingEvent event , Map matches ) { } }
Object p2 = RESOLVER . getValue ( field , event ) ; boolean result = ( p2 != null ) && p2 . toString ( ) . equals ( value ) ; if ( result && matches != null ) { Set entries = ( Set ) matches . get ( field . toUpperCase ( ) ) ; if ( entries == null ) { entries = new HashSet ( ) ; matches . put ( field . toUpperCase ( ) , entries ) ; } entries . add ( value ) ; } return result ;
public class TimePickerSettings { /** * generatePotentialMenuTimes , This will generate a list of menu times for populating the combo * box menu , using a TimePickerSettings . TimeIncrement value . The menu times will always start at * Midnight , and increase according to the increment until the last time before 11:59pm . * Note : This function can be called before or after setting an optional veto policy . Vetoed * times will never be added to the time picker menu , regardless of whether they are generated * by this function . * Example usage : generatePotentialMenuTimes ( TimeIncrement . FifteenMinutes ) ; * Number of entries : If no veto policy has been created , the number of entries in the drop down * menu would be determined by the size of the increment as follows ; FiveMinutes has 288 * entries . TenMinutes has 144 entries . FifteenMinutes has 96 entries . TwentyMinutes has 72 * entries . ThirtyMinutes has 48 entries . OneHour has 24 entries . */ public void generatePotentialMenuTimes ( TimeIncrement timeIncrement , LocalTime optionalStartTime , LocalTime optionalEndTime ) { } }
// If either bounding time does does not already exist , then set it to the maximum range . LocalTime startTime = ( optionalStartTime == null ) ? LocalTime . MIN : optionalStartTime ; LocalTime endTime = ( optionalEndTime == null ) ? LocalTime . MAX : optionalEndTime ; // Initialize our needed variables . potentialMenuTimes = new ArrayList < LocalTime > ( ) ; int increment = timeIncrement . minutes ; // Start at midnight , which is the earliest time of day for LocalTime values . LocalTime entry = LocalTime . MIDNIGHT ; boolean continueLoop = true ; while ( continueLoop ) { if ( PickerUtilities . isLocalTimeInRange ( entry , startTime , endTime , true ) ) { potentialMenuTimes . add ( entry ) ; } entry = entry . plusMinutes ( increment ) ; // Note : This stopping criteria works as long as as ( ( 60 % increment ) = = 0 ) . continueLoop = ( ! ( LocalTime . MIDNIGHT . equals ( entry ) ) ) ; }
public class ARCReader { /** * Skip over any trailing new lines at end of the record so we ' re lined up * ready to read the next . * @ param record * @ throws IOException */ protected void gotoEOR ( ArchiveRecord record ) throws IOException { } }
if ( getIn ( ) . available ( ) <= 0 ) { return ; } // Remove any trailing LINE _ SEPARATOR int c = - 1 ; while ( getIn ( ) . available ( ) > 0 ) { if ( getIn ( ) . markSupported ( ) ) { getIn ( ) . mark ( 1 ) ; } c = getIn ( ) . read ( ) ; if ( c != - 1 ) { if ( c == LINE_SEPARATOR ) { continue ; } if ( getIn ( ) . markSupported ( ) ) { // We ' ve overread . We ' re probably in next record . There is // no way of telling for sure . It may be dross at end of // current record . Backup . getIn ( ) . reset ( ) ; break ; } ArchiveRecordHeader h = ( getCurrentRecord ( ) != null ) ? record . getHeader ( ) : null ; throw new IOException ( "Read " + ( char ) c + " when only " + LINE_SEPARATOR + " expected. " + getReaderIdentifier ( ) + ( ( h != null ) ? h . getHeaderFields ( ) . toString ( ) : "" ) ) ; } }
public class PolicyTypeDescription { /** * The description of the policy attributes associated with the policies defined by Elastic Load Balancing . * @ return The description of the policy attributes associated with the policies defined by Elastic Load Balancing . */ public java . util . List < PolicyAttributeTypeDescription > getPolicyAttributeTypeDescriptions ( ) { } }
if ( policyAttributeTypeDescriptions == null ) { policyAttributeTypeDescriptions = new com . amazonaws . internal . SdkInternalList < PolicyAttributeTypeDescription > ( ) ; } return policyAttributeTypeDescriptions ;
public class CreateDiskRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( CreateDiskRequest createDiskRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( createDiskRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( createDiskRequest . getDiskName ( ) , DISKNAME_BINDING ) ; protocolMarshaller . marshall ( createDiskRequest . getAvailabilityZone ( ) , AVAILABILITYZONE_BINDING ) ; protocolMarshaller . marshall ( createDiskRequest . getSizeInGb ( ) , SIZEINGB_BINDING ) ; protocolMarshaller . marshall ( createDiskRequest . getTags ( ) , TAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class A_CmsToolbarListMenuButton { /** * Updates the dialog size according to the current tab content . < p > */ public void updateSize ( ) { } }
int width = CmsToolbarPopup . getAvailableWidth ( ) ; getPopup ( ) . setWidth ( width ) ; CmsListTab tab = m_tabs . getWidget ( m_tabs . getSelectedIndex ( ) ) ; tab . truncate ( TM_LITST_MENU , width ) ; int availableHeight = CmsToolbarPopup . getAvailableHeight ( ) ; int requiredHeight = tab . getRequiredHeight ( ) + 44 ; int height = ( availableHeight > requiredHeight ) && ( requiredHeight > 50 ) ? requiredHeight : availableHeight ; m_tabs . getParent ( ) . setHeight ( height + "px" ) ; tab . getScrollPanel ( ) . onResizeDescendant ( ) ;
public class Form { /** * Write a hidden field for a paramter * @ param paramName The name of the parameter * @ param paramValue The value of the paramter * @ param results The InternalStringBuilder to append the result to * @ param req THe servlet request */ private void writeHiddenParam ( String paramName , String paramValue , AbstractRenderAppender results , ServletRequest req , boolean newLine ) { } }
// put each hidden on a new line if ( newLine ) results . append ( "\n" ) ; // create the state _hiddenState . clear ( ) ; _hiddenState . name = paramName ; _hiddenState . value = paramValue ; TagRenderingBase hiddenTag = TagRenderingBase . Factory . getRendering ( TagRenderingBase . INPUT_HIDDEN_TAG , req ) ; hiddenTag . doStartTag ( results , _hiddenState ) ; hiddenTag . doEndTag ( results ) ;
public class WhileyFileParser { /** * Determine whether or not the given type can be parsed as an expression . * In many cases , a type can ( e . g . < code > { x } < / code > is both a valid type and * expression ) . However , some types are not also expressions ( e . g . * < code > int < / code > , < code > { int f } < / code > , < code > & int < / code > , etc ) . * This function * must * return false if what the given type could not be * parsed as an expression . However , if what it can be parsed as an * expression , then this function must return false ( even if we will * eventually treat this as a type ) . This function is called from either the * beginning of a statement ( i . e . to disambiguate variable declarations ) , or * after matching a left brace ( i . e . to disambiguate casts ) . * @ param index * Position in the token stream to begin looking from . * @ return */ private boolean mustParseAsType ( Type type ) { } }
if ( type instanceof Type . Primitive ) { // All primitive types must be parsed as types , since their // identifiers are keywords . return true ; } else if ( type instanceof Type . Record ) { // Record types must be parsed as types , since e . g . { int f } is not a // valid expression . return true ; } else if ( type instanceof Type . Callable ) { // " function " and " method " are keywords , cannot parse as expression . return true ; } else if ( type instanceof Type . Array ) { return true ; } else if ( type instanceof Type . Nominal ) { return false ; // always can be an expression } else if ( type instanceof Type . Reference ) { Type . Reference tt = ( Type . Reference ) type ; if ( tt . hasLifetime ( ) ) { Identifier lifetime = tt . getLifetime ( ) ; String lifetimeStr = lifetime . get ( ) ; if ( lifetimeStr . equals ( "this" ) || lifetimeStr . equals ( "*" ) ) { // & this and & * is not a valid expression because " this " is // keyword & ident could also be an address expression return true ; } } return mustParseAsType ( tt . getElement ( ) ) ; } else if ( type instanceof Type . Union ) { Type . Union tt = ( Type . Union ) type ; boolean result = false ; for ( int i = 0 ; i != tt . size ( ) ; ++ i ) { result |= mustParseAsType ( tt . get ( i ) ) ; } return result ; } else { // Error ! throw new SyntacticException ( "unknown syntactic type encountered" , parent . getEntry ( ) , type ) ; }
public class AudioFactory { /** * Add a supported audio format . * @ param format The supported format ( must not be < code > null < / code > ) . * @ throws LionEngineException If invalid argument or format already exists . */ public static synchronized void addFormat ( AudioFormat format ) { } }
Check . notNull ( format ) ; for ( final String current : format . getFormats ( ) ) { if ( FACTORIES . put ( current , format ) != null ) { throw new LionEngineException ( ERROR_EXISTS + current ) ; } }
public class DescribeFileSystemsResult { /** * An array of file system descriptions . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setFileSystems ( java . util . Collection ) } or { @ link # withFileSystems ( java . util . Collection ) } if you want to * override the existing values . * @ param fileSystems * An array of file system descriptions . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeFileSystemsResult withFileSystems ( FileSystem ... fileSystems ) { } }
if ( this . fileSystems == null ) { setFileSystems ( new java . util . ArrayList < FileSystem > ( fileSystems . length ) ) ; } for ( FileSystem ele : fileSystems ) { this . fileSystems . add ( ele ) ; } return this ;
public class FragmentManager { /** * This method clears the trace fragment builder for the * current thread of execution . */ public void clear ( ) { } }
int currentCount = threadCounter . decrementAndGet ( ) ; if ( log . isLoggable ( Level . FINEST ) ) { log . finest ( "Clear: Disassociate Thread from FragmentBuilder(1): current thread count=" + currentCount ) ; synchronized ( threadNames ) { threadNames . remove ( Thread . currentThread ( ) . getName ( ) ) ; } } FragmentBuilder currentBuilder = builders . get ( ) ; if ( currentBuilder != null ) { currentBuilder . decrementThreadCount ( ) ; } builders . remove ( ) ;
public class GeneralVarTagBinding { /** * 按照标签申明的变量名字来绑定 , 建议使用binds方法 , 因为此方法是按照顺序绑定 * @ param name * @ param value */ @ Deprecated public void bind ( String name , Object value ) { } }
if ( name2Index == null ) { throw new RuntimeException ( "申明的绑定和代码里实际绑定不一致" ) ; } Integer index = name2Index . get ( name ) ; if ( index == null ) { throw new RuntimeException ( "申明的绑定和代码里实际绑定不一致:试图绑定未申明的变量" + name ) ; } ctx . vars [ index ] = value ;
public class SheetOfPaperRaw { /** * ( non - Javadoc ) * @ see com . sporniket . libre . ui . swing . paper . SheetOfPaper # setHeight ( int ) */ public void setHeight ( int height ) { } }
if ( 1 > height ) { String _message = "The specified height ({0}) is incorrect, it must be greater or equal than 1." ; Object [ ] _parameters = { new Integer ( height ) } ; throw new IllegalArgumentException ( MessageFormat . format ( _message , _parameters ) ) ; } myHeight = height ;
public class CacheSetUtil { /** * retrial the cached set * @ param key key * @ return the value set */ public static Single < Set < String > > values ( String key ) { } }
return values ( CacheService . CACHE_CONFIG_BEAN , key ) ;
public class Node { /** * Update the configuration with the contents of the new { @ link Form } . * @ param submitForm * @ throws XMPPErrorException * @ throws NoResponseException * @ throws NotConnectedException * @ throws InterruptedException */ public void sendConfigurationForm ( Form submitForm ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
PubSub packet = createPubsubPacket ( Type . set , new FormNode ( FormNodeType . CONFIGURE_OWNER , getId ( ) , submitForm ) ) ; pubSubManager . getConnection ( ) . createStanzaCollectorAndSend ( packet ) . nextResultOrThrow ( ) ;
public class LdapServices { /** * Get a { @ link Map } of { @ link ILdapServer } instances from the spring configuration . * @ return A { @ link Map } of { @ link ILdapServer } instances . */ @ SuppressWarnings ( "unchecked" ) public static Map < String , ILdapServer > getLdapServerMap ( ) { } }
final ApplicationContext applicationContext = PortalApplicationContextLocator . getApplicationContext ( ) ; final Map < String , ILdapServer > ldapServers = applicationContext . getBeansOfType ( ILdapServer . class ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "Found Map of ILdapServers=" + ldapServers + "'" ) ; } return Collections . unmodifiableMap ( ldapServers ) ;
public class WebFragmentTypeImpl { /** * Returns all < code > ejb - local - ref < / code > elements * @ return list of < code > ejb - local - ref < / code > */ public List < EjbLocalRefType < WebFragmentType < T > > > getAllEjbLocalRef ( ) { } }
List < EjbLocalRefType < WebFragmentType < T > > > list = new ArrayList < EjbLocalRefType < WebFragmentType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "ejb-local-ref" ) ; for ( Node node : nodeList ) { EjbLocalRefType < WebFragmentType < T > > type = new EjbLocalRefTypeImpl < WebFragmentType < T > > ( this , "ejb-local-ref" , childNode , node ) ; list . add ( type ) ; } return list ;
public class ObjectWritable { /** * Read a { @ link Writable } , { @ link String } , primitive type , or an array of * the preceding . */ @ SuppressWarnings ( "unchecked" ) public static Object readObject ( DataInput in , ObjectWritable objectWritable , Configuration conf ) throws IOException { } }
String className = UTF8 . readString ( in ) ; // fast processing of ShortVoid if ( FastWritableRegister . isVoidType ( className ) ) { setObjectWritable ( objectWritable , ShortVoid . class , ShortVoid . instance ) ; return ShortVoid . instance ; } // handle fast writable objects first FastWritable fwInstance = FastWritableRegister . tryGetInstance ( className , conf ) ; if ( fwInstance != null ) { fwInstance . readFields ( in ) ; setObjectWritable ( objectWritable , fwInstance . getClass ( ) , fwInstance ) ; return fwInstance ; } Class < ? > declaredClass = getClassWithCaching ( className , conf ) ; // read once whether we should support jobconf // HDFS does not need to support this , which saves on Writable . newInstance if ( ! initializedSupportJobConf ) { supportJobConf = conf . getBoolean ( "rpc.support.jobconf" , true ) ; // since this is not synchronized , there is a race condition here // but we are ok with two instances initializing this // if the operations are re - ordered , it is still fine , another thread // might use " true " instead of false for one time initializedSupportJobConf = true ; } Object instance ; if ( declaredClass . isPrimitive ( ) ) { // primitive types if ( declaredClass == Boolean . TYPE ) { // boolean instance = Boolean . valueOf ( in . readBoolean ( ) ) ; } else if ( declaredClass == Character . TYPE ) { // char instance = Character . valueOf ( in . readChar ( ) ) ; } else if ( declaredClass == Byte . TYPE ) { // byte instance = Byte . valueOf ( in . readByte ( ) ) ; } else if ( declaredClass == Short . TYPE ) { // short instance = Short . valueOf ( in . readShort ( ) ) ; } else if ( declaredClass == Integer . TYPE ) { // int instance = Integer . valueOf ( in . readInt ( ) ) ; } else if ( declaredClass == Long . TYPE ) { // long instance = Long . valueOf ( in . readLong ( ) ) ; } else if ( declaredClass == Float . TYPE ) { // float instance = Float . valueOf ( in . readFloat ( ) ) ; } else if ( declaredClass == Double . 
TYPE ) { // double instance = Double . valueOf ( in . readDouble ( ) ) ; } else if ( declaredClass == Void . TYPE ) { // void instance = null ; } else { throw new IllegalArgumentException ( "Not a primitive: " + declaredClass ) ; } } else if ( declaredClass . isArray ( ) ) { // array int length = in . readInt ( ) ; instance = Array . newInstance ( declaredClass . getComponentType ( ) , length ) ; for ( int i = 0 ; i < length ; i ++ ) { Array . set ( instance , i , readObject ( in , conf ) ) ; } } else if ( declaredClass == String . class ) { // String instance = UTF8 . readString ( in ) ; } else if ( declaredClass . isEnum ( ) ) { // enum instance = Enum . valueOf ( ( Class < ? extends Enum > ) declaredClass , UTF8 . readString ( in ) ) ; } else { // Writable String str = UTF8 . readString ( in ) ; Class instanceClass = getClassWithCaching ( str , conf ) ; Writable writable = WritableFactories . newInstance ( instanceClass , conf , supportJobConf ) ; writable . readFields ( in ) ; instance = writable ; if ( instanceClass == NullInstance . class ) { // null declaredClass = ( ( NullInstance ) instance ) . declaredClass ; instance = null ; } } setObjectWritable ( objectWritable , declaredClass , instance ) ; return instance ;
public class DisruptorForCommandFactory { /** * one event one EventDisruptor * @ param topic * @ return */ public Disruptor createDisruptor ( String topic ) { } }
Disruptor disruptor = createDisruptorWithEventHandler ( topic ) ; if ( disruptor != null ) disruptor . start ( ) ; return disruptor ;
public class DescribeComputeEnvironmentsResult { /** * The list of compute environments . * @ param computeEnvironments * The list of compute environments . */ public void setComputeEnvironments ( java . util . Collection < ComputeEnvironmentDetail > computeEnvironments ) { } }
if ( computeEnvironments == null ) { this . computeEnvironments = null ; return ; } this . computeEnvironments = new java . util . ArrayList < ComputeEnvironmentDetail > ( computeEnvironments ) ;
public class Cooccurrence { /** * getter for confidence - gets To which degree we are confident about this being a true co - occurrence * @ generated * @ return value of the feature */ public float getConfidence ( ) { } }
if ( Cooccurrence_Type . featOkTst && ( ( Cooccurrence_Type ) jcasType ) . casFeat_confidence == null ) jcasType . jcas . throwFeatMissing ( "confidence" , "ch.epfl.bbp.uima.types.Cooccurrence" ) ; return jcasType . ll_cas . ll_getFloatValue ( addr , ( ( Cooccurrence_Type ) jcasType ) . casFeatCode_confidence ) ;
public class InputStreamReaderSource { /** * Returns a new Reader on the underlying source object . */ public Reader getReader ( ) throws IOException { } }
if ( stream != null ) { Reader reader = new InputStreamReader ( stream , configuration . getSourceEncoding ( ) ) ; stream = null ; return reader ; } return null ;
public class AOStream {
    /**
     * Returns the state stream backing this AO stream.
     *
     * @see com.ibm.ws.sib.processor.gd.Stream#getStateStream()
     * @return the state stream
     */
    public StateStream getStateStream() {
        // Entry/exit trace pair emitted back-to-back since there is no work in between.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getStateStream");
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getStateStream", stream);
        return stream;
    }
}
public class StringSupport { /** * tells whether all characters in a String are letters or digits or part of a given String * @ param inString to evaluate * @ param chars characters which are allowed in the given String */ public static boolean isAlphaNumericOrContainsOnlyCharacters ( String in , String chars ) { } }
char c = 0 ; for ( int i = 0 ; i < in . length ( ) ; i ++ ) { c = in . charAt ( i ) ; if ( Character . isLetterOrDigit ( c ) == ( chars . indexOf ( c ) != - 1 ) ) { return false ; } } return true ;
public class JavadocFixTool { /** * Main procedure - proceed with the searching and / or fixing depending on * the command line parameters * @ param name Path to the document root */ public void proceed ( String name ) { } }
try { File folder = new File ( name ) ; if ( folder . exists ( ) && folder . isDirectory ( ) && folder . canRead ( ) ) { searchAndPatch ( folder ) ; } else { System . err . println ( "Invalid folder in parameter \"" + name + "\"" ) ; printUsage ( System . err ) ; } } catch ( Exception ignored ) { } // Die silently
public class PiwikRequest {
    /**
     * Get the url encoded query string represented by this object.
     *
     * Regular parameters come first, followed by one key=value pair per value
     * of each custom tracking parameter, all joined with '&'.
     *
     * @return the url encoded query string represented by this object
     */
    public String getUrlEncodedQueryString() {
        StringBuilder sb = new StringBuilder();
        for (Entry<String, Object> parameter : parameters.entrySet()) {
            if (sb.length() > 0) {
                sb.append("&");
            }
            try {
                StringBuilder sb2 = new StringBuilder();
                // NOTE(review): the key is appended un-encoded here, while custom
                // tracking parameter keys below ARE URL-encoded — presumably the
                // built-in parameter keys are known URL-safe tokens; confirm.
                sb2.append(parameter.getKey());
                sb2.append("=");
                sb2.append(URLEncoder.encode(parameter.getValue().toString(), "UTF-8"));
                sb.append(sb2);
            } catch (UnsupportedEncodingException e) {
                // UTF-8 is guaranteed by the JVM spec, so this branch is effectively dead.
                System.err.println(e.getMessage());
            }
        }
        for (Entry<String, List> customTrackingParameter : customTrackingParameters.entrySet()) {
            // Multi-valued: each value becomes its own key=value pair.
            for (Object o : customTrackingParameter.getValue()) {
                if (sb.length() > 0) {
                    sb.append("&");
                }
                try {
                    StringBuilder sb2 = new StringBuilder();
                    sb2.append(URLEncoder.encode(customTrackingParameter.getKey(), "UTF-8"));
                    sb2.append("=");
                    sb2.append(URLEncoder.encode(o.toString(), "UTF-8"));
                    sb.append(sb2);
                } catch (UnsupportedEncodingException e) {
                    System.err.println(e.getMessage());
                }
            }
        }
        return sb.toString();
    }
}
public class TrivialSwap {
    /**
     * Swap the elements of two lists at the same position. The run time of this
     * method depends on the implementation of the lists since elements are
     * removed and added in the lists.
     *
     * @param <E>   the type of elements in the lists
     * @param list1 one of the lists that will have one of its values swapped
     * @param list2 the other list that will have one of its values swapped
     * @param index the index of the lists that will have their values swapped
     */
    public static <E> void swap(List<E> list1, List<E> list2, int index) {
        // Delegates to the two-index overload, using the same index in both lists.
        TrivialSwap.swap(list1, index, list2, index);
    }
}
public class CommerceTaxFixedRatePersistenceImpl { /** * Returns the commerce tax fixed rate with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found . * @ param primaryKey the primary key of the commerce tax fixed rate * @ return the commerce tax fixed rate * @ throws NoSuchTaxFixedRateException if a commerce tax fixed rate with the primary key could not be found */ @ Override public CommerceTaxFixedRate findByPrimaryKey ( Serializable primaryKey ) throws NoSuchTaxFixedRateException { } }
CommerceTaxFixedRate commerceTaxFixedRate = fetchByPrimaryKey ( primaryKey ) ; if ( commerceTaxFixedRate == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchTaxFixedRateException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return commerceTaxFixedRate ;
public class AbstractPendingLinkingCandidate { /** * Returns the mapping of type parameters to their bound arguments . * @ see # initializeTypeParameterMapping ( ) */ @ Override protected Map < JvmTypeParameter , LightweightMergedBoundTypeArgument > getTypeParameterMapping ( ) { } }
if ( typeParameterMapping == null ) { typeParameterMapping = initializeTypeParameterMapping ( ) ; } return typeParameterMapping ;
public class FunctionList { /** * / * internal */ FunctionList filter ( Filter f ) { } }
List < Function > r = new ArrayList < Function > ( ) ; for ( Function m : functions ) if ( f . keep ( m ) ) r . add ( m ) ; return new FunctionList ( r . toArray ( new Function [ 0 ] ) ) ;
public class RserverConf { /** * used for windows multi - session emulation . Incremented at each new Rscript instance . */ public static boolean isPortAvailable ( int p ) { } }
try { ServerSocket test = new ServerSocket ( p ) ; test . close ( ) ; } catch ( BindException e ) { return false ; } catch ( IOException e ) { return false ; } return true ;
public class Matrix4f {
    /**
     * Apply an arcball view transformation to this matrix with the given
     * <code>radius</code>, center <code>(centerX, centerY, centerZ)</code> and
     * the specified X and Y rotation angles. Equivalent to:
     * <code>translate(0, 0, -radius).rotateX(angleX).rotateY(angleY).translate(-centerX, -centerY, -centerZ)</code>
     *
     * @param radius  the arcball radius
     * @param centerX the x coordinate of the arcball center
     * @param centerY the y coordinate of the arcball center
     * @param centerZ the z coordinate of the arcball center
     * @param angleX  the rotation angle around the X axis in radians
     * @param angleY  the rotation angle around the Y axis in radians
     * @return a matrix holding the result
     */
    public Matrix4f arcball(float radius, float centerX, float centerY, float centerZ, float angleX, float angleY) {
        // Delegate to the destination-taking overload; thisOrNew() supplies the target matrix.
        return arcball(radius, centerX, centerY, centerZ, angleX, angleY, thisOrNew());
    }
}
public class BlockType {
    /**
     * Make this object an exact duplicate of the given object.
     *
     * @param other the other BlockType object to copy from
     */
    public void copyFrom(BlockType other) {
        this.isValid = other.isValid;
        this.isTop = other.isTop;
        if (isValid) {
            this.depth = other.depth;
            // Order matters: wipe our own bits first, then OR in the other's,
            // leaving this holding exactly other's bit pattern.
            this.clear();
            this.or(other);
        }
    }
}
public class UtlReflection { /** * < p > Retrieve setter from given class by field name . < / p > * @ param pClazz - class * @ param pFieldName - field name * @ return Method setter . * @ throws Exception if method not exist */ @ Override public final Method retrieveSetterForField ( final Class < ? > pClazz , final String pFieldName ) throws Exception { } }
String setterName = "set" + pFieldName . substring ( 0 , 1 ) . toUpperCase ( ) + pFieldName . substring ( 1 ) ; return retrieveMethod ( pClazz , setterName ) ;
public class Ifc2x3tc1PackageImpl {
    /**
     * Generated EMF accessor: lazily resolves the IfcTimeSeriesSchedule EClass
     * from the registered package. 615 is the generated classifier index —
     * do not edit by hand.
     *
     * @generated
     */
    public EClass getIfcTimeSeriesSchedule() {
        if (ifcTimeSeriesScheduleEClass == null) {
            ifcTimeSeriesScheduleEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(615);
        }
        return ifcTimeSeriesScheduleEClass;
    }
}
public class Ghprb { /** * Checks for skip pattern in the passed string * @ param patterns The map of Patter to String values * @ param string The string we ' re looking for the phrase in * @ return the skip value or null if we don ' t find it */ private String checkSkipBuildInString ( Map < Pattern , String > patterns , String string ) { } }
// check for skip build phrase in the passed string if ( ! patterns . isEmpty ( ) && StringUtils . isNotBlank ( string ) ) { for ( Map . Entry < Pattern , String > e : patterns . entrySet ( ) ) { if ( e . getKey ( ) . matcher ( string ) . matches ( ) ) { return e . getValue ( ) ; } } } return null ;
public class DatabaseInformationMain {
    /**
     * Retrieves the system table corresponding to the specified tableIndex
     * value.
     *
     * @param tableIndex int value identifying the system table to generate
     * @return the system table for tableIndex, or null for an unknown index
     */
    protected Table generateTable(int tableIndex) {
        Table t = sysTables[tableIndex];
        // This class produces non-null content only for the tables absolutely
        // essential to the JDBC 1 spec and the HSQLDB core; most producer
        // methods are final, and richer content for the remaining views is
        // supplied by DatabaseInformationFull. The split keeps the base class
        // small while (1) reserving system table names even in distributions
        // that drop DatabaseMetaData support entirely, and (2) guaranteeing
        // JDK 1.1 / JDBC 1 level metadata here.
        switch (tableIndex) {
            case SYSTEM_ALLTYPEINFO:
                return SYSTEM_ALLTYPEINFO();
            case SYSTEM_BESTROWIDENTIFIER:
                return SYSTEM_BESTROWIDENTIFIER();
            case SYSTEM_COLUMNS:
                return SYSTEM_COLUMNS();
            case SYSTEM_CROSSREFERENCE:
                return SYSTEM_CROSSREFERENCE();
            case SYSTEM_INDEXINFO:
                return SYSTEM_INDEXINFO();
            case SYSTEM_PRIMARYKEYS:
                return SYSTEM_PRIMARYKEYS();
            case SYSTEM_PROCEDURECOLUMNS:
                return SYSTEM_PROCEDURECOLUMNS();
            case SYSTEM_PROCEDURES:
                return SYSTEM_PROCEDURES();
            case SYSTEM_SCHEMAS:
                return SYSTEM_SCHEMAS();
            case SYSTEM_TABLES:
                return SYSTEM_TABLES();
            case SYSTEM_TABLETYPES:
                return SYSTEM_TABLETYPES();
            case SYSTEM_TYPEINFO:
                return SYSTEM_TYPEINFO();
            case SYSTEM_USERS:
                return SYSTEM_USERS();
            case SYSTEM_SEQUENCES:
                return SYSTEM_SEQUENCES();
            case COLUMN_PRIVILEGES:
                return COLUMN_PRIVILEGES();
            case SEQUENCES:
                return SEQUENCES();
            case TABLE_PRIVILEGES:
                return TABLE_PRIVILEGES();
            case INFORMATION_SCHEMA_CATALOG_NAME:
                return INFORMATION_SCHEMA_CATALOG_NAME();
            default:
                return null;
        }
    }
}
public class JobsImpl {
    /**
     * Lists the jobs that have been created under the specified job schedule.
     *
     * @param jobScheduleId the ID of the job schedule from which you want to get a list of jobs
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws BatchErrorException thrown if the request is rejected by the server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;CloudJob&gt; object if successful
     */
    public PagedList<CloudJob> listFromJobSchedule(final String jobScheduleId) {
        // Block on the async first-page call, then wrap it in a PagedList whose
        // nextPage() lazily (and also blockingly) fetches subsequent pages.
        ServiceResponseWithHeaders<Page<CloudJob>, JobListFromJobScheduleHeaders> response = listFromJobScheduleSinglePageAsync(jobScheduleId).toBlocking().single();
        return new PagedList<CloudJob>(response.body()) {
            @Override
            public Page<CloudJob> nextPage(String nextPageLink) {
                return listFromJobScheduleNextSinglePageAsync(nextPageLink, null).toBlocking().single().body();
            }
        };
    }
}
public class RESTRequest { /** * Convenience method that gets the { @ link TableDefinition } of the table defined by * the decoded " { table } " variable in the current request from the given application * definition . If the given table is not found , an IllegalArgumentException is thrown * so the REST API can turn it into a 400 Bad Request response . * @ param appDef { @ link ApplicationDefinition } of application to get table for . * @ return { @ link TableDefinition } of table . Won ' t be null since an exception * is thrown if the table isn ' t found . */ public TableDefinition getTableDef ( ApplicationDefinition appDef ) { } }
assert appDef != null ; String table = getVariableDecoded ( "table" ) ; if ( Utils . isEmpty ( table ) ) { throw new RuntimeException ( "Missing {table} variable" ) ; } TableDefinition tableDef = m_appDef . getTableDef ( table ) ; if ( tableDef == null ) { throw new IllegalArgumentException ( "Unknown table for application '" + m_appDef . getAppName ( ) + "': " + table ) ; } return tableDef ;
public class MultipartParser {
    /**
     * Read the next part arriving in the stream. Will be either a
     * <code>FilePart</code> or a <code>ParamPart</code>, or <code>null</code>
     * to indicate there are no more parts to read. The order of arrival
     * corresponds to the order of the form elements in the submitted form.
     *
     * @return either a <code>FilePart</code>, a <code>ParamPart</code> or
     *         <code>null</code> if there are no more parts to read
     * @exception IOException if an input or output exception has occurred
     * @see FilePart
     * @see ParamPart
     */
    public Part readNextPart() throws IOException {
        // Make sure the last file was entirely read from the input before
        // advancing to the next part boundary.
        if (lastFilePart != null) {
            lastFilePart.getInputStream().close();
            lastFilePart = null;
        }
        // Read the headers; they look like this (not all may be present):
        //   Content-Disposition: form-data; name="field1"; filename="file1.txt"
        //   Content-Type: type/subtype
        //   Content-Transfer-Encoding: binary
        Vector headers = new Vector();
        String line = readLine();
        if (line == null) {
            // No parts left, we're done
            return null;
        } else if (line.length() == 0) {
            // IE4 on Mac sends an empty line at the end; treat that as the end.
            return null;
        }
        // Read header lines until we hit an empty line. A line starting with
        // whitespace is a continuation of the previous header and is folded in.
        while (line != null && line.length() > 0) {
            String nextLine = null;
            boolean getNextLine = true;
            while (getNextLine) {
                nextLine = readLine();
                if (nextLine != null && (nextLine.startsWith(" ") || nextLine.startsWith("\t"))) {
                    line = line + nextLine;
                } else {
                    getNextLine = false;
                }
            }
            headers.addElement(line);
            line = nextLine;
        }
        // A null here means the stream ended mid-headers: treat as end of parts.
        if (line == null) {
            return null;
        }
        String name = null;
        String filename = null;
        String origname = null;
        String contentType = "text/plain";  // rfc1867 says this is the default
        Enumeration enu = headers.elements();
        while (enu.hasMoreElements()) {
            String headerline = (String) enu.nextElement();
            if (headerline.toLowerCase().startsWith("content-disposition:")) {
                // Parse the content-disposition line; dispInfo[0] (the
                // disposition itself) is not currently used.
                String[] dispInfo = extractDispositionInfo(headerline);
                name = dispInfo[1];
                filename = dispInfo[2];
                origname = dispInfo[3];
            } else if (headerline.toLowerCase().startsWith("content-type:")) {
                // Get the content type, or keep the default if none specified.
                String type = extractContentType(headerline);
                if (type != null) {
                    contentType = type;
                }
            }
        }
        // Now read the content (the part object consumes up to the boundary).
        if (filename == null) {
            // This is a plain parameter; the encoding is needed to parse the value.
            return new ParamPart(name, in, boundary, encoding);
        } else {
            // This is a file upload.
            if (filename.equals("")) {
                filename = null;  // empty filename, probably an "empty" file param
            }
            lastFilePart = new FilePart(name, in, boundary, contentType, filename, origname);
            return lastFilePart;
        }
    }
}
public class WebhooksInner {
    /**
     * Triggers a ping event to be sent to the webhook.
     *
     * @param resourceGroupName the name of the resource group to which the container registry belongs
     * @param registryName      the name of the container registry
     * @param webhookName       the name of the webhook
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the EventInfoInner object
     */
    public Observable<ServiceResponse<EventInfoInner>> pingWithServiceResponseAsync(String resourceGroupName, String registryName, String webhookName) {
        // Fail fast on missing required parameters before issuing the call.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (registryName == null) {
            throw new IllegalArgumentException("Parameter registryName is required and cannot be null.");
        }
        if (webhookName == null) {
            throw new IllegalArgumentException("Parameter webhookName is required and cannot be null.");
        }
        final String apiVersion = "2017-10-01";
        // Issue the REST call and map the raw response into a typed ServiceResponse,
        // converting any deserialization/service error into an error Observable.
        return service.ping(this.client.subscriptionId(), resourceGroupName, registryName, webhookName, apiVersion, this.client.acceptLanguage(), this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<EventInfoInner>>>() {
            @Override
            public Observable<ServiceResponse<EventInfoInner>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<EventInfoInner> clientResponse = pingDelegate(response);
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
public class AnnotatedMethodFilter { /** * { @ inheritDoc } */ public boolean match ( MetadataReader metadataReader , MetadataReaderFactory metadataReaderFactory ) throws IOException { } }
AnnotationMetadata annotationMetadata = metadataReader . getAnnotationMetadata ( ) ; Set < MethodMetadata > annotatedMethods = annotationMetadata . getAnnotatedMethods ( annotatedClass . getCanonicalName ( ) ) ; return ! annotatedMethods . isEmpty ( ) ;
public class Execution {
    /**
     * Adds the deferred publisher if it's needed at the end of the query.
     * This is also a good time for the deferred code to start running.
     */
    private CompletableFuture<ExecutionResult> deferSupport(ExecutionContext executionContext, CompletableFuture<ExecutionResult> result) {
        return result.thenApply(er -> {
            DeferSupport deferSupport = executionContext.getDeferSupport();
            if (deferSupport.isDeferDetected()) {
                // We start the rest of the query now to maximize throughput: the
                // initial important results are in hand, so the deferred calls can
                // begin as early as possible (even before anyone subscribes).
                Publisher<ExecutionResult> publisher = deferSupport.startDeferredCalls();
                return ExecutionResultImpl.newExecutionResult().from(er).addExtension(GraphQL.DEFERRED_RESULTS, publisher).build();
            }
            // No @defer in the query: pass the result through untouched.
            return er;
        });
    }
}
public class DescribeEventDetailsResult { /** * Error messages for any events that could not be retrieved . * @ param failedSet * Error messages for any events that could not be retrieved . */ public void setFailedSet ( java . util . Collection < EventDetailsErrorItem > failedSet ) { } }
if ( failedSet == null ) { this . failedSet = null ; return ; } this . failedSet = new java . util . ArrayList < EventDetailsErrorItem > ( failedSet ) ;
public class Typeahead {
    /**
     * Triggered when the query is autocompleted (i.e. the query was changed to
     * the hint).
     *
     * @param event      the native event
     * @param suggestion the suggestion object that was accepted
     */
    private void onAutoCompleted(final Event event, final Suggestion<T> suggestion) {
        // Re-fire as a typed TypeaheadAutoCompletedEvent on this widget so
        // registered handlers can react.
        TypeaheadAutoCompletedEvent.fire(this, suggestion, event);
    }
}
public class Light4jHttpClientProvider { /** * Create instances of " io . undertow . client . http . HttpClientConnection " using reflections */ private ClientConnection createHttpClientConnection ( final StreamConnection connection , final OptionMap options , final ByteBufferPool bufferPool ) { } }
try { Class < ? > cls = Class . forName ( "io.undertow.client.http.HttpClientConnection" ) ; Constructor < ? > o = cls . getDeclaredConstructor ( StreamConnection . class , OptionMap . class , ByteBufferPool . class ) ; o . setAccessible ( true ) ; return ( ClientConnection ) o . newInstance ( connection , options , bufferPool ) ; } catch ( Exception e ) { logger . error ( e . getMessage ( ) , e ) ; } return null ;
public class FNDImpl {
    /**
     * Generated EMF setter for the maxHSize attribute — do not edit by hand.
     *
     * @generated
     */
    public void setMaxHSize(Integer newMaxHSize) {
        Integer oldMaxHSize = maxHSize;
        maxHSize = newMaxHSize;
        // Notify observers of the change only when someone is listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.FND__MAX_HSIZE, oldMaxHSize, maxHSize));
    }
}
public class ManualDescriptor {
    /**
     * Getter for geneSymbolList — gets the GeneSymbolList in PubMed.
     *
     * Generated UIMA JCas accessor — do not edit by hand.
     *
     * @return value of the geneSymbolList feature
     */
    public StringArray getGeneSymbolList() {
        // Feature-existence check first (throws if the type system lacks the
        // feature), then resolve the StringArray via the low-level CAS reference.
        if (ManualDescriptor_Type.featOkTst && ((ManualDescriptor_Type) jcasType).casFeat_geneSymbolList == null)
            jcasType.jcas.throwFeatMissing("geneSymbolList", "de.julielab.jules.types.pubmed.ManualDescriptor");
        return (StringArray) (jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((ManualDescriptor_Type) jcasType).casFeatCode_geneSymbolList)));
    }
}
public class SharedTreeSubgraph { /** * Make the left child of a node . * @ param parent Parent node * @ return The new child node */ public SharedTreeNode makeLeftChildNode ( SharedTreeNode parent ) { } }
SharedTreeNode child = new SharedTreeNode ( nodesArray . size ( ) , parent , subgraphNumber , parent . getDepth ( ) + 1 ) ; nodesArray . add ( child ) ; makeLeftEdge ( parent , child ) ; return child ;
public class CmsResourceBuilder {
    /**
     * Builds the resource from the accumulated builder state.
     *
     * @return the cms resource
     */
    public CmsResource buildResource() {
        // NOTE(review): m_flags is passed twice (5th and 15th argument). If the
        // 15th CmsResource constructor parameter is a sibling count rather than
        // flags, this is a copy/paste slip — confirm against the CmsResource
        // constructor signature.
        return new CmsResource(m_structureId, m_resourceId, m_rootPath, m_type, m_flags, m_projectLastModified, m_state, m_dateCreated, m_userCreated, m_dateLastModified, m_userLastModified, m_dateReleased, m_dateExpired, m_length, m_flags, m_dateContent, m_version);
    }
}
public class cachecontentgroup { /** * Use this API to fetch cachecontentgroup resource of given name . */ public static cachecontentgroup get ( nitro_service service , String name ) throws Exception { } }
cachecontentgroup obj = new cachecontentgroup ( ) ; obj . set_name ( name ) ; cachecontentgroup response = ( cachecontentgroup ) obj . get_resource ( service ) ; return response ;
public class OptionGroup { /** * Indicates what options are available in the option group . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setOptions ( java . util . Collection ) } or { @ link # withOptions ( java . util . Collection ) } if you want to override * the existing values . * @ param options * Indicates what options are available in the option group . * @ return Returns a reference to this object so that method calls can be chained together . */ public OptionGroup withOptions ( Option ... options ) { } }
if ( this . options == null ) { setOptions ( new com . amazonaws . internal . SdkInternalList < Option > ( options . length ) ) ; } for ( Option ele : options ) { this . options . add ( ele ) ; } return this ;
public class CoverageDataPng { /** * Get the coverage data value * @ param griddedTile * gridded tile * @ param raster * image raster * @ param x * x coordinate * @ param y * y coordinate * @ return coverage data value */ public Double getValue ( GriddedTile griddedTile , WritableRaster raster , int x , int y ) { } }
short pixelValue = getPixelValue ( raster , x , y ) ; Double value = getValue ( griddedTile , pixelValue ) ; return value ;
public class Organizer {
    /**
     * Copy data pairwise between two collections of Copier objects: both
     * collections are copied into lists, sorted, and then each element of
     * aTo receives the data of the element of aFrom at the same rank. Stops
     * at the shorter of the two; does nothing when either argument is null.
     *
     * @param aFrom the objects to copy from
     * @param aTo   the objects to copy to
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public static void makeCopies(Collection<Copier> aFrom, Collection<Copier> aTo) {
        if (aFrom == null || aTo == null)
            return;
        // Work on copies so the caller's collections are not reordered.
        List<Copier> fromList = new ArrayList<Copier>(aFrom);
        List<Copier> toList = new ArrayList<Copier>(aTo);
        // Raw-typed sorts: assumes Copier elements are mutually Comparable — 
        // hence the rawtypes/unchecked suppression above.
        Collections.sort((List) fromList);
        Collections.sort((List) toList);
        Copier from = null;
        Copier to = null;
        Iterator<Copier> toIter = toList.iterator();
        for (Iterator<Copier> i = fromList.iterator(); i.hasNext() && toIter.hasNext();) {
            from = (Copier) i.next();
            to = (Copier) toIter.next();
            // copy data
            to.copy(from);
        }
    }
}
public class AssertKripton { /** * Assert true or invalid method sign exception . * @ param expression * the expression * @ param method * the method * @ param messageFormat * the message format * @ param args * the args */ public static void assertTrueOrInvalidMethodSignException ( boolean expression , SQLiteModelMethod method , String messageFormat , Object ... args ) { } }
if ( ! expression ) throw ( new InvalidMethodSignException ( method , String . format ( messageFormat , args ) ) ) ;
public class CacheEntry {
    /**
     * Returns this cache entry to its pool: unlinks it from the LRU list (if
     * linked), resets its state, and hands it back to the pool for reuse.
     */
    protected void returnToPool() {
        // Capture the pool reference before reset() may clear local state.
        CacheEntryPool cep = cacheEntryPool;
        // An assertion failure here means this CE is being returned to the pool
        // while still in use, which should never happen.
        assert refCount.get() == 0;
        if (lruHead != null)
            lruHead.remove(this);
        reset();
        cep.add(this);
    }
}
public class ConvergedServletContextImpl {
    /**
     * Executes the given method on the specified <code>ServletContextImpl</code>.
     * When package protection is enabled, the invocation is wrapped in a
     * doPrivileged block so it runs with this class's privileges.
     *
     * @param method  the method object to be invoked
     * @param context the ServletContextImpl object on which the method will be invoked
     * @param params  the arguments passed to the called method
     * @return whatever the invoked method returns
     */
    private Object executeMethod(final Method method, final ServletContextImpl context, final Object[] params) throws PrivilegedActionException, IllegalAccessException, InvocationTargetException {
        if (SecurityUtil.isPackageProtectionEnabled()) {
            // Security-sensitive path: reflective call under AccessController.
            return AccessController.doPrivileged(new PrivilegedExceptionAction<Object>() {
                public Object run() throws IllegalAccessException, InvocationTargetException {
                    return method.invoke(context, params);
                }
            });
        } else {
            return method.invoke(context, params);
        }
    }
}
public class MDAG {
    /**
     * Adds a string to the MDAG.
     *
     * @param str the String to be added to the MDAG
     */
    public void addString(String str) {
        if (sourceNode != null) {
            // Graph is in its node-linked (modifiable) form: insert, then run
            // replaceOrRegister to restore minimality along str's path.
            addStringInternal(str);
            replaceOrRegister(sourceNode, str);
        } else {
            // sourceNode == null: the graph was simplified into its compact
            // form — convert back to the modifiable form and retry the insert.
            unSimplify();
            addString(str);
        }
    }
}
public class BeanMetaData { /** * LI3408 */ private String getCacheReloadIntervalString ( ) { } }
String reload ; switch ( ivCacheReloadType ) { case CACHE_RELOAD_NONE : reload = "N/A" ; break ; case CACHE_RELOAD_INTERVAL : reload = "INTERVAL (" + ivCacheReloadInterval + ")" ; break ; case CACHE_RELOAD_DAILY : reload = "DAILY (" + ivCacheReloadInterval + ")" ; break ; case CACHE_RELOAD_WEEKLY : reload = "WEEKLY (" + ivCacheReloadInterval + ")" ; break ; default : reload = "UNKNOWN (" + ivCacheReloadInterval + ")" ; } return reload ;
public class GeocodingApi {
    /**
     * Requests the street address of a {@code location}.
     *
     * @param context  the {@link GeoApiContext} to make requests through
     * @param location the location to reverse geocode
     * @return the request, ready to run
     */
    public static GeocodingApiRequest reverseGeocode(GeoApiContext context, LatLng location) {
        // Builder pattern: the request is configured but not executed here.
        GeocodingApiRequest request = new GeocodingApiRequest(context);
        request.latlng(location);
        return request;
    }
}
public class TerminalTextUtils {
    /**
     * Given a string and a character index inside that string, find out what
     * the column index of that character would be if printed in a terminal.
     * For pure non-CJK strings the result equals {@code stringCharacterIndex};
     * CJK characters occupy two columns, shifting later indexes right. If the
     * character at the index is itself CJK, the returned value is the index of
     * the left-hand column of that character.
     *
     * @param s String to translate the index from
     * @param stringCharacterIndex index within the string to get the terminal column index of
     * @param tabBehaviour the behaviour to use when encountering the tab character
     * @param firstCharacterColumnPosition screen column of the string's first character;
     *        applies only to alignment-based {@link TabBehaviour}
     * @return terminal column index of the character at {@code stringCharacterIndex}
     * @throws StringIndexOutOfBoundsException if the index is outside the String or negative
     */
    public static int getColumnIndex(String s, int stringCharacterIndex, TabBehaviour tabBehaviour, int firstCharacterColumnPosition) throws StringIndexOutOfBoundsException {
        int index = 0;
        for (int i = 0; i < stringCharacterIndex; i++) {
            if (s.charAt(i) == '\t') {
                // NOTE(review): alignment-based tab behaviours are given the
                // column of the FIRST character, not the running column —
                // confirm this is the intended contract of getTabReplacement.
                index += tabBehaviour.getTabReplacement(firstCharacterColumnPosition).length();
            } else {
                if (isCharCJK(s.charAt(i))) {
                    // CJK glyphs take an extra column.
                    index++;
                }
                index++;
            }
        }
        return index;
    }
}
public class AbstractDateCalculator {
    /**
     * Move the current date by a given tenor; if a date is either a 'weekend'
     * or a holiday, it is skipped according to the holiday handler and does not
     * count towards the number of days to move.
     *
     * @param tenor   the tenor
     * @param spotLag number of days to "spot"; this can vary from one market to another
     * @return the current calculator, so one can chain, e.g.
     *         calendar.moveByTenor(StandardTenor.T_2M).getCurrentBusinessDate()
     */
    @Override
    public DateCalculator<E> moveByTenor(final Tenor tenor, final int spotLag) {
        if (tenor == null) {
            throw new IllegalArgumentException("Tenor cannot be null");
        }
        TenorCode tenorCode = tenor.getCode();
        // Overnight and tom/next tenors are anchored to today, not spot.
        if (tenorCode != TenorCode.OVERNIGHT && tenorCode != TenorCode.TOM_NEXT /* && spotLag != 0 */) {
            // get to the Spot date first:
            moveToSpotDate(spotLag);
        }
        int unit = tenor.getUnits();
        // Normalize compound units: weeks -> days, years -> months.
        if (tenorCode == TenorCode.WEEK) {
            tenorCode = TenorCode.DAY;
            unit *= DAYS_IN_WEEK;
        }
        if (tenorCode == TenorCode.YEAR) {
            tenorCode = TenorCode.MONTH;
            unit *= MONTHS_IN_YEAR;
        }
        return applyTenor(tenorCode, unit);
    }
}
public class BoundedBuffer {
    /**
     * Increases the expedited buffer's capacity by the given amount.
     *
     * @param additionalCapacity
     *            The amount by which the expedited buffer's capacity should be increased;
     *            must be strictly positive.
     * @throws IllegalArgumentException if {@code additionalCapacity} is zero or negative
     */
    @SuppressWarnings("unchecked")
    public synchronized void expandExpedited(int additionalCapacity) {
        if (additionalCapacity <= 0) {
            throw new IllegalArgumentException();
        }
        // Snapshot the capacity before taking the inner lock so we can detect a
        // concurrent expansion that happened while we waited for it.
        int capacityBefore = expeditedBuffer.length;
        synchronized (lock) { // D312598
            int capacityAfter = expeditedBuffer.length;
            // Check that no one was expanding while we waited on this lock
            if (capacityAfter == capacityBefore) {
                final Object[] newBuffer = new Object[expeditedBuffer.length + additionalCapacity];
                // PK53203 - put() acquires two locks in sequence. First, it acquires
                // the insert lock to update putIndex. Then, it drops the insert lock
                // and acquires the extract lock to update numberOfUsedSlots. As a
                // result, there is a window where putIndex has been updated, but
                // numberOfUsedSlots has not. Consequently, even though we have
                // acquired both locks in this method, we cannot rely on the values in
                // numberOfUsedSlots; we can only rely on putIndex and takeIndex.
                if (expeditedPutIndex > expeditedTakeIndex) {
                    // The contents of the buffer do not wrap round the end of the
                    // array. We can move its contents into the new expanded buffer
                    // in one go.
                    int used = expeditedPutIndex - expeditedTakeIndex;
                    System.arraycopy(expeditedBuffer, expeditedTakeIndex, newBuffer, 0, used);
                    expeditedPutIndex = used;
                    // PK53203.1 - If putIndex == takeIndex, then the buffer is either
                    // completely full or completely empty. If it is completely full, then
                    // we need to copy and adjust putIndex. Otherwise, we need to set
                    // putIndex to 0.
                } else if (expeditedPutIndex != expeditedTakeIndex || expeditedBuffer[expeditedTakeIndex] != null) {
                    // The contents of the buffer wrap round the end of the array
                    // (or the buffer is completely full). We have to perform two
                    // copies to move its contents into the new buffer.
                    int used = expeditedBuffer.length - expeditedTakeIndex;
                    System.arraycopy(expeditedBuffer, expeditedTakeIndex, newBuffer, 0, used);
                    System.arraycopy(expeditedBuffer, 0, newBuffer, used, expeditedPutIndex);
                    expeditedPutIndex += used;
                } else {
                    // Buffer was completely empty: start writing at the front.
                    expeditedPutIndex = 0;
                }
                // The contents of the buffer now begin at 0 - update the head pointer.
                expeditedTakeIndex = 0;
                // The buffer's capacity has been increased so update the count of the
                // empty slots to reflect this.
                expeditedBuffer = (T[]) newBuffer;
            }
        } // D312598
    }
}
public class ManagedInstancesInner {
    /**
     * Creates or updates a managed instance.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param managedInstanceName The name of the managed instance.
     * @param parameters The requested managed instance resource state.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ManagedInstanceInner object if successful.
     */
    public ManagedInstanceInner beginCreateOrUpdate(String resourceGroupName, String managedInstanceName, ManagedInstanceInner parameters) {
        // Synchronous facade: blocks the calling thread on the async service call
        // and unwraps the single response's body.
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, managedInstanceName, parameters).toBlocking().single().body();
    }
}
public class AbstractSARLLaunchConfigurationDelegate { /** * Copied from JDT ' s super class , and patched for invoking * { @ link # getOrComputeUnresolvedSARLRuntimeClasspath ( ILaunchConfiguration ) } . * { @ inheritDoc } */ @ Override public String [ ] getBootpath ( ILaunchConfiguration configuration ) throws CoreException { } }
final String [ ] [ ] paths = getBootpathExt ( configuration ) ; final String [ ] pre = paths [ 0 ] ; final String [ ] main = paths [ 1 ] ; final String [ ] app = paths [ 2 ] ; if ( pre == null && main == null && app == null ) { // default return null ; } IRuntimeClasspathEntry [ ] entries = getOrComputeUnresolvedSARLRuntimeClasspath ( configuration ) ; entries = JavaRuntime . resolveRuntimeClasspath ( entries , configuration ) ; final List < String > bootEntries = new ArrayList < > ( entries . length ) ; boolean empty = true ; boolean allStandard = true ; for ( int i = 0 ; i < entries . length ; i ++ ) { if ( entries [ i ] . getClasspathProperty ( ) != IRuntimeClasspathEntry . USER_CLASSES ) { final String location = entries [ i ] . getLocation ( ) ; if ( location != null ) { empty = false ; bootEntries . add ( location ) ; allStandard = allStandard && entries [ i ] . getClasspathProperty ( ) == IRuntimeClasspathEntry . STANDARD_CLASSES ; } } } if ( empty ) { return new String [ 0 ] ; } else if ( allStandard ) { return null ; } else { return bootEntries . toArray ( new String [ bootEntries . size ( ) ] ) ; }
public class CmsSearchField { /** * To allow sorting on a field the field must be added to the map given to { @ link org . apache . solr . uninverting . UninvertingReader # wrap ( org . apache . lucene . index . DirectoryReader , Map ) } . * The method adds all default fields . * @ param uninvertingMap the map to which the fields are added . */ public static void addUninvertingMappings ( Map < String , Type > uninvertingMap ) { } }
uninvertingMap . put ( FIELD_CATEGORY , Type . SORTED ) ; uninvertingMap . put ( FIELD_CONTENT , Type . SORTED ) ; uninvertingMap . put ( FIELD_CONTENT_BLOB , Type . SORTED ) ; uninvertingMap . put ( FIELD_CONTENT_LOCALES , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_CONTENT , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_CREATED , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_CREATED_LOOKUP , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_EXPIRED , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_LASTMODIFIED , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_LASTMODIFIED_LOOKUP , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_LOOKUP_SUFFIX , Type . SORTED ) ; uninvertingMap . put ( FIELD_DATE_RELEASED , Type . SORTED ) ; uninvertingMap . put ( FIELD_DEPENDENCY_TYPE , Type . SORTED ) ; uninvertingMap . put ( FIELD_DESCRIPTION , Type . SORTED ) ; uninvertingMap . put ( FIELD_DYNAMIC_EXACT , Type . SORTED ) ; uninvertingMap . put ( FIELD_DYNAMIC_PROPERTIES , Type . SORTED ) ; uninvertingMap . put ( FIELD_EXCERPT , Type . SORTED ) ; uninvertingMap . put ( FIELD_FILENAME , Type . SORTED ) ; uninvertingMap . put ( FIELD_ID , Type . SORTED ) ; uninvertingMap . put ( FIELD_KEYWORDS , Type . SORTED ) ; uninvertingMap . put ( FIELD_LINK , Type . SORTED ) ; uninvertingMap . put ( FIELD_META , Type . SORTED ) ; uninvertingMap . put ( FIELD_MIMETYPE , Type . SORTED ) ; uninvertingMap . put ( FIELD_PARENT_FOLDERS , Type . SORTED ) ; uninvertingMap . put ( FIELD_PATH , Type . SORTED ) ; uninvertingMap . put ( FIELD_PREFIX_DEPENDENCY , Type . SORTED ) ; uninvertingMap . put ( FIELD_PREFIX_DYNAMIC , Type . SORTED ) ; uninvertingMap . put ( FIELD_PREFIX_TEXT , Type . SORTED ) ; uninvertingMap . put ( FIELD_PRIORITY , Type . SORTED ) ; uninvertingMap . put ( FIELD_RESOURCE_LOCALES , Type . SORTED ) ; uninvertingMap . put ( FIELD_SCORE , Type . SORTED ) ; uninvertingMap . put ( FIELD_SEARCH_EXCLUDE , Type . SORTED ) ; uninvertingMap . 
put ( FIELD_SIZE , Type . SORTED ) ; uninvertingMap . put ( FIELD_SORT_TITLE , Type . SORTED ) ; uninvertingMap . put ( FIELD_STATE , Type . SORTED ) ; uninvertingMap . put ( FIELD_SUFFIX , Type . SORTED ) ; uninvertingMap . put ( FIELD_TEXT , Type . SORTED ) ; uninvertingMap . put ( FIELD_TITLE , Type . SORTED ) ; uninvertingMap . put ( FIELD_TITLE_UNSTORED , Type . SORTED ) ; uninvertingMap . put ( FIELD_TYPE , Type . SORTED ) ; uninvertingMap . put ( FIELD_USER_CREATED , Type . SORTED ) ; uninvertingMap . put ( FIELD_USER_LAST_MODIFIED , Type . SORTED ) ; uninvertingMap . put ( FIELD_VERSION , Type . SORTED ) ;
public class HtmlDocument { /** * Returns the title of the page or null . */ @ Override public String getTitle ( ) { } }
HtmlElement title = htmlElements ( ) . find ( "title" ) ; return title != null ? title . getText ( ) : null ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcBoxedHalfSpace ( ) { } }
if ( ifcBoxedHalfSpaceEClass == null ) { ifcBoxedHalfSpaceEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 56 ) ; } return ifcBoxedHalfSpaceEClass ;
public class Utils { /** * Replies a singleton list with the given element , or the empty list if * the element is < code > null < / code > . * @ param < T > the type of the element in the list . * @ param element the element . * @ return the list with the element , or the empty list . */ public static < T > List < T > singletonList ( T element ) { } }
if ( element == null ) { return Collections . emptyList ( ) ; } return Collections . singletonList ( element ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcElectricalBaseProperties ( ) { } }
if ( ifcElectricalBasePropertiesEClass == null ) { ifcElectricalBasePropertiesEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 193 ) ; } return ifcElectricalBasePropertiesEClass ;
public class KerasCropping1D { /** * Get layer output type . * @ param inputType Array of InputTypes * @ return output type as InputType * @ throws InvalidKerasConfigurationException Invalid Keras config */ @ Override public InputType getOutputType ( InputType ... inputType ) throws InvalidKerasConfigurationException { } }
if ( inputType . length > 1 ) throw new InvalidKerasConfigurationException ( "Keras Cropping layer accepts only one input (received " + inputType . length + ")" ) ; return this . getCropping1DLayer ( ) . getOutputType ( - 1 , inputType [ 0 ] ) ;
public class SQLMultiScopeRecoveryLog {
    /**
     * Acquires ownership of the special row (RU_ID = -1) used in the HA locking
     * scheme. There is sometimes a lag in peer recovery where an old
     * server is closing down while a new server is opening the same
     * log for peer recovery.
     *
     * If the lock row exists it is updated to name this server as owner (unless it
     * already does); if it does not exist, it is inserted.
     *
     * @param conn open connection to the recovery database
     * @param lockingStmt statement already used to select the lock row (re-used for the update)
     * @param lockingRS result set positioned before the HA lock row, if any
     *
     * @exception SQLException thrown if a SQLException is
     *                encountered when accessing the
     *                Database.
     * @exception InternalLogException Thrown if an
     *                unexpected error has occured.
     */
    private void updateHADBLock(Connection conn, Statement lockingStmt, ResultSet lockingRS) throws SQLException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "updateHADBLock", new java.lang.Object[] { conn, lockingStmt, lockingRS, this });

        if (lockingRS.next()) {
            // We found the HA Lock row
            if (tc.isDebugEnabled())
                Tr.debug(tc, "Acquired lock on HA Lock row");
            String storedServerName = lockingRS.getString(1);
            if (tc.isDebugEnabled())
                Tr.debug(tc, "Stored server value is: " + storedServerName);
            if (_currentProcessServerName.equalsIgnoreCase(storedServerName)) {
                // Already the owner - nothing to change.
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "This server ALREADY OWNS the HA lock row");
            } else {
                // Take ownership by rewriting SERVER_NAME on the lock row.
                // NOTE(review): the server name is concatenated straight into the SQL string;
                // presumably it comes from trusted server configuration, but a PreparedStatement
                // would be safer - confirm the provenance of _currentProcessServerName.
                String updateString = "UPDATE " + _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix +
                                      " SET SERVER_NAME = '" + _currentProcessServerName + "' WHERE RU_ID = -1";
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "ANOTHER server OWNS the lock, lets update it using - " + updateString);
                int ret = lockingStmt.executeUpdate(updateString);
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "Have updated HA Lock row with return: " + ret);
            }
        } else {
            // Is this entirely necessary? We didn't find the HA Lock row in the table, perhaps we should barf
            short serviceId = (short) 1;
            String insertString = "INSERT INTO " + _recoveryTableName + _logIdentifierString + _recoveryTableNameSuffix +
                                  " (SERVER_NAME, SERVICE_ID, RU_ID, RUSECTION_ID, RUSECTION_DATA_INDEX, DATA)" +
                                  " VALUES (?,?,?,?,?,?)";
            PreparedStatement specStatement = null;
            try {
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "Need to setup HA Lock row using - " + insertString);
                specStatement = conn.prepareStatement(insertString);
                specStatement.setString(1, _currentProcessServerName);
                specStatement.setShort(2, serviceId);
                specStatement.setLong(3, -1); // NOTE RU_ID SET TO -1
                specStatement.setLong(4, 1);
                specStatement.setShort(5, (short) 1);
                byte buf[] = new byte[2]; // placeholder payload for the DATA column
                specStatement.setBytes(6, buf);
                int ret = specStatement.executeUpdate();
                if (tc.isDebugEnabled())
                    Tr.debug(tc, "Have inserted HA Lock row with return: " + ret);
            } finally {
                if (specStatement != null && !specStatement.isClosed())
                    specStatement.close();
            }
        }

        if (tc.isEntryEnabled())
            Tr.exit(tc, "updateHADBLock");
    }
}
public class Branch { /** * < p > This method should be called if you know that a different person is about to use the app . For example , * if you allow users to log out and let their friend use the app , you should call this to notify Branch * to create a new user for this device . This will clear the first and latest params , as a new session is created . < / p > * @ param callback An instance of { @ link io . branch . referral . Branch . LogoutStatusListener } to callback with the logout operation status . */ public void logout ( LogoutStatusListener callback ) { } }
ServerRequest req = new ServerRequestLogout ( context_ , callback ) ; if ( ! req . constructError_ && ! req . handleErrors ( context_ ) ) { handleNewRequest ( req ) ; }
public class CmsSchedulerConfiguration { /** * Adds a new job description for the scheduler . < p > * @ param jobInfo the job description to add */ public void addJobFromConfiguration ( CmsScheduledJobInfo jobInfo ) { } }
m_configuredJobs . add ( jobInfo ) ; if ( CmsLog . INIT . isInfoEnabled ( ) ) { CmsLog . INIT . info ( Messages . get ( ) . getBundle ( ) . key ( Messages . INIT_SCHEDULER_CONFIG_JOB_3 , jobInfo . getJobName ( ) , jobInfo . getClassName ( ) , jobInfo . getContextInfo ( ) . getUserName ( ) ) ) ; }
public class DomUtils { /** * HELPER METHOD TO PRINT A DOM TO STDOUT */ static public void displayNode ( Node node ) { } }
try { TransformerFactory TF = TransformerFactory . newInstance ( ) ; // Fortify Mod : disable external entity injection TF . setFeature ( XMLConstants . FEATURE_SECURE_PROCESSING , true ) ; Transformer identity = TF . newTransformer ( ) ; // End Fortify Mod identity . transform ( new DOMSource ( node ) , new StreamResult ( System . out ) ) ; } catch ( Exception ex ) { System . out . println ( "ERROR: " + ex . getMessage ( ) ) ; }
public class PresenceNotifySender {
    /**
     * This method sends the given request to the subscriber. Knowledge of JAIN-SIP API headers is
     * required. The request will be resent if challenged. Use this method only if you have previously
     * called processSubscribe(). Use this method if you don't care about checking the response to the
     * sent NOTIFY, otherwise use sendStatefulNotify().
     *
     * @param req javax.sip.message.Request to send.
     * @param viaProxy If true, send the message to the proxy. In this case a Route header will be
     *        added. Else send the message as is.
     * @return true if successful, false otherwise (call getErrorMessage() for details).
     */
    public boolean sendNotify(Request req, boolean viaProxy) {
        // Clear any stale error from a previous send attempt.
        setErrorMessage("");

        synchronized (dialogLock) {
            // A dialog is established by a previously processed SUBSCRIBE; without one
            // there is nothing to NOTIFY.
            if (dialog == null) {
                setErrorMessage("Can't send notify, haven't received a request");
                return false;
            }
            try {
                // Attach any cached credentials for this call-id so a challenged
                // request can be resent authorized.
                phone.addAuthorizations(((CallIdHeader) req.getHeader(CallIdHeader.NAME)).getCallId(), req);
                SipTransaction transaction = phone.sendRequestWithTransaction(req, viaProxy, dialog, this);
                if (transaction == null) {
                    // Delegate the failure detail reported by the phone layer.
                    setErrorMessage(phone.getErrorMessage());
                    return false;
                }
                // Remember what was sent so callers can inspect/resend it later.
                setLastSentNotify(req);
                LOG.trace("Sent NOTIFY to {}:\n{}", dialog.getRemoteParty().getURI(), req);
                return true;
            } catch (Exception e) {
                setErrorMessage(e.getClass().getName() + ": " + e.getMessage());
            }
        }
        return false;
    }
}
public class TaxinvoiceServiceImp {
    /**
     * (non-Javadoc)
     *
     * @see com.popbill.api.TaxinvoiceService#registIssue(java.lang.String, com.popbill.api.Taxinvoice, String, Boolean)
     */
    @Override
    public IssueResponse registIssue(String CorpNum, Taxinvoice taxinvoice, String Memo, Boolean ForceIssue) throws PopbillException {
        // Delegates to the full overload with defaults for the extra arguments.
        // NOTE(review): the literal false and the three trailing nulls presumably mean
        // "no state-memo write" and "no dealInvoiceNum/emailSubject/UserID" - confirm
        // against the full registIssue overload's signature.
        return registIssue(CorpNum, taxinvoice, false, Memo, ForceIssue, null, null, null);
    }
}
public class Files { /** * Copy the source { @ link File } to the target { @ link File } . */ public static void copy ( File source , File target ) throws IOException { } }
if ( ! source . exists ( ) ) { throw new FileNotFoundException ( "Source file not found:" + source . getAbsolutePath ( ) ) ; } if ( ! target . exists ( ) && ! target . getParentFile ( ) . exists ( ) && ! target . getParentFile ( ) . mkdirs ( ) ) { throw new IOException ( "Can't create target directory:" + target . getParentFile ( ) . getAbsolutePath ( ) ) ; } FileInputStream is = new FileInputStream ( source ) ; FileOutputStream os = new FileOutputStream ( target ) ; IOHelper . copy ( is , os ) ;
public class HtmlTool { /** * Finds a set of elements through a CSS selector and unwraps them . * This allows removing elements without losing their contents . * @ param root * root element for the selection * @ param selector * CSS selector for the elements to unwrap */ public final void unwrap ( final Element root , final String selector ) { } }
final Iterable < Element > elements ; // Elements to unwrap checkNotNull ( root , "Received a null pointer as root element" ) ; checkNotNull ( selector , "Received a null pointer as selector" ) ; // Selects and iterates over the elements elements = root . select ( selector ) ; for ( final Element element : elements ) { element . unwrap ( ) ; }
public class UniqueKeyGeneratorImpl { /** * Returns a unique value that is unique only to the current instance of the * < code > UniqueKeyGenerator < / code > * @ return A value which is unique to the current instance of the unique * key generator */ public long getPerInstanceUniqueValue ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getPerInstanceUniqueValue" ) ; long retval ; synchronized ( _instanceUniqueLock ) { retval = _instanceUnique -- ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getPerInstanceUniqueValue" , "return=" + retval ) ; return retval ;
public class EssentialNister5 {
    /**
     * Once z is known then x and y can be solved for using the B matrix.
     *
     * Builds a 3x2 linear system A*[x;y] = -b from cubic/quartic polynomials in z
     * (coefficients K, L, M computed by the helper) and solves it in a least-squares
     * sense, storing the results in this.x and this.y.
     */
    private void solveForXandY(double z) {
        this.z = z;

        // solve for x and y using the first two rows of B.
        // Each entry is a polynomial in z evaluated via Horner's method:
        // rows of A are cubics, entries of the RHS vector are quartics.
        tmpA.data[0] = ((helper.K00 * z + helper.K01) * z + helper.K02) * z + helper.K03;
        tmpA.data[1] = ((helper.K04 * z + helper.K05) * z + helper.K06) * z + helper.K07;
        tmpY.data[0] = (((helper.K08 * z + helper.K09) * z + helper.K10) * z + helper.K11) * z + helper.K12;

        tmpA.data[2] = ((helper.L00 * z + helper.L01) * z + helper.L02) * z + helper.L03;
        tmpA.data[3] = ((helper.L04 * z + helper.L05) * z + helper.L06) * z + helper.L07;
        tmpY.data[1] = (((helper.L08 * z + helper.L09) * z + helper.L10) * z + helper.L11) * z + helper.L12;

        tmpA.data[4] = ((helper.M00 * z + helper.M01) * z + helper.M02) * z + helper.M03;
        tmpA.data[5] = ((helper.M04 * z + helper.M05) * z + helper.M06) * z + helper.M07;
        tmpY.data[2] = (((helper.M08 * z + helper.M09) * z + helper.M10) * z + helper.M11) * z + helper.M12;

        // Move the polynomial terms to the right-hand side (negate) and solve for [x;y].
        CommonOps_DDRM.scale(-1, tmpY);
        CommonOps_DDRM.solve(tmpA, tmpY, tmpX);

        this.x = tmpX.get(0, 0);
        this.y = tmpX.get(1, 0);
    }
}
public class OsiamConnector {
    /**
     * Retrieve a list of the of all {@link Group} resources saved in the OSIAM service. If you need to have all Group
     * but the number is very large, this method can be slow. In this case you can also use Query.Builder with no filter
     * to split the number of Groups returned.
     *
     * @param accessToken the OSIAM access token for the current session
     * @param attributes the list of attributes that should be returned in the response
     * @return a list of all groups
     * @throws UnauthorizedException if the request could not be authorized.
     * @throws ForbiddenException if the scope doesn't allow this request
     * @throws ConnectionInitializationException if the connection to the given OSIAM service could not be initialized
     * @throws IllegalStateException if OSIAM's endpoint(s) are not properly configured
     */
    public List<Group> getAllGroups(AccessToken accessToken, String... attributes) {
        // NOTE(review): the 'attributes' parameter is documented but never forwarded -
        // the underlying call receives only the access token, so attribute filtering is
        // silently ignored. Confirm whether getGroupService().getAllGroups has an
        // attributes-aware overload and whether it should be used here.
        return getGroupService().getAllGroups(accessToken);
    }
}
public class AbstractDescribableScriptPlugin {
    /**
     * Loads the plugin configuration values stored in project or framework properties.
     *
     * @param context execution context
     * @param localDataContext current context data
     * @param description plugin description
     * @param instanceData instance data
     * @param serviceName service name
     * @return context data with a new "config" entry containing the loaded plugin config
     *         properties (and, for plugins at version 2.0+, an additional entry keyed by the
     *         lower-cased service name).
     * @throws ConfigurationException configuration error
     */
    protected Map<String, Map<String, String>> loadConfigData(
            final ExecutionContext context,
            final Map<String, Object> instanceData,
            final Map<String, Map<String, String>> localDataContext,
            final Description description,
            final String serviceName) throws ConfigurationException {
        // Resolve instance-scoped property values for this plugin provider.
        final PropertyResolver resolver = PropertyResolverFactory.createPluginRuntimeResolver(
                context, instanceData, serviceName, getProvider().getName());
        final Map<String, Object> config = PluginAdapterUtility.mapDescribedProperties(
                resolver, description, PropertyScope.Instance);

        // expand properties: substitute ${...} data references with values from the
        // current context before converting everything to plain strings.
        Map<String, Object> expanded = DataContextUtils.replaceDataReferences(config, localDataContext);
        Map<String, String> data = MapData.toStringStringMap(expanded);
        loadContentConversionPropertyValues(data, context, description.getProperties());

        Map<String, Map<String, String>> newLocalDataContext = localDataContext;
        VersionCompare pluginVersion = VersionCompare.forString(provider.getPluginMeta().getRundeckPluginVersion());
        if (pluginVersion.atLeast(VersionCompare.forString(ScriptPluginProviderLoader.VERSION_2_0))) {
            // new context variable name: plugins at 2.0+ also see their config under
            // the service name (lower-cased).
            newLocalDataContext = DataContextUtils.addContext(serviceName.toLowerCase(), data, localDataContext);
        }
        // using "config" name for old plugins
        return DataContextUtils.addContext("config", data, newLocalDataContext);
    }
}
public class DestinationDescriptionMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * Serializes each destination field through the protocol marshaller using its
     * binding; the call order defines the field order on the wire.
     *
     * @param destinationDescription the object to marshall; must not be null
     * @param protocolMarshaller the marshaller to write the fields to
     * @throws SdkClientException if the argument is null or any field fails to marshall
     */
    public void marshall(DestinationDescription destinationDescription, ProtocolMarshaller protocolMarshaller) {
        if (destinationDescription == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(destinationDescription.getDestinationId(), DESTINATIONID_BINDING);
            protocolMarshaller.marshall(destinationDescription.getS3DestinationDescription(), S3DESTINATIONDESCRIPTION_BINDING);
            protocolMarshaller.marshall(destinationDescription.getExtendedS3DestinationDescription(), EXTENDEDS3DESTINATIONDESCRIPTION_BINDING);
            protocolMarshaller.marshall(destinationDescription.getRedshiftDestinationDescription(), REDSHIFTDESTINATIONDESCRIPTION_BINDING);
            protocolMarshaller.marshall(destinationDescription.getElasticsearchDestinationDescription(), ELASTICSEARCHDESTINATIONDESCRIPTION_BINDING);
            protocolMarshaller.marshall(destinationDescription.getSplunkDestinationDescription(), SPLUNKDESTINATIONDESCRIPTION_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class DescribeServicesRequest { /** * A list of services to describe . You may specify up to 10 services to describe in a single operation . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setServices ( java . util . Collection ) } or { @ link # withServices ( java . util . Collection ) } if you want to override * the existing values . * @ param services * A list of services to describe . You may specify up to 10 services to describe in a single operation . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeServicesRequest withServices ( String ... services ) { } }
if ( this . services == null ) { setServices ( new com . amazonaws . internal . SdkInternalList < String > ( services . length ) ) ; } for ( String ele : services ) { this . services . add ( ele ) ; } return this ;
public class LogSemiring { /** * ( non - Javadoc ) * @ see * edu . cmu . sphinx . fst . weight . Semiring # plus ( edu . cmu . sphinx . fst . weight . float , * edu . cmu . sphinx . fst . weight . float ) */ @ Override public double plus ( double w1 , double w2 ) { } }
if ( ! isMember ( w1 ) || ! isMember ( w2 ) ) { return Double . NEGATIVE_INFINITY ; } if ( w1 == Double . POSITIVE_INFINITY ) { return w2 ; } else if ( w2 == Double . POSITIVE_INFINITY ) { return w1 ; } return ( double ) - Math . log ( Math . exp ( - w1 ) + Math . exp ( - w2 ) ) ;