signature
stringlengths 43
39.1k
| implementation
stringlengths 0
450k
|
|---|---|
public class AuthServlet { /** * Adjusts the information cookie based on the authentication token
* @ param request
* @ param response
* @ throws ServletException
* @ throws IOException */
private void performAdjustCookies ( HttpServletRequest request , HttpServletResponse response ) throws Exception { } }
|
// Best-effort cookie login: any verification failure simply leaves the caller anonymous.
User user = null;
boolean loggedIn = false;
try {
    final Cookie cookie = getCookieFromRequest(request);
    if (cookie != null) {
        user = CookieAuthentication.verifyCookieString(userRepository, cookie.getValue());
        loggedIn = true;
    }
} catch (Exception ignored) {
    // Deliberately swallowed: a missing/invalid/expired cookie just means "not logged in".
}
// Fall back to the default (anonymous) user when no valid cookie was presented.
if (user == null) {
    user = userRepository.getDefaultUser();
}
acceptRequest(response, loggedIn, user);
|
public class ConfluenceGreenPepper { /** * Retrieves the home repository of the confluence space .
* @ param spaceKey a { @ link java . lang . String } object .
* @ return the home repository of the confluence space .
* @ throws com . greenpepper . server . GreenPepperServerException if any . */
public Repository getHomeRepository ( String spaceKey ) throws GreenPepperServerException { } }
|
String uid = getSettingsManager ( ) . getGlobalSettings ( ) . getSiteTitle ( ) + "-" + spaceKey ; Repository repository = Repository . newInstance ( uid ) ; repository . setMaxUsers ( getNumberOfUserForGreenPepperUserGroup ( ) ) ; return repository ;
|
public class FSNamesystem { /** * * The client would like to let go of a given file */
public boolean abandonFile ( String src , String holder ) throws IOException { } }
|
writeLock ( ) ; try { if ( NameNode . stateChangeLog . isDebugEnabled ( ) ) { NameNode . stateChangeLog . debug ( "FILE* NameSystem.abandonFile: " + src ) ; } if ( isInSafeMode ( ) ) { throw new SafeModeException ( "Cannot abandon file " + src , safeMode ) ; } checkLease ( src , holder ) ; internalReleaseLeaseOne ( leaseManager . getLease ( holder ) , src ) ; if ( NameNode . stateChangeLog . isDebugEnabled ( ) ) { NameNode . stateChangeLog . debug ( "FILE* NameSystem.abandonFile: " + " has been scheduled for lease recovery" ) ; } return true ; } finally { writeUnlock ( ) ; }
|
public class RecoveringZooKeeper { /** * - - - - - Internal Helpers - - - - - / / */
private String createNonSequential ( String path , byte [ ] data , List < ACL > acl , CreateMode createMode ) throws KeeperException , InterruptedException { } }
|
// Retry loop around ZooKeeper create() for NON-sequential nodes.
// NODEEXISTS is treated as success: an earlier attempt that appeared to time out may in
// fact have created the node, so the path is returned as-is.
// CONNECTIONLOSS / OPERATIONTIMEOUT are retried with back-off until the retry budget is
// exhausted; any other KeeperException propagates immediately.
RetryCounter retryCounter = retryCounterFactory . create ( ) ; while ( true ) { try { return zk . create ( path , data , acl , createMode ) ; } catch ( KeeperException e ) { switch ( e . code ( ) ) { case NODEEXISTS : // Non - sequential node was successfully created
return path ; case CONNECTIONLOSS : case OPERATIONTIMEOUT : LOG . warn ( "Possibly transient ZooKeeper exception: " + e ) ; if ( ! retryCounter . shouldRetry ( ) ) { LOG . error ( "ZooKeeper create failed after " + retryCounter . getMaxRetries ( ) + " retries" ) ; throw e ; } break ; default : throw e ; } } LOG . info ( "Retrying ZooKeeper after sleeping..." ) ; retryCounter . sleepUntilNextRetry ( ) ; retryCounter . useRetry ( ) ; }
|
public class FnMutableDateTime { /** * It converts the input { @ link String } into a { @ link MutableDateTime } using the given pattern parameter and with the given
* { @ link DateTimeZone } . If the pattern includes either , the name of the month or day of week , a conversion
* accepting a { @ link Locale } must be used instead
* @ param pattern string with the format of the input String
* @ param dateTimeZone the the time zone ( { @ link DateTimeZone } ) to be used
* @ return the { @ link MutableDateTime } created from the input and arguments */
public static final Function < String , MutableDateTime > strToMutableDateTime ( String pattern , DateTimeZone dateTimeZone ) { } }
|
return new StringToMutableDateTime ( pattern , dateTimeZone ) ;
|
public class RepositoryImpl { /** * Creation contains three steps . First
* < code > configWorkspace ( WorkspaceEntry wsConfig ) < / code > - registration a new
* configuration in RepositoryContainer and create WorkspaceContainer .
* Second , the main step , is
* < code > initWorkspace ( ) < / code > -
* initializing workspace by name and root nodetype . Third , final step ,
* starting all components of workspace . Before creation workspace < b > must be
* configured < / b >
* @ see RepositoryImpl # configWorkspace ( org . exoplatform . services . jcr . config . WorkspaceEntry )
* @ see WorkspaceInitializer # initWorkspace ( )
* @ param workspaceName - Creates a new Workspace with the specified name
* @ throws RepositoryException */
public synchronized void createWorkspace ( String workspaceName ) throws RepositoryException { } }
|
SecurityHelper . validateSecurityPermission ( JCRRuntimePermissions . MANAGE_REPOSITORY_PERMISSION ) ; final WorkspaceContainer wsContainer = repositoryContainer . getWorkspaceContainer ( workspaceName ) ; if ( wsContainer == null ) { throw new RepositoryException ( "Workspace " + workspaceName + " is not configured. Use RepositoryImpl.configWorkspace() method" ) ; } final WorkspaceInitializer workspaceInitializer = repositoryContainer . getWorkspaceContainer ( workspaceName ) . getWorkspaceInitializer ( ) ; SystemParametersPersistenceConfigurator sppc = ( SystemParametersPersistenceConfigurator ) repositoryContainer . getComponentInstanceOfType ( SystemParametersPersistenceConfigurator . class ) ; if ( sppc != null ) { WorkspaceEntry workspaceEntry = repositoryContainer . getWorkspaceEntry ( workspaceName ) ; repositoryContainer . setInitializerAndValidateOverriddenParameters ( workspaceEntry , workspaceInitializer ) ; } if ( isWorkspaceInitialized ( workspaceName ) ) { LOG . warn ( "Workspace '" + workspaceName + "' is presumably initialized. config canceled" ) ; return ; } try { SecurityHelper . doPrivilegedExceptionAction ( new PrivilegedExceptionAction < Void > ( ) { public Void run ( ) throws Exception { workspaceInitializer . initWorkspace ( ) ; wsContainer . start ( ) ; return null ; } } ) ; } catch ( PrivilegedActionException pae ) { Throwable cause = pae . getCause ( ) ; if ( cause instanceof RepositoryException ) { throw ( RepositoryException ) cause ; } else if ( cause instanceof RuntimeException ) { throw ( RuntimeException ) cause ; } else { throw new RuntimeException ( cause ) ; } } LOG . info ( "Workspace " + workspaceName + "@" + this . name + " is initialized" ) ;
|
public class SubscriberState { /** * Returns an empty subscriber state
* with - 1 as total updates , master as false
* and server state as empty
* @ return an empty subscriber state */
public static SubscriberState empty ( ) { } }
|
val map = new ConcurrentHashMap < String , Number > ( ) ; return SubscriberState . builder ( ) . serverState ( "empty" ) . streamId ( - 1 ) . parameterUpdaterStatus ( map ) . totalUpdates ( - 1 ) . isMaster ( false ) . build ( ) ;
|
public class BELScriptLexer { /** * $ ANTLR start " T _ _ 119" */
public final void mT__119 ( ) throws RecognitionException { } }
|
// ANTLR-generated lexer rule for token T__119: matches the literal "subProcessOf"
// (grammar BELScript.g, line 106) and emits it on the default channel.
// Generated code - do not edit by hand.
try { int _type = T__119 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // BELScript . g : 106:8 : ( ' subProcessOf ' )
// BELScript . g : 106:10 : ' subProcessOf '
{ match ( "subProcessOf" ) ; } state . type = _type ; state . channel = _channel ; } finally { }
|
public class PhotonGlobalState { /** * Set the default application ID of an application servlet .
* @ param sDefaultApplicationID
* The last application ID to be set . May be < code > null < / code > .
* @ return this for chaining */
@ Nonnull public PhotonGlobalState setDefaultApplicationID ( @ Nullable final String sDefaultApplicationID ) { } }
|
m_aRWLock . writeLocked ( ( ) -> { if ( ! EqualsHelper . equals ( m_sDefaultApplicationID , sDefaultApplicationID ) ) { m_sDefaultApplicationID = sDefaultApplicationID ; if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( "Default application ID set to '" + sDefaultApplicationID + "'" ) ; } } ) ; return this ;
|
public class DNSName { /** * Return subtree depth of this name for purposes of determining
* NameConstraints minimum and maximum bounds and for calculating
* path lengths in name subtrees .
* @ returns distance of name from root
* @ throws UnsupportedOperationException if not supported for this name type */
public int subtreeDepth ( ) throws UnsupportedOperationException { } }
|
// Depth equals the number of labels, i.e. the number of dots plus one.
int depth = 1;
String rest = name;
int dot;
while ((dot = rest.lastIndexOf('.')) >= 0) {
    rest = rest.substring(0, dot);
    depth++;
}
return depth;
|
public class PermutationGenerator { public int [ ] getNext ( ) { } }
|
// Computes the next permutation in lexicographic order (the classic successor
// algorithm). The very first call (numLeft == total) returns the initial permutation
// unchanged. Precondition: callers must consult hasMore()/numLeft first - once the
// sequence is exhausted, the j-scan below would walk off the left end of the array.
if ( numLeft . equals ( total ) ) { numLeft = numLeft . subtract ( BigInteger . ONE ) ; return a ; } int temp ; // Find largest index j with a [ j ] < a [ j + 1]
int j = a . length - 2 ; while ( a [ j ] > a [ j + 1 ] ) { j -- ; } // Find index k such that a [ k ] is smallest integer
// greater than a [ j ] to the right of a [ j ]
int k = a . length - 1 ; while ( a [ j ] > a [ k ] ) { k -- ; } // Interchange a [ j ] and a [ k ]
temp = a [ k ] ; a [ k ] = a [ j ] ; a [ j ] = temp ; // Put tail end of permutation after jth position in increasing order
int r = a . length - 1 ; int s = j + 1 ; while ( r > s ) { temp = a [ s ] ; a [ s ] = a [ r ] ; a [ r ] = temp ; r -- ; s ++ ; } numLeft = numLeft . subtract ( BigInteger . ONE ) ; return a ;
|
public class PathSequence { /** * Initialize and start the next path in the sequence . */
protected void initNextPath ( long initStamp , long tickStamp ) { } }
|
if ( _paths . size ( ) == 0 ) { _pable . pathCompleted ( tickStamp ) ; } else { _curPath = _paths . remove ( 0 ) ; _lastInit = initStamp ; _curPath . init ( _pableRep , initStamp ) ; _curPath . tick ( _pableRep , tickStamp ) ; }
|
public class OIndexManagerShared { /** * Binds POJO to ODocument . */
@ Override public ODocument toStream ( ) { } }
|
acquireExclusiveLock ( ) ; try { document . setInternalStatus ( ORecordElement . STATUS . UNMARSHALLING ) ; try { final ORecordTrackedSet idxs = new ORecordTrackedSet ( document ) ; for ( final OIndexInternal < ? > i : indexes . values ( ) ) { idxs . add ( i . updateConfiguration ( ) ) ; } document . field ( CONFIG_INDEXES , idxs , OType . EMBEDDEDSET ) ; } finally { document . setInternalStatus ( ORecordElement . STATUS . LOADED ) ; } document . setDirty ( ) ; return document ; } finally { releaseExclusiveLock ( ) ; }
|
public class Moment { /** * Deserialisierungsmethode .
* @ param in input stream
* @ param positiveLS positive leap second indicated ?
* @ return deserialized instance
* @ throws IOException in case of I / O - errors */
static Moment readTimestamp ( DataInput in , boolean positiveLS , boolean hasNanos ) throws IOException { } }
|
// Deserializes a Moment from 'in': reads the unix time (long) and, when hasNanos, the
// nanosecond fraction (int). Recognizes the three singletons (UNIX_EPOCH, MIN, MAX),
// none of which may carry a positive-leap-second flag. For a flagged leap second the
// value is only accepted when this VM's leap-second table also knows the event;
// otherwise an InvalidObjectException naming the offending calendar date is thrown.
long unixTime = in.readLong();
int nano = (hasNanos ? in.readInt() : 0);
if (unixTime == 0) {
    if (positiveLS) {
        throw new InvalidObjectException("UTC epoch is no leap second.");
    } else if (nano == 0) {
        return UNIX_EPOCH;
    }
}
if ((unixTime == MIN_LIMIT) && (nano == 0)) {
    if (positiveLS) {
        throw new InvalidObjectException("Minimum is no leap second.");
    }
    return MIN;
} else if ((unixTime == MAX_LIMIT) && (nano == MRD - 1)) {
    if (positiveLS) {
        throw new InvalidObjectException("Maximum is no leap second.");
    }
    return MAX;
} else {
    checkFraction(nano);
}
if (positiveLS) {
    LeapSeconds ls = LeapSeconds.getInstance();
    if (!ls.isEnabled() // keep LS - state when propagating to next vm
        || ls.isPositiveLS(ls.enhance(unixTime) + 1)) {
        nano |= POSITIVE_LEAP_MASK;
    } else {
        long packed = GregorianMath.toPackedDate(unixTime);
        int month = GregorianMath.readMonth(packed);
        int day = GregorianMath.readDayOfMonth(packed);
        throw new InvalidObjectException(
            "Not registered as leap second event: "
            + GregorianMath.readYear(packed)
            + "-" + ((month < 10) ? "0" : "") + month
            // BUGFIX: the original message omitted the "-" between month and day,
            // printing e.g. "2016-1231" instead of the intended "2016-12-31".
            + "-" + ((day < 10) ? "0" : "") + day
            + " [Please check leap second configurations "
            + "either of emitter vm or this target vm]");
    }
}
return new Moment(nano, unixTime);
|
public class SmithWaterman { /** * method for AbstractMatrixAligner */
@ Override protected void setProfile ( List < Step > sx , List < Step > sy ) { } }
|
profile = pair = new SimpleSequencePair < S , C > ( getQuery ( ) , getTarget ( ) , sx , xyStart [ 0 ] , getQuery ( ) . getLength ( ) - xyMax [ 0 ] , sy , xyStart [ 1 ] , getTarget ( ) . getLength ( ) - xyMax [ 1 ] ) ;
|
public class ByteBuffer { /** * Appends an < CODE > int < / CODE > . The size of the array will grow by one .
* @ param b the int to be appended
* @ return a reference to this < CODE > ByteBuffer < / CODE > object */
public ByteBuffer append_i ( int b ) { } }
|
int newcount = count + 1 ; if ( newcount > buf . length ) { byte newbuf [ ] = new byte [ Math . max ( buf . length << 1 , newcount ) ] ; System . arraycopy ( buf , 0 , newbuf , 0 , count ) ; buf = newbuf ; } buf [ count ] = ( byte ) b ; count = newcount ; return this ;
|
public class BboxService { /** * Calculates the union of 2 bounding boxes .
* @ param one
* The first bounding box .
* @ param two
* The second bounding box .
* @ return A new bounding box representing the union . */
public static Bbox union ( Bbox one , Bbox two ) { } }
|
if ( two . getWidth ( ) == 0 && two . getHeight ( ) == 0 && two . getX ( ) == 0 && two . getY ( ) == 0 ) { return new Bbox ( one . getX ( ) , one . getY ( ) , one . getWidth ( ) , one . getHeight ( ) ) ; } if ( one . getWidth ( ) == 0 && one . getHeight ( ) == 0 && one . getX ( ) == 0 && one . getY ( ) == 0 ) { return new Bbox ( two . getX ( ) , two . getY ( ) , two . getWidth ( ) , two . getHeight ( ) ) ; } double minx = two . getX ( ) < one . getX ( ) ? two . getX ( ) : one . getX ( ) ; double maxx = two . getMaxX ( ) > one . getMaxX ( ) ? two . getMaxX ( ) : one . getMaxX ( ) ; double miny = two . getY ( ) < one . getY ( ) ? two . getY ( ) : one . getY ( ) ; double maxy = two . getMaxY ( ) > one . getMaxY ( ) ? two . getMaxY ( ) : one . getMaxY ( ) ; return new Bbox ( minx , miny , ( maxx - minx ) , ( maxy - miny ) ) ;
|
public class DeleteDatasetRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteDatasetRequest deleteDatasetRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( deleteDatasetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteDatasetRequest . getIdentityPoolId ( ) , IDENTITYPOOLID_BINDING ) ; protocolMarshaller . marshall ( deleteDatasetRequest . getIdentityId ( ) , IDENTITYID_BINDING ) ; protocolMarshaller . marshall ( deleteDatasetRequest . getDatasetName ( ) , DATASETNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class VirtualMachineScaleSetsInner { /** * Redeploy one or more virtual machines in a VM scale set .
* @ param resourceGroupName The name of the resource group .
* @ param vmScaleSetName The name of the VM scale set .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable for the request */
public Observable < OperationStatusResponseInner > redeployAsync ( String resourceGroupName , String vmScaleSetName ) { } }
|
return redeployWithServiceResponseAsync ( resourceGroupName , vmScaleSetName ) . map ( new Func1 < ServiceResponse < OperationStatusResponseInner > , OperationStatusResponseInner > ( ) { @ Override public OperationStatusResponseInner call ( ServiceResponse < OperationStatusResponseInner > response ) { return response . body ( ) ; } } ) ;
|
public class RPC { /** * Construct a client - side proxy object that implements the named protocol ,
* talking to a server at the named address . */
public static < T extends VersionedProtocol > T getProxy ( Class < T > protocol , long clientVersion , InetSocketAddress addr , Configuration conf , SocketFactory factory ) throws IOException { } }
|
return getProtocolProxy ( protocol , clientVersion , addr , conf , factory ) . getProxy ( ) ;
|
public class DeleteIPSetRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteIPSetRequest deleteIPSetRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( deleteIPSetRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteIPSetRequest . getDetectorId ( ) , DETECTORID_BINDING ) ; protocolMarshaller . marshall ( deleteIPSetRequest . getIpSetId ( ) , IPSETID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class PdfCell { /** * methods */
private void addLine ( PdfLine line ) { } }
|
lines . add ( line ) ; contentHeight += line . height ( ) ; lastLine = line ; this . line = null ;
|
public class Cache { /** * Sets up a recurring task every n seconds to report on the status of this cache . This can be useful
* if you are doing exploratory caching and wish to monitor the performance of this cache with minimal fuss .
* Consider
* @ param seconds how often to log the entry
* @ param logger the logger to use
* @ return this */
public synchronized Cache < K , V > logEveryNSeconds ( int seconds , final ILogger logger ) { } }
|
if ( _loggingTask == null ) { ScheduledExecutorService service = Executors . newScheduledThreadPool ( 1 ) ; _loggingTask = service . scheduleAtFixedRate ( new Runnable ( ) { public void run ( ) { logger . info ( Cache . this ) ; } } , seconds , seconds , TimeUnit . SECONDS ) ; } else { throw new IllegalStateException ( "Logging for " + this + " is already enabled" ) ; } return this ;
|
public class ContentRepository { /** * Grants the specified principal ( user or group ) on the specified resource one or more JCR permissions .
* @ param principalId
* the id of the principal to grant privileges
* @ param path
* the path of the node to which a privilege should be applied
* @ param privileges
* the privileges to grant . */
public void grant ( String principalId , String path , String ... privileges ) throws RepositoryException { } }
|
// Resolve everything up front (order preserved so failures surface as before).
final Session session = this.getAdminSession();
final AccessControlManager acm = session.getAccessControlManager();
final Privilege[] grantedPrivileges = this.toPrivilegeArray(session, privileges);
final AccessControlList acl = this.getAccessControlList(session, path);
final Principal principal = this.resolvePrincipal(principalId);
// Append the new entry, re-apply the policy (required by JCR), then persist.
acl.addAccessControlEntry(principal, grantedPrivileges);
acm.setPolicy(path, acl);
session.save();
|
public class CompressorHttp2ConnectionEncoder { /** * Called after the super class has written the headers and created any associated stream objects .
* @ param compressor The compressor associated with the stream identified by { @ code streamId } .
* @ param streamId The stream id for which the headers were written . */
private void bindCompressorToStream ( EmbeddedChannel compressor , int streamId ) { } }
|
if ( compressor != null ) { Http2Stream stream = connection ( ) . stream ( streamId ) ; if ( stream != null ) { stream . setProperty ( propertyKey , compressor ) ; } }
|
public class DSUtil { /** * Returns an immutable < code > Iterable < / code > that is the union
* of two < code > Iterable < / code > s . The resulting
* < code > Iterable < / code > contains first all elements from
* < code > it1 < / code > , and next all elements from < code > it2 < / code > . */
public static < E > Iterable < E > unionIterable ( Iterable < E > it1 , Iterable < E > it2 ) { } }
|
return unionIterable ( Arrays . < Iterable < E > > asList ( it1 , it2 ) ) ;
|
public class WebAppSecurityCollaboratorImpl { /** * Format the map of config change attributes for the audit function . The output format would be the
* same as original WebAppSecurityConfig . getChangedProperties method .
* @ return String in the format of " name = value , name = value , . . . " encapsulating the
* properties that are different between this WebAppSecurityConfig and the specified one */
private String toStringFormChangedPropertiesMap ( Map < String , String > delta ) { } }
|
if ( delta == null || delta . isEmpty ( ) ) { return "" ; } StringBuffer sb = new StringBuffer ( ) ; for ( Map . Entry < String , String > entry : delta . entrySet ( ) ) { if ( sb . length ( ) > 0 ) { sb . append ( "," ) ; } sb . append ( entry . getKey ( ) ) . append ( "=" ) . append ( entry . getValue ( ) ) ; } return sb . toString ( ) ;
|
public class vrid6 { /** * Use this API to fetch all the vrid6 resources that are configured on netscaler . */
public static vrid6 [ ] get ( nitro_service service ) throws Exception { } }
|
vrid6 obj = new vrid6 ( ) ; vrid6 [ ] response = ( vrid6 [ ] ) obj . get_resources ( service ) ; return response ;
|
public class CounterController { /** * Retrieve information about a specific counter .
* @ param name name
* @ return counter information */
@ RequestMapping ( value = "/{name}" , method = RequestMethod . GET ) public CounterResource display ( @ PathVariable ( "name" ) String name ) { } }
|
Metric < Double > c = findCounter ( name ) ; return counterResourceAssembler . toResource ( c ) ;
|
public class GlUtil { /** * Creates a new program from the supplied vertex and fragment shaders .
* @ return A handle to the program , or 0 on failure . */
public static int createProgram ( String vertexSource , String fragmentSource ) { } }
|
int vertexShader = loadShader ( GLES20 . GL_VERTEX_SHADER , vertexSource ) ; if ( vertexShader == 0 ) { return 0 ; } int pixelShader = loadShader ( GLES20 . GL_FRAGMENT_SHADER , fragmentSource ) ; if ( pixelShader == 0 ) { return 0 ; } int program = GLES20 . glCreateProgram ( ) ; checkGlError ( "glCreateProgram" ) ; if ( program == 0 ) { Log . e ( TAG , "Could not create program" ) ; } GLES20 . glAttachShader ( program , vertexShader ) ; checkGlError ( "glAttachShader" ) ; GLES20 . glAttachShader ( program , pixelShader ) ; checkGlError ( "glAttachShader" ) ; GLES20 . glLinkProgram ( program ) ; int [ ] linkStatus = new int [ 1 ] ; GLES20 . glGetProgramiv ( program , GLES20 . GL_LINK_STATUS , linkStatus , 0 ) ; if ( linkStatus [ 0 ] != GLES20 . GL_TRUE ) { Log . e ( TAG , "Could not link program: " ) ; Log . e ( TAG , GLES20 . glGetProgramInfoLog ( program ) ) ; GLES20 . glDeleteProgram ( program ) ; program = 0 ; } return program ;
|
public class SectionLoader { /** * Returns the file offset of the data directory entry the given key belongs
* to .
* Returns absent if data directory doesn ' t exist .
* @ param dataDirKey
* the key of the data directory entry
* @ return file offset of the rva that is in the data directory entry with
* the given key , absent if file offset can not be determined */
public Optional < Long > maybeGetFileOffsetFor ( DataDirectoryKey dataDirKey ) { } }
|
Optional < DataDirEntry > dataDir = optHeader . maybeGetDataDirEntry ( dataDirKey ) ; if ( dataDir . isPresent ( ) ) { long rva = dataDir . get ( ) . getVirtualAddress ( ) ; return Optional . of ( getFileOffset ( rva ) ) ; } return Optional . absent ( ) ;
|
public class Client { /** * Gets a list of Role resources .
* @ param queryParameters Query parameters of the Resource
* Parameters to filter the result of the list
* @ return List of Role
* @ throws OAuthSystemException - if there is a IOException reading parameters of the httpURLConnection
* @ throws OAuthProblemException - if there are errors validating the OneloginOAuthJSONResourceResponse and throwOAuthProblemException is enabled
* @ throws URISyntaxException - if there is an error when generating the target URL at the URIBuilder constructor
* @ see com . onelogin . sdk . model . Role
* @ see < a target = " _ blank " href = " https : / / developers . onelogin . com / api - docs / 1 / roles / get - roles " > Get Roles documentation < / a > */
public List < Role > getRoles ( HashMap < String , String > queryParameters ) throws OAuthSystemException , OAuthProblemException , URISyntaxException { } }
|
return getRoles ( queryParameters , this . maxResults ) ;
|
public class AbstractPropertyReader { /** * If property file is xml .
* @ param propertyFileName
* @ param puMetadata
* @ return */
private ClientProperties onParseXML ( String propertyFileName , PersistenceUnitMetadata puMetadata ) { } }
|
InputStream inStream = puMetadata . getClassLoader ( ) . getResourceAsStream ( propertyFileName ) ; if ( inStream == null ) { propertyFileName = KunderaCoreUtils . resolvePath ( propertyFileName ) ; try { inStream = new FileInputStream ( new File ( propertyFileName ) ) ; } catch ( FileNotFoundException e ) { log . warn ( "File {} not found, Caused by " , propertyFileName ) ; return null ; } } if ( inStream != null ) { xStream = getXStreamObject ( ) ; Object o = xStream . fromXML ( inStream ) ; return ( ClientProperties ) o ; } return null ;
|
public class OffsetDateTime { /** * Returns a new date - time based on this one , returning { @ code this } where possible .
* @ param dateTime the date - time to create with , not null
* @ param offset the zone offset to create with , not null */
private OffsetDateTime with ( LocalDateTime dateTime , ZoneOffset offset ) { } }
|
if ( this . dateTime == dateTime && this . offset . equals ( offset ) ) { return this ; } return new OffsetDateTime ( dateTime , offset ) ;
|
public class KunderaCriteriaBuilder { /** * ( non - Javadoc )
* @ see
* javax . persistence . criteria . CriteriaBuilder # greaterThanOrEqualTo ( javax
* . persistence . criteria . Expression , javax . persistence . criteria . Expression ) */
@ Override public < Y extends Comparable < ? super Y > > Predicate greaterThanOrEqualTo ( Expression < ? extends Y > lhs , Expression < ? extends Y > rhs ) { } }
|
return new ComparisonPredicate ( lhs , rhs , ConditionalOperator . GTE ) ;
|
public class ResolverSystemFactory { /** * Creates a new { @ link ResolverSystem } instance of the specified user view type using the specified { @ link ClassLoader } .
* Will consult a configuration file visible to the specified { @ link ClassLoader } named
* " META - INF / services / $ fullyQualfiedClassName " which should contain a key = value format with the key
* { @ link ResolverSystemFactory # KEY _ IMPL _ CLASS _ NAME } . The implementation class name must have a no - arg constructor .
* @ param userViewClass The user view type
* @ param cl The { @ link ClassLoader }
* @ return The new { @ link ResolverSystem } instance of the specified user view type created by using the specified
* { @ link ClassLoader } . */
static < RESOLVERSYSTEMTYPE extends ResolverSystem > RESOLVERSYSTEMTYPE createFromUserView ( final Class < RESOLVERSYSTEMTYPE > userViewClass , final ClassLoader cl ) { } }
|
// Reflectively bootstraps the resolver system entirely inside the TARGET ClassLoader:
// 1) construct the SPI ServiceLoader in 'cl', 2) obtain the single service-loader
// implementation via the onlyOne() method, 3) construct the ServiceRegistry around it,
// 4) register the registry as the current instance, 5) resolve exactly one
// implementation of the requested user view class and cast it for return.
// Every call goes through Invokable so no types from this ClassLoader leak into 'cl'.
assert userViewClass != null : "user view class must be specified" ; assert cl != null : "ClassLoader must be specified" ; // get SPI service loader
final Object spiServiceLoader = new Invokable ( cl , CLASS_NAME_SPISERVICELOADER ) . invokeConstructor ( new Class [ ] { ClassLoader . class } , new Object [ ] { cl } ) ; // return service loader implementation
final Object serviceLoader = new Invokable ( cl , CLASS_NAME_SPISERVICELOADER ) . invokeMethod ( METHOD_NAME_ONLY_ONE , new Class [ ] { Class . class , Class . class } , spiServiceLoader , new Object [ ] { Invokable . loadClass ( cl , CLASS_NAME_SPISERVICELOADER ) , spiServiceLoader . getClass ( ) } ) ; // get registry
final Object serviceRegistry = new Invokable ( cl , CLASS_NAME_SERVICEREGISTRY ) . invokeConstructor ( new Class < ? > [ ] { Invokable . loadClass ( cl , CLASS_NAME_SERVICELOADER ) } , new Object [ ] { serviceLoader } ) ; // register itself
new Invokable ( cl , serviceRegistry . getClass ( ) ) . invokeMethod ( METHOD_NAME_REGISTER , new Class < ? > [ ] { serviceRegistry . getClass ( ) } , null , new Object [ ] { serviceRegistry } ) ; Object userViewObject = new Invokable ( cl , serviceRegistry . getClass ( ) ) . invokeMethod ( METHOD_NAME_ONLY_ONE , new Class < ? > [ ] { Class . class } , serviceRegistry , new Object [ ] { userViewClass } ) ; return userViewClass . cast ( userViewObject ) ;
|
public class AbstractSearcher { /** * Sets the Matcher used to match and find the desired element or elements in the collection .
* @ param matcher the Matcher used to match and find the desired element or elements in the collection during
* the search operation .
* @ throws NullPointerException if the Matcher reference to be used by this Searcher is null .
* @ see # getMatcher ( )
* @ see org . cp . elements . util . search . Matcher */
public void setMatcher ( final Matcher matcher ) { } }
|
Assert . notNull ( matcher , "The Matcher used to match elements in the collection during the search operation by this Searcher ({0}) cannot be null!" , getClass ( ) . getName ( ) ) ; this . matcher = matcher ;
|
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public EClass getIfcAnnotationFillArea ( ) { } }
|
if ( ifcAnnotationFillAreaEClass == null ) { ifcAnnotationFillAreaEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 19 ) ; } return ifcAnnotationFillAreaEClass ;
|
public class AbstractEmbedVaadinTomcat { /** * Initializes the vaadin servlet and maps it to < tt > / * < / tt > .
* Returns the associated { @ link Wrapper } for further customization .
* @ param servlet the servlet to use to handle vaadin calls .
* @ param < T > the type of the servlet
* @ return the created wrapper for the servlet */
protected < T extends VaadinServlet > Wrapper initializeVaadinServlet ( T servlet ) { } }
|
// Register the servlet under the name "vaadin" and map it to the context root.
final Wrapper wrapper = Tomcat.addServlet(getContext(), "vaadin", servlet);
// Propagate an explicitly configured widgetset, if any.
if (getConfig().getWidgetSet() != null) {
    wrapper.addInitParameter("widgetset", getConfig().getWidgetSet());
}
wrapper.addMapping("/*");
return wrapper;
|
public class FutureImpl { /** * All callers to get ( ) are now returned . < br >
* Multiple calls are ignored . < br >
* Calls after cancel are ignored .
* @ param value Value */
public void set ( final T value ) { } }
|
if ( this . value . compareAndSet ( null , value ) ) { this . listeners . clear ( ) ; this . cdl . countDown ( ) ; }
|
public class Messenger { /** * Finding peers by text query
* @ param query text query
* @ return found peers */
@ ObjectiveCName ( "findPeersWithQuery:" ) public Command < List < PeerSearchEntity > > findPeers ( String query ) { } }
|
return callback -> modules . getSearchModule ( ) . findPeers ( query ) . then ( v -> callback . onResult ( v ) ) . failure ( e -> callback . onError ( e ) ) ;
|
public class QueuePlugin { /** * Replaces the current named queue with the given queue on all matched elements . */
@ SuppressWarnings ( "unchecked" ) public T queue ( String name , Queue < ? > queue ) { } }
|
for ( Element e : elements ( ) ) { replacequeue ( e , name , queue ) ; } return ( T ) this ;
|
public class GitkitClient { /** * Gets all user info of this web site . Underlying requests are paginated and send on demand with
* given size .
* @ param resultsPerRequest pagination size
* @ return lazy iterator over all user accounts . */
public Iterator < GitkitUser > getAllUsers ( final Integer resultsPerRequest ) { } }
|
return new DownloadIterator < GitkitUser > ( ) { private String nextPageToken = null ; @ Override protected Iterator < GitkitUser > getNextResults ( ) { try { JSONObject response = rpcHelper . downloadAccount ( nextPageToken , resultsPerRequest ) ; nextPageToken = response . has ( "nextPageToken" ) ? response . getString ( "nextPageToken" ) : null ; if ( response . has ( "users" ) ) { return jsonToList ( response . getJSONArray ( "users" ) ) . iterator ( ) ; } } catch ( JSONException e ) { logger . warning ( e . getMessage ( ) ) ; } catch ( GitkitServerException e ) { logger . warning ( e . getMessage ( ) ) ; } catch ( GitkitClientException e ) { logger . warning ( e . getMessage ( ) ) ; } return ImmutableSet . < GitkitUser > of ( ) . iterator ( ) ; } } ;
|
public class OSGiUtil {
    /**
     * Installs a bundle only if it does not already exist. If a matching bundle
     * (same symbolic name and version) is already loaded locally, that existing
     * bundle is returned instead of installing a new one.
     *
     * @param context        the bundle context used for lookup and installation
     * @param bundle         the resource containing the bundle to install
     * @param checkExistence when true, look for an already-loaded bundle first
     * @return the existing bundle when found, otherwise the freshly installed one
     * @throws IOException     if the bundle resource cannot be read
     * @throws BundleException if the resource is not a valid bundle or installation fails
     */
    public static Bundle installBundle(BundleContext context, Resource bundle, boolean checkExistence) throws IOException, BundleException {
        if (checkExistence) {
            BundleFile bf = new BundleFile(bundle);
            // Reject resources that are not OSGi bundles before touching the framework.
            if (!bf.isBundle()) throw new BundleException(bundle + " is not a valid bundle!");
            Bundle existing = loadBundleFromLocal(context, bf.getSymbolicName(), bf.getVersion(), false, null);
            if (existing != null) return existing;
        }
        return _loadBundle(context, bundle.getAbsolutePath(), bundle.getInputStream(), true);
    }
}
|
public class SimpleQuery {
    /**
     * Returns the maximum size in bytes that each result row from this query may
     * occupy, computed from the described result fields. The value is cached
     * until/unless the query is re-described.
     *
     * @return max result row size in bytes; 0 if the query returns no results;
     *         -1 if any field length is unknown or unbounded
     * @throws IllegalStateException if the query has not been described
     */
    public int getMaxResultRowSize() {
        // Serve the cached value computed by a previous call.
        if (cachedMaxResultRowSize != null) {
            return cachedMaxResultRowSize;
        }
        if (!this.statementDescribed) {
            throw new IllegalStateException("Cannot estimate result row size on a statement that is not described");
        }
        int maxResultRowSize = 0;
        if (fields != null) {
            for (Field f : fields) {
                final int fieldLength = f.getLength();
                if (fieldLength < 1 || fieldLength >= 65535) {
                    /* Field length unknown or large; we can't make any safe estimates about the result size,
                     * so we have to fall back to sending queries individually. */
                    maxResultRowSize = -1;
                    break;
                }
                maxResultRowSize += fieldLength;
            }
        }
        cachedMaxResultRowSize = maxResultRowSize;
        return maxResultRowSize;
    }
}
|
public class ReferencesBuilder { /** * Appends the specified reference and section number to this instance , returning { @ code this } .
* @ param reference The reference to append
* @ param section The section number of the reference to append
* @ return { @ code this } */
@ Override public ReferencesBuilder append ( final String reference , final int section ) { } }
|
return doAppend ( Collections . singletonList ( new Reference ( reference , section ) ) ) ;
|
public class Crowd { /** * / @ param [ in ] params The new configuration . */
public void setObstacleAvoidanceParams ( int idx , ObstacleAvoidanceParams params ) { } }
|
if ( idx >= 0 && idx < DT_CROWD_MAX_OBSTAVOIDANCE_PARAMS ) { m_obstacleQueryParams [ idx ] = new ObstacleAvoidanceParams ( params ) ; }
|
public class Pipeline { /** * The activities that perform transformations on the messages .
* @ param activities
* The activities that perform transformations on the messages . */
public void setActivities ( java . util . Collection < PipelineActivity > activities ) { } }
|
if ( activities == null ) { this . activities = null ; return ; } this . activities = new java . util . ArrayList < PipelineActivity > ( activities ) ;
|
public class ConnectionMonitorsInner { /** * Query a snapshot of the most recent connection states .
* @ param resourceGroupName The name of the resource group containing Network Watcher .
* @ param networkWatcherName The name of the Network Watcher resource .
* @ param connectionMonitorName The name given to the connection monitor .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the ConnectionMonitorQueryResultInner object */
public Observable < ConnectionMonitorQueryResultInner > beginQueryAsync ( String resourceGroupName , String networkWatcherName , String connectionMonitorName ) { } }
|
return beginQueryWithServiceResponseAsync ( resourceGroupName , networkWatcherName , connectionMonitorName ) . map ( new Func1 < ServiceResponse < ConnectionMonitorQueryResultInner > , ConnectionMonitorQueryResultInner > ( ) { @ Override public ConnectionMonitorQueryResultInner call ( ServiceResponse < ConnectionMonitorQueryResultInner > response ) { return response . body ( ) ; } } ) ;
|
public class Cob2AvroJob {
    /**
     * Sets the job input record choice strategy class.
     *
     * @param job                 the job to configure
     * @param choiceStrategyClass the input record choice strategy class
     */
    public static void setInputChoiceStrategy(Job job, Class<? extends FromCobolChoiceStrategy> choiceStrategyClass) {
        // Stored in the job configuration; setClass validates the class against
        // the FromCobolChoiceStrategy interface.
        job.getConfiguration().setClass(CONF_INPUT_RECORD_CHOICE_STRATEGY_CLASS, choiceStrategyClass, FromCobolChoiceStrategy.class);
    }
}
|
public class AbstractVisitor { /** * Helper method to visit a collection of VisitableCommands .
* @ param ctx Invocation context
* @ param toVisit collection of commands to visit
* @ throws Throwable in the event of problems */
public void visitCollection ( InvocationContext ctx , Collection < ? extends VisitableCommand > toVisit ) throws Throwable { } }
|
for ( VisitableCommand command : toVisit ) { command . acceptVisitor ( ctx , this ) ; }
|
public class ProcessorUtils {
    /**
     * Creates the input object required by the processor and populates all of its
     * fields from the values object.
     * <p></p>
     * If {@link Processor#createInputParameter()} returns an instance of values then
     * the values object will be returned.
     *
     * @param processor the processor that the input object will be for
     * @param values    the object containing the values to put into the input object
     * @param <In>      type of the processor input object
     * @param <Out>     type of the processor output object
     */
    public static <In, Out> In populateInputParameter(final Processor<In, Out> processor, @Nonnull final Values values) {
        In inputObject = processor.createInputParameter();
        if (inputObject != null) {
            Collection<Field> fields = getAllAttributes(inputObject.getClass());
            for (Field field : fields) {
                // Resolve the value name, honoring the processor's output prefix and
                // its input-name mapping.
                String name = getInputValueName(processor.getOutputPrefix(), processor.getInputMapperBiMap(), field.getName());
                Object value = values.getObject(name, Object.class);
                if (value != null) {
                    try {
                        field.set(inputObject, value);
                    } catch (IllegalAccessException e) {
                        throw ExceptionUtils.getRuntimeException(e);
                    }
                } else {
                    // A missing value is only tolerated for fields annotated @HasDefaultValue.
                    if (field.getAnnotation(HasDefaultValue.class) == null) {
                        throw new NoSuchElementException(name + " is a required property for " + processor + " and therefore must be defined in the " + "Request Data or be an output of " + "one of the other processors. Available " + "values: " + values.asMap().keySet() + ".");
                    }
                }
            }
        }
        return inputObject;
    }
}
|
public class ReplaceRegExpTextDecorator { /** * replace the text based on a RegExpr
* @ return Text . replaceForRegExprWith ( this . target . getText ( ) , regExp , replacement ) */
public String getText ( ) { } }
|
if ( this . target == null ) throw new RequiredException ( "this.target in ReplaceTextDecorator" ) ; return Text . replaceForRegExprWith ( this . target . getText ( ) , regExp , replacement ) ;
|
public class ControllerActionTransformer {
    /**
     * Converts a method into a controller action. If the method accepts parameters,
     * a no-arg counterpart is created which delegates to the original.
     *
     * @param classNode  the controller class
     * @param methodNode the method to be converted
     * @param source     the source unit being compiled (used for error reporting)
     * @param context    the generator context
     * @return the no-arg wrapper method, or null if none was created
     */
    private MethodNode convertToMethodAction(ClassNode classNode, MethodNode methodNode, SourceUnit source, GeneratorContext context) {
        final ClassNode returnType = methodNode.getReturnType();
        Parameter[] parameters = methodNode.getParameters();
        // Default parameter values are forbidden in action methods: report a
        // compile error for every offending parameter.
        for (Parameter param : parameters) {
            if (param.hasInitialExpression()) {
                String paramName = param.getName();
                String methodName = methodNode.getName();
                String initialValue = param.getInitialExpression().getText();
                String methodDeclaration = methodNode.getText();
                String message = "Parameter [%s] to method [%s] has default value [%s]. " + "Default parameter values are not allowed in controller action methods. ([%s])";
                String formattedMessage = String.format(message, paramName, methodName, initialValue, methodDeclaration);
                GrailsASTUtils.error(source, methodNode, formattedMessage);
            }
        }
        MethodNode method = null;
        if (methodNode.getParameters().length > 0) {
            // Build a public no-arg wrapper: enforce allowed HTTP methods, bind the
            // action parameters from the request, then delegate to the original method.
            final BlockStatement methodCode = new BlockStatement();
            final BlockStatement codeToHandleAllowedMethods = getCodeToHandleAllowedMethods(classNode, methodNode.getName());
            final Statement codeToCallOriginalMethod = addOriginalMethodCall(methodNode, initializeActionParameters(classNode, methodNode, methodNode.getName(), parameters, source, context));
            methodCode.addStatement(codeToHandleAllowedMethods);
            methodCode.addStatement(codeToCallOriginalMethod);
            method = new MethodNode(methodNode.getName(), Modifier.PUBLIC, returnType, ZERO_PARAMETERS, EMPTY_CLASS_ARRAY, methodCode);
            GrailsASTUtils.copyAnnotations(methodNode, method);
            // Mark the original as the delegate target; the wrapper becomes the action.
            methodNode.addAnnotation(DELEGATING_METHOD_ANNOATION);
            annotateActionMethod(classNode, parameters, method);
            wrapMethodBodyWithExceptionHandling(classNode, method);
        } else {
            // No parameters: the original method itself becomes the action.
            annotateActionMethod(classNode, parameters, methodNode);
        }
        // Both the original method and (when created) the wrapper get exception handling.
        wrapMethodBodyWithExceptionHandling(classNode, methodNode);
        return method;
    }
}
|
public class GBSDeleteFringe {
    /**
     * Examine the final fringe and re-balance if necessary.
     *
     * <p>Examine the fringe that was produced by either rightFringe or lastFringe.
     * As this fringe may have resulted from the combination of one or more partial
     * fringes it may now require input fringe rebalancing.</p>
     *
     * @param xx     fringe bookkeeping: new t0 grandparent/parent and balance flags
     * @param g      grandparent
     * @param gLeft  if true, f was left son of g
     * @param stack  the NodeStack for delete
     * @param ntop   index within stack of t0 parent
     * @param istack a scratch InsertStack
     */
    private void lastFringe(DeleteStack.FringeNote xx, GBSNode g, boolean gLeft, DeleteStack stack, int ntop, InsertStack istack) {
        if (gLeft) { /* Original father was left child */
            stack.setNode(ntop, g.leftChild()); /* Put new left parent on stack */
        } else { /* Original parent was right child */
            stack.setNode(ntop, g.rightChild()); /* Put new right parent on stack */
        }
        GBSNode p = xx.newf;
        GBSNode q = null;
        istack.start(xx.newg, "GBSDeleteFringe.lastFringe");
        istack.resetBalancePointIndex();
        GBSNode fpoint = null; // fringe balance point, if found
        int fdepth = 0;        // fringe depth below the balance point
        int fpidx = 0;         // index within istack of the balance point
        // Walk the right spine from the new parent, pushing each node onto istack;
        // the first node with no left child becomes the fringe balance point.
        while (p != null) {
            fdepth++;
            istack.push(p);
            if (fpoint == null) {
                if (p.leftChild() == null) /* First right-heavy half leaf */ {
                    fdepth = 1;             /* Reset fringe depth */
                    fpoint = p;             /* Remember fringe balance point */
                    fpidx = istack.index(); /* Remember index to fringe balance pt. */
                }
            }
            q = p;
            p = p.rightChild();
        }
        // Maximum tolerated fringe depth depends on the K factor; multiples of
        // three allow one extra level.
        int maxBal = g.kFactor() + 1;
        if ((g.kFactor() % 3) == 0) {
            maxBal = g.kFactor() + 2;
        }
        if ((fdepth > maxBal) || ((fdepth >= maxBal) && (q.isFull()))) {
            // Fringe too deep (or at the limit with a full last node): delegate
            // rebalancing to the insert-fringe balancer.
            GBSInsertFringe.singleInstance().balance(g.kFactor(), istack, fpoint, fpidx, maxBal);
            if (xx.conditionalDecrease) {
                stack.node(ntop).setBalance(xx.conditionalBalance);
            }
            xx.conditionalDecrease = false;
        }
        if (xx.conditionalDecrease) {
            xx.depthDecrease = true;
        }
        if (xx.depthDecrease) {
            // Propagate the height decrease up the delete stack.
            GBSDeleteHeight.singleInstance().balance(stack, ntop);
        }
    }
}
|
public class JDBCPersistenceManagerImpl { /** * / * ( non - Javadoc )
* @ see com . ibm . jbatch . container . services . IPersistenceManagerService # createJobStatus ( long ) */
@ Override public JobStatus createJobStatus ( long jobInstanceId ) { } }
|
logger . entering ( CLASSNAME , "createJobStatus" , jobInstanceId ) ; Connection conn = null ; PreparedStatement statement = null ; JobStatus jobStatus = new JobStatus ( jobInstanceId ) ; try { conn = getConnection ( ) ; statement = conn . prepareStatement ( "INSERT INTO jobstatus (id, obj) VALUES(?, ?)" ) ; statement . setLong ( 1 , jobInstanceId ) ; statement . setBytes ( 2 , serializeObject ( jobStatus ) ) ; statement . executeUpdate ( ) ; } catch ( SQLException e ) { throw new PersistenceException ( e ) ; } catch ( IOException e ) { throw new PersistenceException ( e ) ; } finally { cleanupConnection ( conn , null , statement ) ; } logger . exiting ( CLASSNAME , "createJobStatus" ) ; return jobStatus ;
|
public class Reader { /** * Accumulate annotations .
* @ param annotations
* the annotations
* @ param annatt
* the annatt */
public void accumulateAnnotations ( List < String > annotations , AnnotationsAttribute annatt ) { } }
|
if ( null == annatt ) { return ; } for ( Annotation ann : annatt . getAnnotations ( ) ) { annotations . add ( ann . getTypeName ( ) ) ; }
|
public class CronTriggerImpl { /** * Updates the < code > CronTrigger < / code > ' s state based on the MISFIRE _ INSTRUCTION _ XXX that was
* selected when the < code > CronTrigger < / code > was created .
* < p > If the misfire instruction is set to MISFIRE _ INSTRUCTION _ SMART _ POLICY , then the following
* scheme will be used : < br >
* < ul >
* < li > The instruction will be interpreted as < code > MISFIRE _ INSTRUCTION _ FIRE _ ONCE _ NOW < / code >
* < / ul > */
@ Override public void updateAfterMisfire ( org . quartz . core . Calendar cal ) { } }
|
int instr = getMisfireInstruction ( ) ; if ( instr == Trigger . MISFIRE_INSTRUCTION_IGNORE_MISFIRE_POLICY ) { return ; } if ( instr == MISFIRE_INSTRUCTION_SMART_POLICY ) { instr = MISFIRE_INSTRUCTION_FIRE_ONCE_NOW ; } if ( instr == MISFIRE_INSTRUCTION_DO_NOTHING ) { Date newFireTime = getFireTimeAfter ( new Date ( ) ) ; while ( newFireTime != null && cal != null && ! cal . isTimeIncluded ( newFireTime . getTime ( ) ) ) { newFireTime = getFireTimeAfter ( newFireTime ) ; } setNextFireTime ( newFireTime ) ; } else if ( instr == MISFIRE_INSTRUCTION_FIRE_ONCE_NOW ) { setNextFireTime ( new Date ( ) ) ; }
|
public class ElementBoxView {
    /**
     * Converts a Shape to an instance of Rectangle.
     *
     * @param a the shape
     * @return the shape itself when it already is a Rectangle, otherwise its bounding box
     */
    public static final Rectangle toRect(Shape a) {
        if (a instanceof Rectangle) {
            return (Rectangle) a;
        }
        return a.getBounds();
    }
}
|
public class NodeTypes {
    /**
     * Get the mandatory child node definitions for a node with the named primary type
     * and mixin types. Note that the {@link #hasMandatoryChildNodeDefinitions(Name, Set)}
     * method should first be called with the primary type and mixin types; if that
     * method returns <code>true</code>, then this method will never return an empty
     * collection.
     *
     * @param primaryType the primary type name; may not be null
     * @param mixinTypes  the mixin type names; may not be null but may be empty
     * @return the collection of mandatory child node definitions; never null but possibly empty
     */
    public Collection<JcrNodeDefinition> getMandatoryChildNodeDefinitions(Name primaryType, Set<Name> mixinTypes) {
        if (mixinTypes.isEmpty()) {
            // Fast path: no mixins, so no merging of collections is needed.
            return mandatoryChildrenNodeTypes.get(primaryType);
        }
        // NOTE(review): assumes mandatoryChildrenNodeTypes.get(...) never returns null
        // (e.g. a multimap-style structure) -- addAll would otherwise throw NPE; confirm.
        Set<JcrNodeDefinition> defn = new HashSet<JcrNodeDefinition>();
        defn.addAll(mandatoryChildrenNodeTypes.get(primaryType));
        for (Name mixinType : mixinTypes) {
            defn.addAll(mandatoryChildrenNodeTypes.get(mixinType));
        }
        return defn;
    }
}
|
public class GitlabHTTPRequestor { /** * Sets the HTTP Form Post parameters for the request
* Has a fluent api for method chaining
* @ param key Form parameter Key
* @ param file File data
* @ return this */
public GitlabHTTPRequestor withAttachment ( String key , File file ) { } }
|
if ( file != null && key != null ) { attachments . put ( key , file ) ; } return this ;
|
public class TextCache { /** * This is called internally when old rows need to be removed from the
* cache . Text table rows that have not been saved are those that have not
* been committed yet . So we don ' t save them but add them to the
* uncommitted cache until such time that they are committed or rolled
* back - fredt */
protected synchronized void saveRows ( CachedObject [ ] rows , int offset , int count ) { } }
|
if ( count == 0 ) { return ; } for ( int i = offset ; i < offset + count ; i ++ ) { CachedObject r = rows [ i ] ; uncommittedCache . put ( r . getPos ( ) , r ) ; rows [ i ] = null ; }
|
public class CRDTReplicationMigrationService {
    /**
     * Attempts to replicate only the unreplicated CRDT state to any non-local member
     * in the cluster before shutdown. State may be unreplicated because a CRDT was
     * mutated or merged but not yet disseminated via the usual replication mechanism.
     *
     * <p>Iterates over the replication-aware services and replicates each service's
     * unreplicated state to a data member, regardless of whether that member is a
     * configured replica; the receiving member is responsible for migrating state it
     * is not a replica for.</p>
     *
     * @param timeout the overall shutdown time budget
     * @param unit    the unit of {@code timeout}
     * @return true when all unreplicated state was processed within the budget
     * @see CRDTReplicationTask
     */
    @Override
    public boolean onShutdown(long timeout, TimeUnit unit) {
        // Lite members hold no CRDT state: nothing to replicate.
        if (nodeEngine.getLocalMember().isLiteMember()) {
            return true;
        }
        long timeoutNanos = unit.toNanos(timeout);
        for (CRDTReplicationAwareService service : getReplicationServices()) {
            service.prepareToSafeShutdown();
            final CRDTReplicationContainer replicationOperation = service.prepareReplicationOperation(replicationVectorClocks.getLatestReplicatedVectorClock(service.getName()), 0);
            if (replicationOperation == null) {
                logger.fine("Skipping replication since all CRDTs are replicated");
                continue;
            }
            // Charge the time spent on this service against the overall budget.
            long start = System.nanoTime();
            if (!tryProcessOnOtherMembers(replicationOperation.getOperation(), service.getName(), timeoutNanos)) {
                logger.warning("Failed replication of CRDTs for " + service.getName() + ". CRDT state may be lost.");
            }
            timeoutNanos -= (System.nanoTime() - start);
            if (timeoutNanos < 0) {
                // Budget exhausted before all services were processed.
                return false;
            }
        }
        return true;
    }
}
|
public class NumberSequenceLocalStorage { /** * Get file and ensure dir exists , cache directory
* @ param name
* @ return */
private static synchronized String getFile ( String name ) { } }
|
if ( Objects . isNullOrEmpty ( baseDirectory ) ) { baseDirectory = Options . getStorage ( ) . getSystem ( "numberSequence.home" , System . getProperty ( "user.home" ) + File . separator + ".s1-sequences" ) ; } File dir = new File ( baseDirectory ) ; if ( ! dir . exists ( ) ) dir . mkdirs ( ) ; if ( ! dir . isDirectory ( ) ) throw new S1SystemError ( "Directory error: " + baseDirectory ) ; return dir . getAbsolutePath ( ) + File . separator + name ;
|
public class MapLayer { /** * Forward to the parent layer the event that indicates the content of a child layer was changed .
* Only the { @ link MapLayerListener } and the container are notified .
* @ param event the event . */
public void fireLayerContentChangedEvent ( MapLayerContentEvent event ) { } }
|
if ( isEventFirable ( ) ) { final MapLayerListener [ ] theListeners = getListeners ( ) ; if ( theListeners != null && theListeners . length > 0 ) { for ( final MapLayerListener listener : theListeners ) { listener . onMapLayerContentChanged ( event ) ; } } // stop the event firing when it was consumed
if ( event . isConsumed ( ) && event . isDisappearingWhenConsumed ( ) ) { return ; } final GISLayerContainer < ? > container = getContainer ( ) ; if ( container != null ) { container . fireLayerContentChangedEvent ( event ) ; } }
|
public class QueryBuilder {
    /**
     * Join with another query builder. This will add into the SQL something close to
     * "INNER JOIN other-table ...". Either the object associated with the current
     * QueryBuilder or the argument QueryBuilder must have a foreign field of the
     * other one. An exception will be thrown otherwise.
     *
     * <p><b>NOTE:</b> this combines the WHERE statements of the two query builders
     * with a SQL "AND". See {@link #joinOr(QueryBuilder)}.</p>
     *
     * @param joinedQueryBuilder the builder to inner-join with
     * @return this builder, for chaining
     * @throws SQLException if the join cannot be established
     */
    public QueryBuilder<T, ID> join(QueryBuilder<?, ?> joinedQueryBuilder) throws SQLException {
        // Inner join, no explicit local/joined column names, WHERE combined with AND.
        addJoinInfo(JoinType.INNER, null, null, joinedQueryBuilder, JoinWhereOperation.AND);
        return this;
    }
}
|
public class LineReader { /** * Read from the InputStream into the given Text .
* @ param str the object to store the given line
* @ return the number of bytes read including the newline
* @ throws IOException if the underlying stream throws */
public int readLine ( Text str ) throws IOException { } }
|
return readLine ( str , Integer . MAX_VALUE , Integer . MAX_VALUE ) ;
|
public class EventDispatcher {
    /**
     * Subscribes this dispatcher to CHANGE events for the given attribute.
     *
     * @param attr_name the attribute to listen on
     * @param filters   event filters forwarded to the supplier
     * @param stateless whether the subscription is stateless
     * @return the event identifier returned by the supplier
     * @throws DevFailed if the subscription fails
     */
    protected int subscribe_change_event(String attr_name, String[] filters, boolean stateless) throws DevFailed {
        // Delegate to the supplier with this dispatcher registered as the listener.
        return event_supplier.subscribe_event(attr_name, CHANGE_EVENT, this, filters, stateless);
    }
}
|
public class AABBUtils {
    /**
     * Rotates the {@link AxisAlignedBB} around the Y axis by the specified angle.
     *
     * @param aabb  the aabb
     * @param angle the angle
     * @return the rotated axis-aligned bounding box
     */
    public static AxisAlignedBB rotate(AxisAlignedBB aabb, int angle) {
        // Y is the default rotation axis for this convenience overload.
        return rotate(aabb, angle, Axis.Y);
    }
}
|
public class SarlAgentImpl {
    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            // The EXTENDS feature counts as "set" whenever a supertype is recorded.
            case SarlPackage.SARL_AGENT__EXTENDS:
                return extends_ != null;
        }
        return super.eIsSet(featureID);
    }
}
|
public class ValidateApplicationMojo { /** * Validate aspects that are specific to recipes ( i . e . partial Roboconf applications ) .
* Most of this validation could have been handled through enforcer rules . However ,
* they are all warnings and we do not want to create hundreds of projects . We can
* see these rules as good practices that will be shared amongst all the Roboonf users .
* At worst , users can ignore these warnings .
* Or they can submit a feature request to add or remove validation rules .
* @ param project a Maven project
* @ param tpl an application template
* @ param official true if this recipe is maintained by the Roboconf team , false otherwise
* @ return a non - null list of errors */
static Collection < RoboconfError > validateRecipesSpecifics ( MavenProject project , ApplicationTemplate tpl , boolean official ) { } }
|
Collection < RoboconfError > result = new ArrayList < > ( ) ; if ( ! project . getArtifactId ( ) . equals ( project . getArtifactId ( ) . toLowerCase ( ) ) ) result . add ( new RoboconfError ( ErrorCode . REC_ARTIFACT_ID_IN_LOWER_CASE ) ) ; if ( ! tpl . getRootInstances ( ) . isEmpty ( ) ) result . add ( new RoboconfError ( ErrorCode . REC_AVOID_INSTANCES ) ) ; if ( official && ! project . getGroupId ( ) . startsWith ( Constants . OFFICIAL_RECIPES_GROUP_ID ) ) result . add ( new RoboconfError ( ErrorCode . REC_OFFICIAL_GROUP_ID ) ) ; if ( ! project . getArtifactId ( ) . equals ( project . getArtifactId ( ) ) ) result . add ( new RoboconfError ( ErrorCode . REC_NON_MATCHING_ARTIFACT_ID ) ) ; File [ ] files = project . getBasedir ( ) . listFiles ( ) ; boolean found = false ; if ( files != null ) { for ( int i = 0 ; i < files . length && ! found ; i ++ ) found = files [ i ] . getName ( ) . matches ( "(?i)readme(\\..*)?" ) ; } if ( ! found ) result . add ( new RoboconfError ( ErrorCode . REC_MISSING_README ) ) ; return result ; }
|
public class ByteBuddy { /** * Creates a new interface type that extends the provided interface .
* < b > Note < / b > : This methods implements the supplied types < i > as is < / i > , i . e . any { @ link Class } values are implemented
* as raw types if they declare type variables or an owner type .
* < b > Note < / b > : Byte Buddy does not cache previous subclasses but will attempt the generation of a new subclass . For caching
* types , a external cache or { @ link TypeCache } should be used .
* @ param interfaceTypes The interface types to implement . The types must be raw or parameterized types . All
* type variables that are referenced by a parameterized type must be declared by the
* generated subclass before creating the type .
* @ return A type builder that creates a new interface type . */
public DynamicType . Builder < ? > makeInterface ( List < ? extends Type > interfaceTypes ) { } }
|
return makeInterface ( new TypeList . Generic . ForLoadedTypes ( interfaceTypes ) ) ;
|
public class Graph {
    /**
     * Joins the edge DataSet with an input Tuple2 DataSet and applies a user-defined
     * transformation on the values of the matched records. The source ID of the edges
     * input and the first field of the input DataSet are used as join keys.
     *
     * @param inputDataSet     the DataSet to join with; the first field of the Tuple2
     *                         is the join key and the second field is passed as a
     *                         parameter to the transformation function
     * @param edgeJoinFunction the transformation function to apply; the first parameter
     *                         is the current edge value and the second is the value of
     *                         the matched Tuple2 from the input DataSet
     * @param <T>              the type of the second field of the input Tuple2 DataSet
     * @return a new Graph, where the edge values have been updated according to the
     *         result of the edgeJoinFunction
     */
    public <T> Graph<K, VV, EV> joinWithEdgesOnSource(DataSet<Tuple2<K, T>> inputDataSet, final EdgeJoinFunction<EV, T> edgeJoinFunction) {
        // CoGroup on (edge source id) == (tuple field 0); the shared co-group function
        // applies the join function keyed on either source or target.
        DataSet<Edge<K, EV>> resultedEdges = this.getEdges()
                .coGroup(inputDataSet)
                .where(0)
                .equalTo(0)
                .with(new ApplyCoGroupToEdgeValuesOnEitherSourceOrTarget<>(edgeJoinFunction))
                .name("Join with edges on source");
        return new Graph<>(this.vertices, resultedEdges, this.context);
    }
}
|
public class ErrorParser { /** * Parses a { @ link Response } into an { @ link ApiError } .
* @ param response the { @ link Response } to parse .
* @ return an { @ link ApiError } if parsable , or { @ code null } if the response is not in error . */
@ Nullable public static ApiError parseError ( @ Nonnull Response < ? > response ) { } }
|
if ( response . isSuccessful ( ) ) { return null ; } try { return parseError ( response . errorBody ( ) . string ( ) , response . code ( ) , response . message ( ) ) ; } catch ( IOException e ) { return new ApiError ( null , response . code ( ) , "Unknown Error" ) ; }
|
public class HandlerPatterns {
    /**
     * Gets a list of process names from a comma-separated list.
     * Entries are trimmed and empty entries are dropped.
     *
     * @param nameList a list of process names conforming to the processNameListPattern
     * @return the non-empty, trimmed names in their original order
     */
    public static List<String> getNames(String nameList) {
        final List<String> results = new LinkedList<>();
        for (String raw : nameList.split(",")) {
            final String trimmed = raw.trim();
            if (!trimmed.isEmpty()) {
                results.add(trimmed);
            }
        }
        return results;
    }
}
|
public class LayerUtil { /** * Lower right tile for an area .
* @ param boundingBox the area boundingBox
* @ param zoomLevel the zoom level .
* @ param tileSize the tile size .
* @ return the tile at the lower right of the bbox . */
public static Tile getLowerRight ( BoundingBox boundingBox , byte zoomLevel , int tileSize ) { } }
|
int tileRight = MercatorProjection . longitudeToTileX ( boundingBox . maxLongitude , zoomLevel ) ; int tileBottom = MercatorProjection . latitudeToTileY ( boundingBox . minLatitude , zoomLevel ) ; return new Tile ( tileRight , tileBottom , zoomLevel , tileSize ) ;
|
public class FormDataBuilder { /** * 添加form数据 ( key不能重复 )
* @ param key key
* @ param value value
* @ return FormDataBuilder */
public FormDataBuilder form ( String key , Object value ) { } }
|
datas . put ( key , value == null ? "" : value ) ; return this ;
|
public class GosuClassTransformer {
    /**
     * If this is an enhancement, adds a (hacky) marker field to indicate the
     * enhanced type for tooling, e.g.:
     * {@code private static final <enhanced-type> ENHNANCED$TYPE;}
     */
    private void maybeAddEnhancedTypeMarkerField() {
        if (getGosuClass() instanceof IGosuEnhancementInternal) {
            int iModifiers = Opcodes.ACC_STATIC | Opcodes.ACC_FINAL | Opcodes.ACC_SYNTHETIC;
            // Single-serving loaders need the marker visible outside the class;
            // otherwise keep it private.
            iModifiers |= (BytecodeOptions.isSingleServingLoader() ? Opcodes.ACC_PUBLIC : Opcodes.ACC_PRIVATE);
            // The field's descriptor encodes the enhanced type, which is what tooling reads.
            IRFieldDecl enhancedTypeMarkerField = new IRFieldDecl(iModifiers, false, ENHANCED_TYPE_FIELD, IRTypeResolver.getDescriptor(((IGosuEnhancementInternal) getGosuClass()).getEnhancedType()), null);
            _irClass.addField(enhancedTypeMarkerField);
        }
    }
}
|
public class TokenValidator {
    /**
     * Validates the identification against the OCPI token database service.
     *
     * @param identification    identification to verify
     * @param chargingStationId charging station id for which the validation should be
     *                          executed (currently only logged)
     * @return a token with status ACCEPTED when the identification is valid according
     *         to OCPI; otherwise the original identification, unchanged
     */
    @Override
    public IdentifyingToken validate(IdentifyingToken identification, @Nullable ChargingStationId chargingStationId) {
        LOG.debug("validate({}, {})", identification.getToken(), chargingStationId);
        try {
            String hiddenId = identification.getToken();
            String visibleId = identification.getVisibleId();
            // Prefer lookup by UID; fall back to the visual number when no UID is present.
            Token token = null;
            if (hiddenId != null) {
                token = ocpiRepository.findTokenByUid(hiddenId);
            } else {
                token = ocpiRepository.findTokenByVisualNumber(visibleId);
            }
            if (token != null) {
                if (token.isValid()) {
                    LOG.debug("Token valid: {}", token.getUid());
                    return new TextualToken(identification.getToken(), AuthenticationStatus.ACCEPTED, token.getIssuingCompany(), token.getVisualNumber());
                } else {
                    LOG.debug("Token not valid: {}", token.getUid());
                }
            } else {
                LOG.debug("Token not found: {}", identification.getToken());
            }
        } catch (Exception e) {
            // Deliberate best-effort: any backend failure falls through and the
            // unmodified (non-accepted) identification is returned.
            LOG.error("Exception OCPI authorization", e);
        }
        return identification;
    }
}
|
public class NpmDependencyResolver {
    /* --- Private methods --- */

    /**
     * Fetches package metadata from the npm registry and extracts the sha1 (shasum)
     * of the given package version.
     *
     * @param registryPackageUrl registry URL of the package metadata
     * @param isScopeDep         whether this is a scoped dependency (the URL contains a
     *                           placeholder that must be replaced by a slash)
     * @param versionOfPackage   the version whose shasum is wanted (used for scoped
     *                           packages, whose metadata lists all versions)
     * @param registryType       registry flavor; VISUAL_STUDIO uses Basic auth, others Bearer
     * @param npmAccessToken     optional access token; request is anonymous when empty
     * @return the shasum, or an empty string when the registry cannot be reached or parsed
     */
    private String getSha1FromRegistryPackageUrl(String registryPackageUrl, boolean isScopeDep, String versionOfPackage, RegistryType registryType, String npmAccessToken) {
        String uriScopeDep = registryPackageUrl;
        if (isScopeDep) {
            try {
                uriScopeDep = registryPackageUrl.replace(BomFile.DUMMY_PARAMETER_SCOPE_PACKAGE, URL_SLASH);
            } catch (Exception e) {
                logger.warn("Failed creating uri of {}", registryPackageUrl);
                return Constants.EMPTY_STRING;
            }
        }
        String responseFromRegistry = null;
        try {
            Client client = Client.create();
            ClientResponse response;
            WebResource resource;
            resource = client.resource(uriScopeDep);
            if (StringUtils.isEmptyOrNull(npmAccessToken)) {
                // Anonymous request.
                response = resource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
                logger.debug("npm.accessToken is not defined");
            } else {
                logger.debug("npm.accessToken is defined");
                if (registryType == RegistryType.VISUAL_STUDIO) {
                    // Visual Studio registries expect Basic auth with the token encoded
                    // into the credentials.
                    String userCredentials = BEARER + Constants.COLON + npmAccessToken;
                    String basicAuth = BASIC + Constants.WHITESPACE + new String(Base64.getEncoder().encode(userCredentials.getBytes()));
                    response = resource.accept(MediaType.APPLICATION_JSON).header("Authorization", basicAuth).get(ClientResponse.class);
                } else {
                    // Bearer authorization
                    String userCredentials = BEARER + Constants.WHITESPACE + npmAccessToken;
                    response = resource.accept(MediaType.APPLICATION_JSON).header("Authorization", userCredentials).get(ClientResponse.class);
                }
            }
            if (response.getStatus() >= 200 && response.getStatus() < 300) {
                responseFromRegistry = response.getEntity(String.class);
            } else {
                logger.debug("Got {} status code from registry using the url {}.", response.getStatus(), uriScopeDep);
            }
        } catch (Exception e) {
            logger.warn("Could not reach the registry using the URL: {}. Got an error: {}", registryPackageUrl, e.getMessage());
            return Constants.EMPTY_STRING;
        }
        if (responseFromRegistry == null) {
            return Constants.EMPTY_STRING;
        }
        JSONObject jsonRegistry = new JSONObject(responseFromRegistry);
        String shasum;
        if (isScopeDep) {
            // Scoped metadata lists all versions; drill down to the requested one.
            shasum = jsonRegistry.getJSONObject(VERSIONS).getJSONObject(versionOfPackage).getJSONObject(DIST).getString(SHASUM);
        } else {
            shasum = jsonRegistry.getJSONObject(DIST).getString(SHASUM);
        }
        return shasum;
    }
}
public class Matrix3d { /** * Set the elements of this matrix to the upper left 3x3 of the given { @ link Matrix4fc } .
* @ param mat
* the { @ link Matrix4fc } to copy the values from
* @ return this */
public Matrix3d set ( Matrix4fc mat ) { } }
|
m00 = mat . m00 ( ) ; m01 = mat . m01 ( ) ; m02 = mat . m02 ( ) ; m10 = mat . m10 ( ) ; m11 = mat . m11 ( ) ; m12 = mat . m12 ( ) ; m20 = mat . m20 ( ) ; m21 = mat . m21 ( ) ; m22 = mat . m22 ( ) ; return this ;
|
public class MapFormat { /** * Scans the pattern and prepares internal variables .
* @ param newPattern String to be parsed .
* @ throws IllegalArgumentException if number of arguments exceeds BUFSIZE or
* parser found unmatched brackets ( this exception should be switched off
* using setExactMatch ( false ) ) . */
public String processPattern ( String newPattern ) throws IllegalArgumentException { } }
|
int idx = 0 ; int offnum = - 1 ; StringBuffer outpat = new StringBuffer ( ) ; offsets = new int [ BUFSIZE ] ; arguments = new String [ BUFSIZE ] ; maxOffset = - 1 ; // skipped = new RangeList ( ) ;
// What was this for ? ?
// process ( newPattern , " \ " " , " \ " " ) ; / / NOI18N
while ( true ) { int ridx = - 1 ; int lidx = newPattern . indexOf ( ldel , idx ) ; /* Range ran = skipped . getRangeContainingOffset ( lidx ) ;
if ( ran ! = null ) {
outpat . append ( newPattern . substring ( idx , ran . getEnd ( ) ) ) ;
idx = ran . getEnd ( ) ; continue ; */
if ( lidx >= 0 ) { ridx = newPattern . indexOf ( rdel , lidx + ldel . length ( ) ) ; } else { break ; } if ( ++ offnum >= BUFSIZE ) { throw new IllegalArgumentException ( "TooManyArguments" ) ; } if ( ridx < 0 ) { if ( exactmatch ) { throw new IllegalArgumentException ( "UnmatchedBraces" ) ; } else { break ; } } outpat . append ( newPattern . substring ( idx , lidx ) ) ; offsets [ offnum ] = outpat . length ( ) ; arguments [ offnum ] = newPattern . substring ( lidx + ldel . length ( ) , ridx ) ; idx = ridx + rdel . length ( ) ; maxOffset ++ ; } outpat . append ( newPattern . substring ( idx ) ) ; return outpat . toString ( ) ;
|
public class ModelExt { /** * redis key for attrs ' values
* @ param flag : ids or id store
* @ return data [ s ] : md5 ( concat ( columns ' value ) ) */
private String redisColumnKey ( SqlpKit . FLAG flag ) { } }
|
StringBuilder key = new StringBuilder ( this . _getUsefulClass ( ) . toGenericString ( ) ) ; String [ ] attrs = this . _getAttrNames ( ) ; Object val ; for ( String attr : attrs ) { val = this . get ( attr ) ; if ( null == val ) { continue ; } key . append ( val . toString ( ) ) ; } key = new StringBuilder ( HashKit . md5 ( key . toString ( ) ) ) ; if ( flag . equals ( SqlpKit . FLAG . ONE ) ) { return "data:" + key ; } return "datas:" + key ;
|
public class GitChangelogApi { /** * Get the changelog as data object .
* @ param useIntegrationIfConfigured true if title / link / labels / issueType should be fetched from
* integrations ( GitHub , GitLab , Jira ) if that is configured .
* @ throws GitChangelogRepositoryException */
public Changelog getChangelog ( final boolean useIntegrationIfConfigured ) throws GitChangelogRepositoryException { } }
|
try ( GitRepo gitRepo = new GitRepo ( new File ( this . settings . getFromRepo ( ) ) ) ) { return getChangelog ( gitRepo , useIntegrationIfConfigured ) ; } catch ( final IOException e ) { throw new GitChangelogRepositoryException ( "" , e ) ; }
|
public class CmsPublish {
    /**
     * Checks for possible broken links when the given list of resources would be published.<p>
     *
     * A relation validator is run over the full publish list (optionally extended with
     * siblings and related resources). For each offending resource: if it is deleted,
     * the sources that still link to it are reported as broken links; otherwise the
     * targets it needs that are not yet online are reported as missing.
     *
     * @param pubResources list of resources to be published
     * @return a list of resources that would produce broken links when published
     */
    public List<CmsPublishResource> getBrokenResources(List<CmsResource> pubResources) {
        List<CmsPublishResource> resources = new ArrayList<CmsPublishResource>();
        CmsPublishManager publishManager = OpenCms.getPublishManager();
        CmsPublishList publishList;
        try {
            // Build the complete publish list, optionally including siblings.
            publishList = OpenCms.getPublishManager().getPublishListAll(
                m_cms, pubResources, m_options.isIncludeSiblings(), true);
            if (m_options.isIncludeRelated()) {
                // Merge in related resources so their links are validated too.
                CmsPublishList related = publishManager.getRelatedResourcesToPublish(m_cms, publishList);
                publishList = publishManager.mergePublishLists(m_cms, publishList, related);
            }
        } catch (CmsException e) {
            // should never happen
            LOG.error(e.getLocalizedMessage(), e);
            return resources;
        }
        // Validate relations over the final publish list; keep the validator for later queries.
        CmsRelationPublishValidator validator = new CmsRelationPublishValidator(m_cms, publishList);
        m_relationValidator = validator;
        for (String resourceName : validator.keySet()) {
            CmsRelationValidatorInfoEntry infoEntry = validator.getInfoEntry(resourceName);
            try {
                CmsResource resource = m_cms.readResource(
                    m_cms.getRequestContext().removeSiteRoot(resourceName), CmsResourceFilter.ALL);
                if (resource.getState().isDeleted()) {
                    // Deleted resource: every relation SOURCE would end up with a broken link online.
                    for (CmsRelation relation : infoEntry.getRelations()) {
                        try {
                            CmsResource theResource = relation.getSource(m_cms, CmsResourceFilter.ALL);
                            CmsPublishResourceInfo info = new CmsPublishResourceInfo(
                                Messages.get().getBundle(m_workplaceLocale).key(Messages.GUI_BROKEN_LINK_ONLINE_0),
                                CmsPublishResourceInfo.Type.BROKENLINK);
                            // HACK: GWT serialization does not like unmodifiable collections :(
                            // Collections.singletonList(resourceToBean(resource, info, false, null)));
                            ArrayList<CmsPublishResource> relatedList = new ArrayList<CmsPublishResource>();
                            relatedList.add(resourceToBean(resource, info, false, null));
                            CmsPublishResource pubRes = resourceToBean(theResource, null, false, relatedList);
                            resources.add(pubRes);
                        } catch (CmsException e) {
                            // should never happen
                            LOG.error(e.getLocalizedMessage(), e);
                        }
                    }
                } else {
                    // Not deleted: the relation TARGETS are missing online; report them as related entries.
                    try {
                        List<CmsPublishResource> related = new ArrayList<CmsPublishResource>();
                        for (CmsRelation relation : infoEntry.getRelations()) {
                            try {
                                CmsResource theResource = relation.getTarget(m_cms, CmsResourceFilter.ALL);
                                CmsPublishResource pubRes = resourceToBean(theResource, null, false, null);
                                related.add(pubRes);
                            } catch (CmsException e) {
                                // Target cannot be read: fall back to a bean built from the relation itself.
                                CmsPublishResource pubRes = relationToBean(relation);
                                related.add(pubRes);
                                LOG.warn(e.getLocalizedMessage(), e);
                            }
                        }
                        CmsPublishResourceInfo info = new CmsPublishResourceInfo(
                            Messages.get().getBundle(m_workplaceLocale).key(Messages.GUI_RESOURCE_MISSING_ONLINE_0),
                            CmsPublishResourceInfo.Type.MISSING);
                        CmsPublishResource pubRes = resourceToBean(resource, info, false, related);
                        resources.add(pubRes);
                    } catch (Exception e) {
                        // should never happen
                        LOG.error(e.getLocalizedMessage(), e);
                    }
                }
            } catch (CmsException e) {
                // should never happen
                LOG.error(e.getLocalizedMessage(), e);
            }
        }
        return resources;
    }
}
|
public class GetPolicyRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( GetPolicyRequest getPolicyRequest , ProtocolMarshaller protocolMarshaller ) { } }
|
if ( getPolicyRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getPolicyRequest . getPolicyName ( ) , POLICYNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
|
public class CSVParser { /** * Parses the CSV according to the given strategy
* and returns the content as an array of records
* ( whereas records are arrays of single values ) .
* The returned content starts at the current parse - position in
* the stream .
* @ return matrix of records x values ( ' null ' when end of file )
* @ throws IOException on parse error or input read - failure */
public String [ ] [ ] getAllValues ( ) throws IOException { } }
|
ArrayList < String [ ] > records = new ArrayList < String [ ] > ( ) ; String [ ] values ; String [ ] [ ] ret = null ; while ( ( values = getLine ( ) ) != null ) { records . add ( values ) ; } if ( records . size ( ) > 0 ) { ret = new String [ records . size ( ) ] [ ] ; records . toArray ( ret ) ; } return ret ;
|
public class WeakValueHashMap {
    /**
     * Returns the value mapped to {@code key}, or {@code null} if there is no
     * mapping or the weakly-held value has already been garbage-collected.
     * Entries whose referents are gone are purged before the lookup.
     * (The previous comment said "isEmpty()", which described the wrong method.)
     */
    public final Object get(Object key) {
        // Purge stale entries so a cleared reference is not returned.
        clearUnreferencedEntries();
        WeakEntry weakEntry = (WeakEntry) super.get(key);
        if (weakEntry == null)
            return null;
        else
            // Dereference the entry; may be null if the value was collected
            // after the purge above.
            return weakEntry.get();
    }
}
|
public class FulltextIndex { /** * CALL apoc . index . removeNodeByName ( ' name ' , joe ) */
@ Procedure ( mode = Mode . WRITE ) @ Description ( "apoc.index.removeNodeByName('name',node) remove node from an index for the given name" ) public void removeNodeByName ( @ Name ( "name" ) String name , @ Name ( "node" ) Node node ) { } }
|
Index < Node > index = getNodeIndex ( name , null ) ; index . remove ( node ) ;
|
public class EventsImpl {
    /**
     * Execute OData query.
     * Executes an OData query for events, asynchronously, delivering the result
     * through the supplied callback.
     *
     * @param appId ID of the application. This is Application ID from the API Access settings blade in the Azure portal.
     * @param eventType The type of events to query; either a standard event type (`traces`, `customEvents`, `pageViews`, `requests`, `dependencies`, `exceptions`, `availabilityResults`) or `$all` to query across all event types. Possible values include: '$all', 'traces', 'customEvents', 'pageViews', 'browserTimings', 'requests', 'dependencies', 'exceptions', 'availabilityResults', 'performanceCounters', 'customMetrics'
     * @param timespan Optional. The timespan over which to retrieve events. This is an ISO8601 time period value. This timespan is applied in addition to any that are specified in the OData expression.
     * @param filter An expression used to filter the returned events
     * @param search A free-text search expression to match for whether a particular event should be returned
     * @param orderby A comma-separated list of properties with "asc" (the default) or "desc" to control the order of returned events
     * @param select Limits the properties to just those requested on each returned event
     * @param skip The number of items to skip over before returning events
     * @param top The number of events to return
     * @param format Format for the returned events
     * @param count Request a count of matching items included with the returned events
     * @param apply An expression used for aggregation over returned events
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<EventsResults> getByTypeAsync(String appId, EventType eventType, String timespan, String filter, String search, String orderby, String select, Integer skip, Integer top, String format, Boolean count, String apply, final ServiceCallback<EventsResults> serviceCallback) {
        // Thin adapter: run the service-response variant and bridge its
        // observable onto the ServiceFuture/callback pair.
        return ServiceFuture.fromResponse(getByTypeWithServiceResponseAsync(appId, eventType, timespan, filter, search, orderby, select, skip, top, format, count, apply), serviceCallback);
    }
}
|
public class ContentMappings { /** * Constructs a new ContentMappings using an existing config file or default settings if not found . */
public static @ Extension ContentMappings newInstance ( ) throws IOException { } }
|
ContentMappings mappings = Persistence . load ( ContentMappings . class ) ; if ( mappings == null ) { mappings = ( ContentMappings ) new XmlProxy ( ) . readResolve ( ) ; } return mappings ;
|
public class SimpleListAttributeDefinition { /** * Overrides { @ link ListAttributeDefinition # convertParameterElementExpressions ( ModelNode ) the superclass }
* to check that expressions are supported yet the { @ code valueType } passed to the constructor is one of
* the { @ link # COMPLEX _ TYPES complex DMR types } . If it is , an { @ link IllegalStateException } is thrown , as this
* implementation cannot properly handle such a combination .
* { @ inheritDoc }
* @ throws IllegalStateException if expressions are supported , but the { @ code valueType } is { @ link # COMPLEX _ TYPES complex } */
@ Override protected ModelNode convertParameterElementExpressions ( ModelNode parameterElement ) { } }
|
boolean allowExp = isAllowExpression ( ) || valueType . isAllowExpression ( ) ; if ( allowExp && COMPLEX_TYPES . contains ( valueType . getType ( ) ) ) { // They need to subclass and override
throw new IllegalStateException ( ) ; } return allowExp ? convertStringExpression ( parameterElement ) : parameterElement ;
|
public class DiffOperation {
    /**
     * Perform a diff between two data states.
     * Note: For now, this operation will ignore type instructions for non-unique keys.
     *
     * Convenience overload: delegates to the three-argument performDiff,
     * passing {@code null} as the first argument (presumably a default
     * identifier/configuration — confirm against the overload's signature).
     *
     * @param fromState - The "from" state engine, populated with one of the deserialized data states to compare
     * @param toState - the "to" state engine, populated with the other deserialized data state to compare.
     * @return the DiffReport for investigation of the differences between the two data states.
     * @throws DiffReportGenerationException
     */
    public DiffReport performDiff(FastBlobStateEngine fromState, FastBlobStateEngine toState) throws DiffReportGenerationException {
        return performDiff(null, fromState, toState);
    }
}
|
public class ClassInfoAnalysisEngine {
    /**
     * Computes the {@link ClassInfo} for a class descriptor by parsing its
     * bytecode with ASM. If the class data is missing and the class is a
     * {@code package-info} placeholder, a synthetic ClassInfo is fabricated
     * instead of failing.
     *
     * (non-Javadoc)
     * @see edu.umd.cs.findbugs.classfile.IAnalysisEngine#analyze(edu.umd.cs.findbugs.classfile.IAnalysisCache, java.lang.Object)
     */
    @Override
    public ClassInfo analyze(IAnalysisCache analysisCache, ClassDescriptor descriptor) throws CheckedAnalysisException {
        // A ClassInfo is itself usable as the descriptor; nothing to compute.
        if (descriptor instanceof ClassInfo) {
            return (ClassInfo) descriptor;
        }
        ClassData classData;
        try {
            classData = analysisCache.getClassAnalysis(ClassData.class, descriptor);
        } catch (edu.umd.cs.findbugs.classfile.MissingClassException e) {
            // Only tolerate a missing class file for package-info placeholders.
            if (!"package-info".equals(descriptor.getSimpleName())) {
                throw e;
            }
            // Fabricate a stand-in ClassInfo. 1536 == 0x600 ==
            // ACC_INTERFACE (0x200) | ACC_ABSTRACT (0x400) JVM access flags.
            ClassInfo.Builder builder = new ClassInfo.Builder();
            builder.setClassDescriptor(descriptor);
            builder.setAccessFlags(1536);
            return builder.build();
        }
        // Read the class info
        FBClassReader reader = analysisCache.getClassAnalysis(FBClassReader.class, descriptor);
        ClassParserInterface parser = new ClassParserUsingASM(reader, descriptor, classData.getCodeBaseEntry());
        ClassInfo.Builder classInfoBuilder = new ClassInfo.Builder();
        parser.parse(classInfoBuilder);
        ClassInfo classInfo = classInfoBuilder.build();
        // Guard: the parsed class's internal name must match where it was found.
        if (!classInfo.getClassDescriptor().equals(descriptor)) {
            throw new ClassNameMismatchException(descriptor, classInfo.getClassDescriptor(), classData.getCodeBaseEntry());
        }
        return classInfo;
    }
}
|
public class XOManagerImpl {
    /**
     * Create a new {@link CompositeObject} instance using an example.
     *
     * @param exampleEntity
     *            The example instance: a map of primitive property metadata to
     *            the values the new entity is created with.
     * @param type
     *            The interface the property type shall implement.
     * @param types
     *            Additional interfaces the entity type shall implement.
     * @return The {@link CompositeObject} instance.
     */
    private CompositeObject createByExample(Map<PrimitivePropertyMethodMetadata<PropertyMetadata>, Object> exampleEntity, Class<?> type, Class<?>... types) {
        // Resolve the effective type set and the discriminators the datastore needs.
        DynamicType<EntityTypeMetadata<EntityMetadata>> effectiveTypes = sessionContext.getMetadataProvider().getEffectiveTypes(type, types);
        Set<EntityDiscriminator> entityDiscriminators = sessionContext.getMetadataProvider().getEntityDiscriminators(effectiveTypes);
        DatastoreSession<EntityId, Entity, EntityMetadata, EntityDiscriminator, RelationId, Relation, RelationMetadata, RelationDiscriminator, PropertyMetadata> datastoreSession = sessionContext.getDatastoreSession();
        // Create the backing datastore entity pre-populated from the example values.
        Entity entity = datastoreSession.getDatastoreEntityManager().createEntity(effectiveTypes, entityDiscriminators, exampleEntity);
        AbstractInstanceManager<EntityId, Entity> entityInstanceManager = sessionContext.getEntityInstanceManager();
        CompositeObject instance = entityInstanceManager.createInstance(entity, effectiveTypes);
        // Fire post-create lifecycle listeners before handing the instance out.
        sessionContext.getInstanceListenerService().postCreate(instance);
        return instance;
    }
}
|
public class MultipartReader { /** * Reads a line up until \ r \ n This will act similar to
* BufferedReader . readLine
* @ return The line
* @ throws IOException If an error occurs while reading */
public String readLine ( ) throws IOException { } }
|
StringBuilder bldr = new StringBuilder ( ) ; while ( true ) { int b = read ( ) ; if ( b == - 1 || b == 10 ) { break ; } else if ( b != '\r' ) { bldr . append ( ( char ) ( ( byte ) b ) ) ; } } return bldr . toString ( ) ;
|
public class Vector3i { /** * Read this vector from the supplied { @ link ByteBuffer } starting at the
* specified absolute buffer position / index .
* This method will not increment the position of the given ByteBuffer .
* @ param index
* the absolute position into the ByteBuffer
* @ param buffer
* values will be read in < code > x , y , z < / code > order
* @ return this */
public Vector3i set ( int index , ByteBuffer buffer ) { } }
|
MemUtil . INSTANCE . get ( this , index , buffer ) ; return this ;
|
public class RequestHandler {
    /**
     * Helper method which grabs the current configuration and checks if the node setup is out of sync.
     * This method is always called when a new configuration arrives and it will try to sync the actual node
     * and service setup with the one proposed by the configuration.
     *
     * Two paths: if the config contains no open buckets, every known node is
     * removed and disconnected; otherwise each bucket is reconfigured and any
     * node no longer present in the config is removed and disconnected.
     * In both cases the returned observable emits the input config.
     */
    public Observable<ClusterConfig> reconfigure(final ClusterConfig config) {
        LOGGER.debug("Starting reconfiguration.");
        if (config.bucketConfigs().values().isEmpty()) {
            LOGGER.debug("No open bucket found in config, disconnecting all nodes.");
            // JVMCBC-231: a race condition can happen where the nodes set is seen as
            // not empty, while the subsequent Observable.from is not, failing in calling last()
            // — hence the synchronized snapshot copy below.
            List<Node> snapshotNodes;
            synchronized (nodes) {
                snapshotNodes = new ArrayList<Node>(nodes);
            }
            if (snapshotNodes.isEmpty()) {
                return Observable.just(config);
            }
            // Remove and disconnect every snapshotted node; disconnect errors are
            // logged but do not fail the reconfiguration.
            return Observable.from(snapshotNodes).doOnNext(new Action1<Node>() {
                @Override
                public void call(Node node) {
                    removeNode(node);
                    node.disconnect().subscribe(new Subscriber<LifecycleState>() {
                        @Override
                        public void onCompleted() {
                        }

                        @Override
                        public void onError(Throwable e) {
                            LOGGER.warn("Got error during node disconnect.", e);
                        }

                        @Override
                        public void onNext(LifecycleState lifecycleState) {
                        }
                    });
                }
            }).last().map(new Func1<Node, ClusterConfig>() {
                @Override
                public ClusterConfig call(Node node) {
                    // Emit the config once the last node has been handled.
                    return config;
                }
            });
        }
        // Buckets exist: reconfigure each one, then prune nodes that the new
        // config no longer mentions.
        return Observable.just(config).flatMap(new Func1<ClusterConfig, Observable<BucketConfig>>() {
            @Override
            public Observable<BucketConfig> call(final ClusterConfig clusterConfig) {
                return Observable.from(clusterConfig.bucketConfigs().values());
            }
        }).flatMap(new Func1<BucketConfig, Observable<Boolean>>() {
            @Override
            public Observable<Boolean> call(BucketConfig bucketConfig) {
                return reconfigureBucket(bucketConfig);
            }
        }).last().doOnNext(new Action1<Boolean>() {
            @Override
            public void call(Boolean aBoolean) {
                // After all buckets are reconfigured, drop nodes missing from the config.
                Set<NetworkAddress> configNodes = config.allNodeAddresses();
                for (Node node : nodes) {
                    if (!configNodes.contains(node.hostname())) {
                        LOGGER.debug("Removing and disconnecting node {}.", node.hostname());
                        removeNode(node);
                        node.disconnect().subscribe(new Subscriber<LifecycleState>() {
                            @Override
                            public void onCompleted() {
                            }

                            @Override
                            public void onError(Throwable e) {
                                LOGGER.warn("Got error during node disconnect.", e);
                            }

                            @Override
                            public void onNext(LifecycleState lifecycleState) {
                            }
                        });
                    }
                }
            }
        }).map(new Func1<Boolean, ClusterConfig>() {
            @Override
            public ClusterConfig call(Boolean aBoolean) {
                return config;
            }
        });
    }
}
|
public class MultiAttributeProvider { /** * Replies the value associated to the specified name . */
private Attribute extract ( String name ) { } }
|
final AttributeValue value ; if ( this . cache . containsKey ( name ) ) { value = this . cache . get ( name ) ; } else { final ManyValueAttributeValue result = new ManyValueAttributeValue ( ) ; AttributeValue attrValue ; for ( final AttributeProvider c : this . containers ) { attrValue = c . getAttribute ( name ) ; assign ( result , attrValue ) ; } value = canonize ( result ) ; this . cache . put ( name , value ) ; } return ( value != null ) ? new AttributeImpl ( name , value ) : null ;
|
public class PipedOutputStream {
    /**
     * Closes this stream. If this stream is connected to an input stream, the
     * input stream is closed and the pipe is disconnected.
     *
     * @throws IOException
     *             if an error occurs while closing this stream.
     */
    @Override
    public void close() throws IOException {
        // Is the pipe connected?
        // The field is copied into a local first, so the null check and the
        // done() call operate on the same reference — presumably guarding
        // against 'target' being cleared concurrently (TODO confirm the
        // threading model).
        PipedInputStream stream = target;
        if (stream != null) {
            // done() presumably signals the reader that no more data will
            // arrive — confirm in PipedInputStream. Then detach the pipe.
            stream.done();
            target = null;
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.