signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Clipping { /** * Builds a clip - ready { @ link Circle } with a specific radius and specific center position . * @ param radius The radius of this circle . * @ param centerX The horizontal position for this circle ' s center * @ param centerY The vertical position for this circle ' s center * @ return A circle with the given radius and center position */ private static Circle getCircleClip ( final double radius , final double centerX , final double centerY ) { } }
final Circle clip = new Circle ( radius ) ; clip . setCenterX ( centerX ) ; clip . setCenterY ( centerY ) ; return clip ;
public class CorporationApi { /** * Get corporation blueprints ( asynchronously ) Returns a list of blueprints * the corporation owns - - - This route is cached for up to 3600 seconds - - - * Requires one of the following EVE corporation role ( s ) : Director SSO * Scope : esi - corporations . read _ blueprints . v1 * @ param corporationId * An EVE corporation ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param page * Which page of results to return ( optional , default to 1) * @ param token * Access token to use if unable to set a header ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call getCorporationsCorporationIdBlueprintsAsync ( Integer corporationId , String datasource , String ifNoneMatch , Integer page , String token , final ApiCallback < List < CorporationBlueprintsResponse > > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = getCorporationsCorporationIdBlueprintsValidateBeforeCall ( corporationId , datasource , ifNoneMatch , page , token , callback ) ; Type localVarReturnType = new TypeToken < List < CorporationBlueprintsResponse > > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class CrestCli { /** * java 8 would have use Supplier which is cleaner */ protected CliEnvironment createMainEnvironment ( final AtomicReference < InputReader > dynamicInputReaderRef , final AtomicReference < History > dynamicHistoryAtomicReference ) { } }
final Map < String , ? > data = new HashMap < String , Object > ( ) ; return new CliEnv ( ) { @ Override public History history ( ) { return dynamicHistoryAtomicReference . get ( ) ; } @ Override public InputReader reader ( ) { return dynamicInputReaderRef . get ( ) ; } @ Override public Map < String , ? > userData ( ) { return data ; } } ;
public class DefaultPluginLoader { /** * Add all { @ code * . jar } files from { @ code lib } directories to plugin class loader . */ protected void loadJars ( Path pluginPath , PluginClassLoader pluginClassLoader ) { } }
for ( String libDirectory : pluginClasspath . getLibDirectories ( ) ) { Path file = pluginPath . resolve ( libDirectory ) ; List < File > jars = FileUtils . getJars ( file ) ; for ( File jar : jars ) { pluginClassLoader . addFile ( jar ) ; } }
public class WebUtils { /** * Looks up all of the WebRequestInterceptor instances registered with the application * @ param servletContext The ServletContext instance * @ return An array of WebRequestInterceptor instances */ public static WebRequestInterceptor [ ] lookupWebRequestInterceptors ( ServletContext servletContext ) { } }
WebApplicationContext wac = WebApplicationContextUtils . getRequiredWebApplicationContext ( servletContext ) ; final Collection < WebRequestInterceptor > webRequestInterceptors = wac . getBeansOfType ( WebRequestInterceptor . class ) . values ( ) ; return webRequestInterceptors . toArray ( new WebRequestInterceptor [ webRequestInterceptors . size ( ) ] ) ;
public class Widget { /** * Set the 4x4 model matrix and set current scaling , rotation , and * transformation based on this model matrix . * @ param mat * An array of 16 { @ code float } s representing a 4x4 matrix in * OpenGL - compatible column - major format . */ public void setModelMatrix ( float [ ] mat ) { } }
if ( mat . length != 16 ) { throw new IllegalArgumentException ( "Size not equal to 16." ) ; } getTransform ( ) . setModelMatrix ( mat ) ; if ( mTransformCache . setModelMatrix ( mat ) ) { onTransformChanged ( ) ; }
public class TableProxy { /** * Get the DATA _ SOURCE _ HANDLE to the last modified or added record . * @ param iHandleType The type of handle to use . * @ return The handle of the last modified record . */ public Object getLastModified ( int iHandleType ) throws DBException , RemoteException { } }
BaseTransport transport = this . createProxyTransport ( GET_LAST_MODIFIED ) ; transport . addParam ( TYPE , iHandleType ) ; Object strReturn = transport . sendMessageAndGetReply ( ) ; Object objReturn = transport . convertReturnObject ( strReturn ) ; return this . checkDBException ( objReturn ) ;
public class Job { /** * Returns the last completed build , if any . Otherwise null . */ @ Exported @ QuickSilver public RunT getLastCompletedBuild ( ) { } }
RunT r = getLastBuild ( ) ; while ( r != null && r . isBuilding ( ) ) r = r . getPreviousBuild ( ) ; return r ;
public class HttpRequest { /** * Set the ' http . proxyPort ' and ' https . proxyPort ' properties to the given port * number . * This setting will apply to all requests . * @ param port */ public static void proxyPort ( final int port ) { } }
final String portValue = Integer . toString ( port ) ; setProperty ( "http.proxyPort" , portValue ) ; setProperty ( "https.proxyPort" , portValue ) ;
public class VorbisFile {

    /**
     * Clear out the current logical bitstream decoder.
     */
    void decode_clear() {
        // Reset the stream state and the decode working buffers.
        os.clear();
        vd.clear();
        vb.clear();
        // Mark the decoder as needing re-initialization before further decoding.
        decode_ready = false;
        // Reset the bit/sample tracking accumulators.
        bittrack = 0.f;
        samptrack = 0.f;
    }
}
public class Annotate { /** * Get all the WF ids for the terms contained in the KAFDocument . * @ param kaf * the KAFDocument * @ return the list of all WF ids in the terms layer */ public List < String > getAllWFIdsFromTerms ( KAFDocument kaf ) { } }
List < Term > terms = kaf . getTerms ( ) ; List < String > wfTermIds = new ArrayList < > ( ) ; for ( int i = 0 ; i < terms . size ( ) ; i ++ ) { List < WF > sentTerms = terms . get ( i ) . getWFs ( ) ; for ( WF form : sentTerms ) { wfTermIds . add ( form . getId ( ) ) ; } } return wfTermIds ;
public class AbstractSerializationSpecWriter {

    /**
     * Specifies the serialization of an InputStream value by delegating to
     * {@link #specifyIOReadableValue} with the same context and rules.
     */
    protected ISynchronizationPoint<? extends Exception> specifyInputStreamValue(
            SerializationContext context, List<SerializationRule> rules) {
        return specifyIOReadableValue(context, rules);
    }
}
public class MQTTLibrary { /** * Publish a message to a topic with specified qos and retained flag * @ param topic * topic to which the message will be published * @ param message * message payload to publish * @ param qos * qos of the message * @ param retained * retained flag * @ throws MqttException * if there is an issue publishing to the broker */ @ RobotKeyword ( "Publish to MQTT Synchronously" ) @ ArgumentNames ( { } }
"topic" , "message" , "qos=0" , "retained=false" } ) public void publishToMQTTSynchronously ( String topic , Object message , int qos , boolean retained ) throws MqttException { MqttMessage msg ; if ( message instanceof String ) { msg = new MqttMessage ( message . toString ( ) . getBytes ( ) ) ; } else { msg = new MqttMessage ( ( byte [ ] ) message ) ; } msg . setQos ( qos ) ; msg . setRetained ( retained ) ; System . out . println ( "*INFO:" + System . currentTimeMillis ( ) + "* publishing message" ) ; client . publish ( topic , msg ) ; System . out . println ( "*INFO:" + System . currentTimeMillis ( ) + "* published" ) ;
public class FacesBackingBean {

    /**
     * Reinitialize the bean for a new request. Used by the framework; normally should not be
     * called directly.
     */
    public void reinitialize(HttpServletRequest request, HttpServletResponse response, ServletContext servletContext) {
        super.reinitialize(request, response, servletContext);

        // Capture the action output map once and expose it read-only as the page inputs.
        if (_pageInputs == null) {
            Map map = InternalUtils.getActionOutputMap(request, false);
            if (map != null) _pageInputs = Collections.unmodifiableMap(map);
        }

        // Initialize the page flow field.
        Field pageFlowMemberField = getCachedInfo().getPageFlowMemberField();

        // TODO: should we add a compiler warning if this field isn't transient?  All this reinitialization logic is
        // for the transient case.
        if (fieldIsUninitialized(pageFlowMemberField)) {
            PageFlowController pfc = PageFlowUtils.getCurrentPageFlow(request, servletContext);
            initializeField(pageFlowMemberField, pfc);
        }

        // Initialize the shared flow fields.
        CachedSharedFlowRefInfo.SharedFlowFieldInfo[] sharedFlowMemberFields =
                getCachedInfo().getSharedFlowMemberFields();

        if (sharedFlowMemberFields != null) {
            for (int i = 0; i < sharedFlowMemberFields.length; i++) {
                CachedSharedFlowRefInfo.SharedFlowFieldInfo fi = sharedFlowMemberFields[i];
                Field field = fi.field;

                if (fieldIsUninitialized(field)) {
                    Map/*< String, SharedFlowController >*/ sharedFlows = PageFlowUtils.getSharedFlows(request);
                    String name = fi.sharedFlowName;
                    // A null shared flow name refers to the global app shared flow.
                    SharedFlowController sf =
                            name != null
                                    ? (SharedFlowController) sharedFlows.get(name)
                                    : PageFlowUtils.getGlobalApp(request);

                    if (sf != null) {
                        initializeField(field, sf);
                    } else {
                        _log.error("Could not find shared flow with name \"" + fi.sharedFlowName
                                + "\" to initialize field " + field.getName() + " in " + getClass().getName());
                    }
                }
            }
        }
    }
}
public class WRowRenderer {

    /**
     * Paints the given WRow.
     *
     * @param component the WRow to paint.
     * @param renderContext the RenderContext to paint to.
     */
    @Override
    public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
        WRow row = (WRow) component;
        XmlStringBuilder xml = renderContext.getWriter();
        int cols = row.getChildCount();
        Size gap = row.getSpace();
        String gapString = gap != null ? gap.toString() : null;

        // A row without children renders nothing at all.
        if (cols > 0) {
            xml.appendTagOpen("ui:row");
            xml.appendAttribute("id", component.getId());
            xml.appendOptionalAttribute("class", component.getHtmlClass());
            xml.appendOptionalAttribute("track", component.isTracking(), "true");
            xml.appendOptionalAttribute("gap", gapString);
            xml.appendClose();

            // Render margin
            MarginRendererUtil.renderMargin(row, renderContext);

            paintChildren(row, renderContext);

            xml.appendEndTag("ui:row");
        }
    }
}
public class TransformedRenditionHandler { /** * Swaps width with height if rotated 90 ° clock - wise or counter clock - wise * @ param width Rendition width * @ param height Rendition height * @ return Width */ private long rotateMapWidth ( long width , long height ) { } }
if ( rotation != null && ( rotation == ROTATE_90 || rotation == ROTATE_270 ) ) { return height ; } else { return width ; }
public class GabowSCC {

    /**
     * Computes a {@link List} of {@link Set}s, where each set contains vertices
     * which together form a strongly connected component within the given
     * graph.
     *
     * @return <code>List</code> of <code>EquivalanceClass</code>es containing the strongly
     *         connected components
     */
    public List<Equivalences<V>> stronglyConnectedSets() {
        // Lazily computed: the result is cached and returned on subsequent calls.
        if (stronglyConnectedSets == null) {
            stronglyConnectedSets = new Vector<>();

            // create VertexData objects for all vertices, store them
            createVertexNumber();

            // perform DFS from every vertex not yet visited (number == 0)
            for (VertexNumber<V> data : vertexToVertexNumber.values()) {
                if (data.getNumber() == 0) {
                    dfsVisit(graph, data);
                }
            }

            // release the working structures; only the cached result is retained
            vertexToVertexNumber = null;
            stack = null;
            B = null;
        }
        return stronglyConnectedSets;
    }
}
public class Calcites { /** * Like RelDataTypeFactory . createSqlTypeWithNullability , but creates types that align best with how Druid * represents them . */ public static RelDataType createSqlTypeWithNullability ( final RelDataTypeFactory typeFactory , final SqlTypeName typeName , final boolean nullable ) { } }
final RelDataType dataType ; switch ( typeName ) { case TIMESTAMP : // Our timestamps are down to the millisecond ( precision = 3 ) . dataType = typeFactory . createSqlType ( typeName , 3 ) ; break ; case CHAR : case VARCHAR : dataType = typeFactory . createTypeWithCharsetAndCollation ( typeFactory . createSqlType ( typeName ) , Calcites . defaultCharset ( ) , SqlCollation . IMPLICIT ) ; break ; default : dataType = typeFactory . createSqlType ( typeName ) ; } return typeFactory . createTypeWithNullability ( dataType , nullable ) ;
public class CmsGroupOverviewDialog {

    /**
     * Creates the list of widgets for this dialog.<p>
     */
    @Override
    protected void defineWidgets() {
        // initialize the user object to use for the dialog
        initGroupObject();
        setKeyPrefix(KEY_PREFIX);

        // widgets to display
        if (!isOverview()) {
            // short form: name, description and organizational unit only
            addWidget(new CmsWidgetDialogParameter(this, "name", PAGES[0], new CmsDisplayWidget()));
            addWidget(new CmsWidgetDialogParameter(this, "description", PAGES[0], new CmsDisplayWidget()));
            addWidget(new CmsWidgetDialogParameter(this, "assignedOu", PAGES[0], new CmsDisplayWidget()));
        } else {
            // overview form: additionally shows parent group and enabled flag,
            // and binds the "niceName" property under the "name" key
            addWidget(new CmsWidgetDialogParameter(this, "niceName", "name", PAGES[0], new CmsDisplayWidget()));
            addWidget(new CmsWidgetDialogParameter(this, "description", PAGES[0], new CmsDisplayWidget()));
            addWidget(new CmsWidgetDialogParameter(this, "assignedOu", PAGES[0], new CmsDisplayWidget()));
            addWidget(new CmsWidgetDialogParameter(this, "parentGroup", PAGES[0], new CmsDisplayWidget()));
            addWidget(new CmsWidgetDialogParameter(m_group, "enabled", PAGES[0], new CmsDisplayWidget()));
        }
    }
}
public class CmsTreeItem { /** * Sets the tree to which this tree item belongs . < p > * This is automatically called when this tree item or one of its ancestors is inserted into a tree . < p > * @ param tree the tree into which the item has been inserted */ public void setTree ( CmsTree < CmsTreeItem > tree ) { } }
m_tree = tree ; for ( Widget widget : m_children ) { if ( widget instanceof CmsTreeItem ) { ( ( CmsTreeItem ) widget ) . setTree ( tree ) ; } }
public class CPOptionValueUtil {

    /**
     * Returns the first cp option value in the ordered set where companyId = &#63;.
     *
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp option value, or <code>null</code> if a matching cp option value could not be found
     */
    public static CPOptionValue fetchByCompanyId_First(long companyId,
            OrderByComparator<CPOptionValue> orderByComparator) {
        // Thin static facade; the persistence implementation performs the actual lookup.
        return getPersistence().fetchByCompanyId_First(companyId, orderByComparator);
    }
}
public class BoxUser { /** * Helper method to abstract out the common logic from the various users methods . * @ param api the API connection to be used when retrieving the users . * @ param filterTerm The filter term to lookup users by ( login for external , login or name for managed ) * @ param userType The type of users we want to search with this request . * Valid values are ' managed ' ( enterprise users ) , ' external ' or ' all ' * @ param externalAppUserId the external app user id that has been set for an app user * @ param fields the fields to retrieve . Leave this out for the standard fields . * @ return An iterator over the selected users . */ private static Iterable < BoxUser . Info > getUsersInfoForType ( final BoxAPIConnection api , final String filterTerm , final String userType , final String externalAppUserId , final String ... fields ) { } }
return new Iterable < BoxUser . Info > ( ) { public Iterator < BoxUser . Info > iterator ( ) { QueryStringBuilder builder = new QueryStringBuilder ( ) ; if ( filterTerm != null ) { builder . appendParam ( "filter_term" , filterTerm ) ; } if ( userType != null ) { builder . appendParam ( "user_type" , userType ) ; } if ( externalAppUserId != null ) { builder . appendParam ( "external_app_user_id" , externalAppUserId ) ; } if ( fields . length > 0 ) { builder . appendParam ( "fields" , fields ) ; } URL url = USERS_URL_TEMPLATE . buildWithQuery ( api . getBaseURL ( ) , builder . toString ( ) ) ; return new BoxUserIterator ( api , url ) ; } } ;
public class ConcurrentHashMapV7 {

    /**
     * Removes all of the mappings from this map.
     * Clearing proceeds segment by segment, so the operation as a whole is not
     * atomic with respect to concurrent updates in other segments.
     */
    @Override
    public void clear() {
        final Segment<K, V>[] segments = this.segments;
        for (int j = 0; j < segments.length; ++j) {
            Segment<K, V> s = segmentAt(segments, j);
            // Segments may be created lazily; skip slots never populated.
            if (s != null) {
                s.clear();
            }
        }
    }
}
public class CommerceWarehouseUtil {

    /**
     * Returns the last commerce warehouse in the ordered set where groupId = &#63; and active = &#63;.
     *
     * @param groupId the group ID
     * @param active the active
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce warehouse, or <code>null</code> if a matching commerce warehouse could not be found
     */
    public static CommerceWarehouse fetchByG_A_Last(long groupId, boolean active,
            OrderByComparator<CommerceWarehouse> orderByComparator) {
        // Thin static facade; the persistence implementation performs the actual lookup.
        return getPersistence().fetchByG_A_Last(groupId, active, orderByComparator);
    }
}
public class HttpRmiTransaction {

    /**
     * Set remote method invocation actual parameters. Parameters order and types should be
     * consistent with remote method signature.
     *
     * @param arguments variable number of actual arguments for remote method invocation.
     * @throws IllegalArgumentException if given arguments is null or missing.
     */
    public void setArguments(Object... arguments) {
        // Reject a null or empty argument list up front.
        Params.notNullOrEmpty(arguments, "Arguments");
        this.arguments = arguments;
        // Select the writer able to encode this particular argument list.
        argumentsWriter = ClientEncoders.getInstance().getArgumentsWriter(arguments);
    }
}
public class VueGWTObserverManager {

    /**
     * Will be called from JS by the Vue observer. This is called before Vue "walk" the properties
     * of the Object to make them reactive. If your object has it's own observation mechanism, or
     * you don't want Vue to make your properties reactive (for some reason), you should return
     * true in your {@link VueGWTObserver}. You are then responsible to call notifyDep on your
     * object {@link VueObserver} and propagate observation to the object property values.
     *
     * @param object The object to potentially observe
     * @return true if we are observing and Vue shouldn't observe, false otherwise
     */
    private boolean observeJavaObject(Object object) {
        // Ignore pure JS objects, this is to avoid impacting pure Vue.js components
        if (object.getClass() == JsObject.class) {
            return false;
        }

        // Don't observe Java classes
        if (object instanceof Class) {
            return true;
        }

        // Check if we have a custom Java observer; the first one that claims the
        // object wins and Vue's own observation is suppressed.
        for (VueGWTObserver observer : observers) {
            if (observer.observe(object)) {
                return true;
            }
        }

        // Otherwise expose statically initialized properties to Vue and let it observe.
        makeStaticallyInitializedPropertiesReactive((JsObject) object,
                object.getClass().getCanonicalName());
        return false;
    }
}
public class TypeMapImpl { /** * Used by PropertyMapBuilder to determine if a skipped mapping exists for the { @ code path } . No * need to synchronize here since the TypeMap is not exposed publicly yet . */ boolean isSkipped ( String path ) { } }
Mapping mapping = mappings . get ( path ) ; return mapping != null && mapping . isSkipped ( ) ;
public class ProtobufVarint32FrameDecoder {

    /**
     * Reads variable length 32bit int from buffer.
     * When the buffer does not yet hold a complete varint, the reader index is
     * rolled back to where it was and 0 is returned (which is why the caller must
     * check whether the reader index moved).
     *
     * @return decoded int if buffers readerIndex has been forwarded else nonsense value
     */
    private static int readRawVarint32(ByteBuf buffer) {
        if (!buffer.isReadable()) {
            return 0;
        }
        // Remember the start so an incomplete varint can be un-read.
        buffer.markReaderIndex();
        byte tmp = buffer.readByte();
        if (tmp >= 0) {
            // High bit clear: single-byte varint.
            return tmp;
        } else {
            // Accumulate 7 bits per continuation byte, least-significant group first.
            int result = tmp & 127;
            if (!buffer.isReadable()) {
                buffer.resetReaderIndex();
                return 0;
            }
            if ((tmp = buffer.readByte()) >= 0) {
                result |= tmp << 7;
            } else {
                result |= (tmp & 127) << 7;
                if (!buffer.isReadable()) {
                    buffer.resetReaderIndex();
                    return 0;
                }
                if ((tmp = buffer.readByte()) >= 0) {
                    result |= tmp << 14;
                } else {
                    result |= (tmp & 127) << 14;
                    if (!buffer.isReadable()) {
                        buffer.resetReaderIndex();
                        return 0;
                    }
                    if ((tmp = buffer.readByte()) >= 0) {
                        result |= tmp << 21;
                    } else {
                        result |= (tmp & 127) << 21;
                        if (!buffer.isReadable()) {
                            buffer.resetReaderIndex();
                            return 0;
                        }
                        // Fifth byte must terminate a 32-bit varint; a set high bit here
                        // means the input is corrupt.
                        result |= (tmp = buffer.readByte()) << 28;
                        if (tmp < 0) {
                            throw new CorruptedFrameException("malformed varint.");
                        }
                    }
                }
            }
            return result;
        }
    }
}
public class NamespaceMappings { /** * Given a namespace uri , and the namespaces mappings for the * current element , return the current prefix for that uri . * @ param uri the namespace URI to be search for * @ return an existing prefix that maps to the given URI , null if no prefix * maps to the given namespace URI . */ public String lookupPrefix ( String uri ) { } }
String foundPrefix = null ; Enumeration prefixes = m_namespaces . keys ( ) ; while ( prefixes . hasMoreElements ( ) ) { String prefix = ( String ) prefixes . nextElement ( ) ; String uri2 = lookupNamespace ( prefix ) ; if ( uri2 != null && uri2 . equals ( uri ) ) { foundPrefix = prefix ; break ; } } return foundPrefix ;
public class VectorMath {

    /**
     * Computes the dot product, {@code a}<sup>T</sup>{@code b} of the two
     * vectors.
     *
     * @param a the left vector that will be transposed
     * @param b the right vector
     * @return the dot product of the two vectors.
     * @throws IllegalArgumentException if the two vectors are not of equal
     *         length
     */
    @SuppressWarnings("unchecked")
    public static double dotProduct(DoubleVector a, DoubleVector b) {
        if (a.length() != b.length())
            throw new IllegalArgumentException(
                    "cannot compute dot product of vectors with different lengths");
        double dotProduct = 0;

        // Check whether both vectors support fast iteration over their non-zero
        // values. If so, use only the non-zero indices to speed up the
        // computation by avoiding zero multiplications
        if (a instanceof Iterable && b instanceof Iterable) {
            // Check whether we can easily determine how many non-zero values
            // are in each vector. This value is used to select the iteration
            // order, which affects the number of get(value) calls.
            // NOTE(review): useA can only be true when BOTH vectors are also
            // SparseVector; otherwise iteration defaults to b below.
            boolean useA =
                    (a instanceof SparseVector && b instanceof SparseVector)
                    && ((SparseVector) a).getNonZeroIndices().length
                       < ((SparseVector) b).getNonZeroIndices().length;

            // Choose the smaller of the two to use in computing the dot
            // product. Because it would be more expensive to compute the
            // intersection of the two sets, we assume that any potential
            // misses would be less of a performance hit.
            if (useA) {
                for (DoubleEntry e : ((Iterable<DoubleEntry>) a)) {
                    int index = e.index();
                    double aValue = e.value();
                    double bValue = b.get(index);
                    dotProduct += aValue * bValue;
                }
            } else {
                for (DoubleEntry e : ((Iterable<DoubleEntry>) b)) {
                    int index = e.index();
                    double aValue = a.get(index);
                    double bValue = e.value();
                    dotProduct += aValue * bValue;
                }
            }
        }
        // Check whether both vectors are sparse. If so, use only the non-zero
        // indices to speed up the computation by avoiding zero multiplications
        else if (a instanceof SparseVector && b instanceof SparseVector) {
            SparseVector svA = (SparseVector) a;
            SparseVector svB = (SparseVector) b;
            int[] nzA = svA.getNonZeroIndices();
            int[] nzB = svB.getNonZeroIndices();

            // Choose the smaller of the two to use in computing the dot
            // product. Because it would be more expensive to compute the
            // intersection of the two sets, we assume that any potential
            // misses would be less of a performance hit.
            if (nzA.length < nzB.length) {
                for (int nz : nzA) {
                    double aValue = a.get(nz);
                    double bValue = b.get(nz);
                    dotProduct += aValue * bValue;
                }
            } else {
                for (int nz : nzB) {
                    double aValue = a.get(nz);
                    double bValue = b.get(nz);
                    dotProduct += aValue * bValue;
                }
            }
        }
        // Otherwise, just assume both are dense and compute the full amount
        else {
            for (int i = 0; i < b.length(); i++) {
                double aValue = a.get(i);
                double bValue = b.get(i);
                dotProduct += aValue * bValue;
            }
        }
        return dotProduct;
    }
}
public class BroxWarpingSpacial {

    /**
     * Computes the divergence for u, v, and d. Equation 8 and Equation 10.
     * Interior pixels use a 4-neighbour stencil with averaged psi coefficients;
     * border pixels are delegated to the bounds-checked variant.
     */
    private void computeDivUVD(GrayF32 u, GrayF32 v, GrayF32 psi,
                               GrayF32 divU, GrayF32 divV, GrayF32 divD) {
        final int stride = psi.stride;

        // compute the inside pixel
        for (int y = 1; y < psi.height - 1; y++) {
            // index of the current pixel
            int index = y * stride + 1;
            for (int x = 1; x < psi.width - 1; x++, index++) {
                float psi_index = psi.data[index];
                // averaged psi coefficients toward right, left, bottom and top neighbours
                float coef0 = 0.5f * (psi.data[index + 1] + psi_index);
                float coef1 = 0.5f * (psi.data[index - 1] + psi_index);
                float coef2 = 0.5f * (psi.data[index + stride] + psi_index);
                float coef3 = 0.5f * (psi.data[index - stride] + psi_index);

                float u_index = u.data[index];
                divU.data[index] = coef0 * (u.data[index + 1] - u_index)
                        + coef1 * (u.data[index - 1] - u_index)
                        + coef2 * (u.data[index + stride] - u_index)
                        + coef3 * (u.data[index - stride] - u_index);

                float v_index = v.data[index];
                divV.data[index] = coef0 * (v.data[index + 1] - v_index)
                        + coef1 * (v.data[index - 1] - v_index)
                        + coef2 * (v.data[index + stride] - v_index)
                        + coef3 * (v.data[index - stride] - v_index);

                divD.data[index] = coef0 + coef1 + coef2 + coef3;
            }
        }

        // handle the image borders
        for (int x = 0; x < psi.width; x++) {
            computeDivUVD_safe(x, 0, u, v, psi, divU, divV, divD);
            computeDivUVD_safe(x, psi.height - 1, u, v, psi, divU, divV, divD);
        }
        for (int y = 1; y < psi.height - 1; y++) {
            computeDivUVD_safe(0, y, u, v, psi, divU, divV, divD);
            computeDivUVD_safe(psi.width - 1, y, u, v, psi, divU, divV, divD);
        }
    }
}
public class DatabasesInner {

    /**
     * Creates an import operation that imports a bacpac into an existing database. The existing
     * database must be empty.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can
     *        obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database to import into
     * @param parameters The required parameters for importing a Bacpac into a database.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the ImportExportResponseInner object
     */
    public Observable<ServiceResponse<ImportExportResponseInner>> beginCreateImportOperationWithServiceResponseAsync(
            String resourceGroupName, String serverName, String databaseName, ImportExtensionRequest parameters) {
        // Fail fast on every required argument before issuing the request.
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (serverName == null) {
            throw new IllegalArgumentException("Parameter serverName is required and cannot be null.");
        }
        if (databaseName == null) {
            throw new IllegalArgumentException("Parameter databaseName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        Validator.validate(parameters);
        // The service route uses the fixed extension name "import".
        final String extensionName = "import";
        return service.beginCreateImportOperation(this.client.subscriptionId(), resourceGroupName, serverName,
                databaseName, extensionName, this.client.apiVersion(), parameters, this.client.acceptLanguage(),
                this.client.userAgent())
                .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<ImportExportResponseInner>>>() {
                    @Override
                    public Observable<ServiceResponse<ImportExportResponseInner>> call(Response<ResponseBody> response) {
                        try {
                            // Deserialize the raw response; any failure flows through the observable.
                            ServiceResponse<ImportExportResponseInner> clientResponse =
                                    beginCreateImportOperationDelegate(response);
                            return Observable.just(clientResponse);
                        } catch (Throwable t) {
                            return Observable.error(t);
                        }
                    }
                });
    }
}
public class Ifc4PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * Lazily resolves the IfcRelSequence classifier from the registered Ifc4 package
     * (classifier index 558) and caches it.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcRelSequence() {
        if (ifcRelSequenceEClass == null) {
            ifcRelSequenceEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI)
                    .getEClassifiers().get(558);
        }
        return ifcRelSequenceEClass;
    }
}
public class WSSubject {

    /**
     * This method returns a Subject contains the principal of the J2EE run as identity
     * and the J2EE run as credential. If there is no run as credential, a <code>null</code>
     * is returned.
     * If there is a run as credential in the current thread, it creates a new Subject
     * that contains a <code>com.ibm.websphere.security.auth.WSPrincipal</code> and a
     * <code>com.ibm.websphere.security.cred.WSCredential</code>.
     * This method is protected by Java 2 Security. If Java 2 Security is enabled, then
     * access will be denied if the application code is not granted the permission
     * <code>javax.security.auth.AuthPermission("wssecurity.getRunAsSubject")</code>.
     *
     * @return Subject contains the run as identity, <code>null</code> if there is no run as identity
     *         and if called by application client or thin client code.
     * @exception WSSecurityException failed to get the run as identity
     * @see com.ibm.websphere.security.auth.WSPrincipal
     * @see com.ibm.websphere.security.cred.WSCredential
     */
    @SuppressWarnings("unchecked")
    public static Subject getRunAsSubject() throws WSSecurityException {
        // Enforce Java 2 Security: callers need the "wssecurity.getRunAsSubject" permission.
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkPermission(GETRUNASSUBJECT_PERM);
        }
        Subject s = null;
        try {
            // Run the lookup in a privileged block so only this class needs the permission.
            s = (Subject) java.security.AccessController.doPrivileged(getRunAsSubjectAction);
        } catch (PrivilegedActionException pae) {
            // Unwrap and rethrow the underlying security failure.
            WSSecurityException e = (WSSecurityException) pae.getException();
            throw e;
        }
        return s;
    }
}
public class MiniSatStyleSolver {

    /**
     * Initializes the solver configuration by copying every tunable from the
     * supplied config object into the solver's working fields.
     */
    private void initializeConfig() {
        this.varDecay = this.config.varDecay;
        this.varInc = this.config.varInc;
        // note: the solver field is named ccminMode while the config calls it clauseMin
        this.ccminMode = this.config.clauseMin;
        this.restartFirst = this.config.restartFirst;
        this.restartInc = this.config.restartInc;
        this.clauseDecay = this.config.clauseDecay;
        this.removeSatisfied = this.config.removeSatisfied;
        this.learntsizeFactor = this.config.learntsizeFactor;
        this.learntsizeInc = this.config.learntsizeInc;
        this.incremental = this.config.incremental;
    }
}
public class MessageProcessor { /** * Gets the SearchResultsObjectPool instance * @ return the object pool */ public SearchResultsObjectPool getSearchResultsObjectPool ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getSearchResultsObjectPool" ) ; SibTr . exit ( tc , "getSearchResultsObjectPool" , _connectionToMP ) ; } return _searchResultsPool ;
public class LUDecomposition { /** * Return pivot permutation vector as a one - dimensional double array * @ return ( double ) piv */ public double [ ] getDoublePivot ( ) { } }
double [ ] vals = new double [ m ] ; for ( int i = 0 ; i < m ; i ++ ) { vals [ i ] = ( double ) piv [ i ] ; } return vals ;
public class CmsSynchronize {

    /**
     * Imports a resource from the FS to the VFS and updates the
     * synchronization lists.<p>
     *
     * @param res the resource to be exported
     * @throws CmsSynchronizeException if the resource could not be synchronized
     * @throws CmsException if something goes wrong
     */
    private void updateFromRfs(CmsResource res) throws CmsSynchronizeException, CmsException {

        CmsFile vfsFile;
        // to get the name of the file in the FS, we must look it up in the
        // sync list. This is necessary, since the VFS could use a translated
        // filename.
        String resourcename = m_cms.getSitePath(res);
        CmsSynchronizeList sync = m_syncList.get(translate(resourcename));
        File fsFile = getFileInRfs(sync.getResName());

        // report progress: running counter, action label, resource name, dots
        m_report.print(
            org.opencms.report.Messages.get().container(
                org.opencms.report.Messages.RPT_SUCCESSION_1,
                String.valueOf(m_count++)),
            I_CmsReport.FORMAT_NOTE);
        m_report.print(Messages.get().container(Messages.RPT_UPDATE_FILE_0), I_CmsReport.FORMAT_NOTE);
        m_report.print(
            org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_ARGUMENT_1, resourcename));
        m_report.print(org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_DOTS_0));

        // lock the file in the VFS, so that it can be updated
        m_cms.lockResource(resourcename);
        // read the file in the VFS
        vfsFile = m_cms.readFile(resourcename, CmsResourceFilter.IGNORE_EXPIRATION);
        // import the content from the FS
        try {
            vfsFile.setContents(CmsFileUtil.readFile(fsFile));
        } catch (IOException e) {
            throw new CmsSynchronizeException(
                Messages.get().container(Messages.ERR_IMPORT_1, fsFile.getName()));
        }
        m_cms.writeFile(vfsFile);
        // now check if there is some external method to be called which
        // should modify the updated resource in the VFS
        Iterator<I_CmsSynchronizeModification> i = m_synchronizeModifications.iterator();
        while (i.hasNext()) {
            try {
                i.next().modifyVfs(m_cms, vfsFile, fsFile);
            } catch (CmsSynchronizeException e) {
                // a failing modification handler aborts the remaining handlers
                // but not the overall update
                if (LOG.isInfoEnabled()) {
                    LOG.info(
                        Messages.get().getBundle().key(
                            Messages.LOG_SYNCHRONIZE_UPDATE_FAILED_1,
                            res.getRootPath()),
                        e);
                }
                break;
            }
        }
        // everything is done now, so unlock the resource
        // read the resource again, necessary to get the actual timestamp
        m_cms.setDateLastModified(resourcename, fsFile.lastModified(), false);
        res = m_cms.readResource(resourcename, CmsResourceFilter.IGNORE_EXPIRATION);

        // add resource to synchronization list
        CmsSynchronizeList syncList = new CmsSynchronizeList(
            sync.getResName(),
            translate(resourcename),
            res.getDateLastModified(),
            fsFile.lastModified());
        m_newSyncList.put(translate(resourcename), syncList);
        // and remove it from the old one
        m_syncList.remove(translate(resourcename));
        vfsFile = null;
        // report success
        m_report.println(
            org.opencms.report.Messages.get().container(org.opencms.report.Messages.RPT_OK_0),
            I_CmsReport.FORMAT_OK);
    }
}
public class ClassificationExpressionObject { /** * Get classification code . * @ param cls The instance of classification to get code . ( NotNull ) * @ return The code of classification . ( NotNull : if not classification , throws exception ) */ public String code ( Object cls ) { } }
assertArgumentNotNull ( "cls" , cls ) ; assertCanBeClassification ( cls ) ; return ( ( Classification ) cls ) . code ( ) ;
public class AutoMlClient { /** * Deletes a dataset and all of its contents . Returns empty response in the * [ response ] [ google . longrunning . Operation . response ] field when it completes , and ` delete _ details ` * in the [ metadata ] [ google . longrunning . Operation . metadata ] field . * < p > Sample code : * < pre > < code > * try ( AutoMlClient autoMlClient = AutoMlClient . create ( ) ) { * DatasetName name = DatasetName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ DATASET ] " ) ; * autoMlClient . deleteDatasetAsync ( name ) . get ( ) ; * < / code > < / pre > * @ param name The resource name of the dataset to delete . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , OperationMetadata > deleteDatasetAsync ( DatasetName name ) { } }
DeleteDatasetRequest request = DeleteDatasetRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return deleteDatasetAsync ( request ) ;
public class KeenClient {

    /**
     * Handles a failure in the Keen library. If the client is running in debug mode,
     * this immediately throws a runtime exception. Otherwise this logs an error
     * message and, if the callback is non-null, notifies it of the failure. Any
     * exceptions thrown by the callback are silently ignored.
     *
     * @param callback A callback; may be null.
     * @param project The project in which the event was published; may be null if
     *        the client's default project was used.
     * @param eventCollection The name of the collection in which the event was published.
     * @param event The event's key/value pairs.
     * @param keenProperties Key/value pairs that override default Keen properties.
     * @param e The exception which caused the failure.
     */
    private void handleFailure(KeenCallback callback, KeenProject project, String eventCollection,
            Map<String, Object> event, Map<String, Object> keenProperties, Exception e) {
        if (isDebugMode) {
            // Debug mode fails fast: rethrow unchecked exceptions as-is,
            // wrap checked ones so the caller always sees a RuntimeException.
            if (e instanceof RuntimeException) {
                throw (RuntimeException) e;
            } else {
                throw new RuntimeException(e);
            }
        } else {
            // Delegate to the two-argument overload first — presumably that is
            // where the plain (non-detailed) callback notification happens;
            // confirm against the overload's implementation.
            handleFailure(callback, e);
            KeenLogging.log("Encountered error: " + e.getMessage());
            if (callback != null) {
                try {
                    // Detailed callbacks additionally receive the full event context.
                    if (callback instanceof KeenDetailedCallback) {
                        ((KeenDetailedCallback) callback).onFailure(project, eventCollection,
                                event, keenProperties, e);
                    }
                } catch (Exception userException) {
                    // Deliberately swallowed: a misbehaving user callback must not
                    // break the library's failure handling. Do nothing. Issue #98
                }
            }
        }
    }
}
public class CmsPublishScheduled { /** * Sets the title of the dialog . < p > * @ param singleKey the key for the single operation */ public void setDialogTitle ( String singleKey ) { } }
// generate title using the resource name as parameter for the key String resourceName = CmsStringUtil . formatResourceName ( getParamResource ( ) , 50 ) ; setParamTitle ( key ( singleKey , new Object [ ] { resourceName } ) ) ;
public class MeterCollection {

    /**
     * Removes a meter and its management artifacts. There are 3 objectives:
     * 1) Remove it from the concurrent map, {@code meters}.
     * 2) Un-register the MXBean for the specified type of meter
     *    (e.g. ServletStats, ThreadPoolStats, etc.).
     * 3) Remove the MXBean from the list of bundle-specific MBeans, so when a
     *    bundle is removed all of its MXBeans can be cleaned up.
     *
     * @param key the meter key
     */
    public void remove(String key) {
        T mBeanImpl = null;
        ObjectName objectName = null;
        try {
            // 1) Get and remove the mBeanImpl object from the meters map.
            if ((mBeanImpl = meters.remove(key)) != null) {
                // 2) Un-register the MXBean for the specified type of meter.
                // NOTE(review): MXBeanHelper looks like a helper method that both
                // unregisters and returns the bean's ObjectName — confirm its contract.
                objectName = MXBeanHelper(mBeanImpl.getClass().getSimpleName(), key, UNREGISTER_MXBEAN, null);
            }
            // 3) Remove from the map maintaining bundle-specific MBeans.
            Set<ObjectName> s = MonitoringFrameworkExtender.mxmap.get(monitor);
            if (s != null && objectName != null) {
                s.remove(objectName);
            }
        } catch (Throwable t) {
            // Best-effort cleanup: failures are traced but never propagated.
            if (tc.isDebugEnabled()) {
                Tr.debug(tc, t.getMessage());
            }
        }
    }
}
public class These {

    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked")
    public final <Z> These<Z, B> biMapL(Function<? super A, ? extends Z> fn) {
        // Delegate to the Bifunctor interface's default implementation; the
        // unchecked cast narrows its return type back to These<Z, B>, which is
        // safe because biMapL only maps the left type parameter.
        return (These<Z, B>) Bifunctor.super.biMapL(fn);
    }
}
public class ZPoller { /** * Registers a SelectableChannel for polling on specified events . * @ param channel the registering channel . * @ param handler the events handler for this channel * @ param events the events to listen to , as a mask composed by ORing POLLIN , POLLOUT and POLLERR . * @ return true if registered , otherwise false */ public final boolean register ( final SelectableChannel channel , final EventsHandler handler , final int events ) { } }
if ( channel == null ) { return false ; } return add ( channel , create ( channel , handler , events ) ) ;
public class LettuceLists { /** * Creates a new { @ link ArrayList } containing all elements from { @ code elements } . * @ param elements the elements that the list should contain , must not be { @ literal null } . * @ param < T > the element type * @ return a new { @ link ArrayList } containing all elements from { @ code elements } . */ @ SafeVarargs public static < T > List < T > newList ( T ... elements ) { } }
LettuceAssert . notNull ( elements , "Elements must not be null" ) ; List < T > list = new ArrayList < > ( elements . length ) ; Collections . addAll ( list , elements ) ; return list ;
public class XMLParser { /** * / * ( non - Javadoc ) * @ see com . abubusoft . kripton . xml . XmlPullParser # setFeature ( java . lang . String , boolean ) */ @ Override public void setFeature ( String feature , boolean value ) { } }
if ( XmlPullParser . FEATURE_PROCESS_NAMESPACES . equals ( feature ) ) { processNsp = value ; } else if ( XmlPullParser . FEATURE_PROCESS_DOCDECL . equals ( feature ) ) { processDocDecl = value ; } else if ( FEATURE_RELAXED . equals ( feature ) ) { relaxed = value ; } else { throw new KriptonRuntimeException ( "unsupported feature: " + feature , true , this . getLineNumber ( ) , this . getColumnNumber ( ) , getPositionDescription ( ) , null ) ; }
public class DTMManagerDefault {

    /**
     * Add a DTM to the DTM table.
     *
     * @param dtm Should be a valid reference to a DTM.
     * @param id Integer DTM ID to be bound to this DTM.
     * @param offset Integer addressing offset. The internal DTM Node ID is
     *        obtained by adding this offset to the node-number field of the
     *        public DTM Handle. For the first DTM ID accessing each DTM, this is 0;
     *        for overflow addressing it will be a multiple of 1&lt;&lt;IDENT_DTM_NODE_BITS.
     */
    synchronized public void addDTM(DTM dtm, int id, int offset) {
        if (id >= IDENT_MAX_DTMS) {
            // TODO: %REVIEW% Not really the right error message.
            throw new DTMException(
                XMLMessages.createXMLMessage(XMLErrorResources.ER_NO_DTMIDS_AVAIL, null));
            // "No more DTM IDs are available!");
        }

        // We used to just allocate the array size to IDENT_MAX_DTMS.
        // But we expect to increase that to 16 bits, and I'm not willing
        // to allocate that much space unless needed. We could use one of our
        // handy-dandy Fast*Vectors, but this will do for now.
        // %REVIEW%
        int oldlen = m_dtms.length;
        if (oldlen <= id) {
            // Grow both parallel arrays (DTMs and their offsets) together.
            // Various growth strategies are possible. I think we don't want
            // to over-allocate excessively, and I'm willing to reallocate
            // more often to get that. See also Fast*Vector classes.
            // %REVIEW% Should throw a more diagnostic error if we go over the max...
            int newlen = Math.min((id + 256), IDENT_MAX_DTMS);

            DTM new_m_dtms[] = new DTM[newlen];
            System.arraycopy(m_dtms, 0, new_m_dtms, 0, oldlen);
            m_dtms = new_m_dtms;

            int new_m_dtm_offsets[] = new int[newlen];
            System.arraycopy(m_dtm_offsets, 0, new_m_dtm_offsets, 0, oldlen);
            m_dtm_offsets = new_m_dtm_offsets;
        }

        m_dtms[id] = dtm;
        m_dtm_offsets[id] = offset;
        dtm.documentRegistration();
        // The DTM should have been told who its manager was when we created it.
        // Do we need to allow for adopting DTMs _not_ created by this manager?
    }
}
public class JMMap {

    /**
     * Builds a new flat-key map from the given map.
     *
     * @param map the source map
     * @return the flat-key map
     */
    public static Map<String, Object> newFlatKeyMap(Map<String, ?> map) {
        // Delegate to the two-argument overload with a fresh, empty destination map.
        return newFlatKeyMap(new HashMap<>(), map);
    }
}
public class AfplibFactoryImpl {

    /**
     * Converts a PTD1YPBASE data value to its string representation.
     * Code is EMF-generated; do not hand-edit beyond comments.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String convertPTD1YPBASEToString(EDataType eDataType, Object instanceValue) {
        // Null-safe default serialization via toString().
        return instanceValue == null ? null : instanceValue.toString();
    }
}
public class CommerceNotificationQueueEntryUtil {

    /**
     * Returns the last commerce notification queue entry in the ordered set
     * where sentDate &lt; &#63;.
     *
     * @param sentDate the sent date
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching commerce notification queue entry
     * @throws NoSuchNotificationQueueEntryException if a matching commerce
     *         notification queue entry could not be found
     */
    public static CommerceNotificationQueueEntry findByLtS_Last(Date sentDate,
            OrderByComparator<CommerceNotificationQueueEntry> orderByComparator)
        throws com.liferay.commerce.notification.exception.NoSuchNotificationQueueEntryException {
        // Thin static facade: all work is delegated to the persistence layer.
        return getPersistence().findByLtS_Last(sentDate, orderByComparator);
    }
}
public class StorageUtils { /** * Returns individual application cache directory ( for only image caching from ImageLoader ) . Cache directory will be * created on SD card < i > ( " / Android / data / [ app _ package _ name ] / cache / uil - images " ) < / i > if card is mounted and app has * appropriate permission . Else - Android defines cache directory on device ' s file system . * @ param context Application context * @ param cacheDir Cache directory path ( e . g . : " AppCacheDir " , " AppDir / cache / images " ) * @ return Cache { @ link File directory } */ public static File getIndividualCacheDirectory ( Context context , String cacheDir ) { } }
File appCacheDir = getCacheDirectory ( context ) ; File individualCacheDir = new File ( appCacheDir , cacheDir ) ; if ( ! individualCacheDir . exists ( ) ) { if ( ! individualCacheDir . mkdir ( ) ) { individualCacheDir = appCacheDir ; } } return individualCacheDir ;
public class AtomicGrowingSparseHashMatrix {

    /**
     * {@inheritDoc}
     */
    public double[][] toDenseArray() {
        // Snapshot the current dimensions, then lock every row so the matrix
        // cannot change while it is being copied out.
        int r = rows.get();
        int c = cols.get();
        for (int i = 0; i < r; ++i)
            lockRow(i, c);
        // Row arrays are assigned individually below, hence the 0 column size here.
        double[][] m = new double[r][0];
        for (int i = 0; i < r; ++i) {
            DoubleVector row = getRowVector(i);
            // Ensure that we see a consistent length for all the rows
            if (row.length() != c)
                row = Vectors.subview(row, 0, c);
            m[i] = row.toArray();
        }
        // Release the locks in the same order they were taken.
        for (int i = 0; i < r; ++i)
            unlockRow(i, c);
        return m;
    }
}
public class ListDomainNamesResult { /** * List of Elasticsearch domain names . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDomainNames ( java . util . Collection ) } or { @ link # withDomainNames ( java . util . Collection ) } if you want to * override the existing values . * @ param domainNames * List of Elasticsearch domain names . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListDomainNamesResult withDomainNames ( DomainInfo ... domainNames ) { } }
if ( this . domainNames == null ) { setDomainNames ( new java . util . ArrayList < DomainInfo > ( domainNames . length ) ) ; } for ( DomainInfo ele : domainNames ) { this . domainNames . add ( ele ) ; } return this ;
public class LeaderAppender {

    /**
     * Handles an append response.
     */
    protected void handleAppendResponse(MemberState member, AppendRequest request, AppendResponse response) {
        // Record a heartbeat for the responding member first — per the original
        // comment this may trigger pending commit futures — then run the shared
        // response handling in the superclass.
        updateHeartbeatTime(member, null);
        super.handleAppendResponse(member, request, response);
    }
}
public class CmsJspLoginPersistingBean { /** * Gets the path to use for the authorization cookie , optionally resolving any macros used . < p > * @ param resolveMacros if true , macros should be resolved * @ return the authorization cookie path */ public String getCookiePath ( boolean resolveMacros ) { } }
String result = m_cookiePath ; if ( resolveMacros ) { CmsMacroResolver resolver = new CmsMacroResolver ( ) ; // add special mappings for macros resolver . addMacro ( "CONTEXT_NAME" , OpenCms . getSystemInfo ( ) . getContextPath ( ) ) ; resolver . addMacro ( "SERVLET_NAME" , OpenCms . getSystemInfo ( ) . getServletPath ( ) ) ; result = resolver . resolveMacros ( result ) ; } return result ;
public class CRC32 {

    /**
     * Builds the 256-entry lookup table for fast CRC computation, one entry per
     * possible byte value, using the reflected polynomial 0xEDB88320.
     */
    private static int[] make_crc_table() {
        final int POLYNOMIAL = 0xedb88320;
        final int[] table = new int[256];
        for (int n = 0; n < 256; n++) {
            int c = n;
            // Process 8 bit positions: shift right, XOR-ing in the polynomial
            // whenever the low bit was set.
            for (int k = 0; k < 8; k++) {
                c = ((c & 1) != 0) ? (POLYNOMIAL ^ (c >>> 1)) : (c >>> 1);
            }
            table[n] = c;
        }
        return table;
    }
}
public class FactoryDistort {

    /**
     * Creates a {@link boofcv.alg.distort.ImageDistort} for the specified image type,
     * transformation and interpolation instance.
     *
     * @param cached If true the distortion is only computed once. False for recomputed
     *        each time, but less memory.
     * @param interp Which interpolation algorithm should be used.
     * @param outputType Type of output image.
     * @return ImageDistort
     */
    public static <Input extends ImageBase<Input>, Output extends ImageBase<Output>>
    ImageDistort<Input, Output> distort(boolean cached, InterpolatePixel<Input> interp,
            ImageType<Output> outputType) {
        // Dispatch on the image family; each family has its own distort factory.
        switch (outputType.getFamily()) {
            case GRAY:
                // Single-band images require a single-band interpolator.
                return distortSB(cached, (InterpolatePixelS) interp, outputType.getImageClass());
            case PLANAR:
                // Planar images are processed band-by-band with a single-band interpolator.
                return distortPL(cached, (InterpolatePixelS) interp, outputType.getImageClass());
            case INTERLEAVED:
                // Interleaved images need a multi-band interpolator; reject the
                // single-band variant explicitly with a clear message.
                if (interp instanceof InterpolatePixelS)
                    throw new IllegalArgumentException("Interpolation function for single band images was"
                            + " passed in for an interleaved image");
                return distortIL(cached, (InterpolatePixelMB) interp, (ImageType) outputType);
            default:
                throw new IllegalArgumentException("Unknown image family " + outputType.getFamily());
        }
    }
}
public class AWSCloud9Client {

    /**
     * Gets a list of AWS Cloud9 development environment identifiers.
     *
     * @param request the ListEnvironments request
     * @return Result of the ListEnvironments operation returned by the service.
     * @throws BadRequestException The target request is invalid.
     * @throws ConflictException A conflict occurred.
     * @throws NotFoundException The target resource cannot be found.
     * @throws ForbiddenException An access permissions issue occurred.
     * @throws TooManyRequestsException Too many service requests were made over the given time period.
     * @throws LimitExceededException A service limit was exceeded.
     * @throws InternalServerErrorException An internal server error occurred.
     * @sample AWSCloud9.ListEnvironments
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloud9-2017-09-23/ListEnvironments"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public ListEnvironmentsResult listEnvironments(ListEnvironmentsRequest request) {
        // Standard generated-SDK pattern: run the pre-execution hooks (request
        // handlers, metrics) and then dispatch to the marshalling/HTTP layer.
        request = beforeClientExecution(request);
        return executeListEnvironments(request);
    }
}
public class XtypeFactoryImpl {

    /**
     * Generated EMF factory dispatch: instantiates the concrete model object for
     * the requested classifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EObject create(EClass eClass) {
        switch (eClass.getClassifierID()) {
            case XtypePackage.XFUNCTION_TYPE_REF:
                return createXFunctionTypeRef();
            case XtypePackage.XCOMPUTED_TYPE_REFERENCE:
                return createXComputedTypeReference();
            case XtypePackage.XIMPORT_SECTION:
                return createXImportSection();
            case XtypePackage.XIMPORT_DECLARATION:
                return createXImportDeclaration();
            default:
                // Classifiers not owned by this package are a caller error.
                throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
        }
    }
}
public class SimpleDenseCellName {

    /**
     * Copies this cell name using the given allocator.
     * Original note: we might want to try to do better.
     */
    @Override
    public CellName copy(CFMetaData cfm, AbstractAllocator allocator) {
        // Deep-copy the single name component into memory owned by the allocator.
        return new SimpleDenseCellName(allocator.clone(element));
    }
}
public class BuilderImpl {

    /**
     * Builds a JWT from this builder's current claims and the configuration
     * identified by {@code configId}.
     *
     * @see com.ibm.ws.security.jwt.internal.Builder#buildJwt()
     */
    @Override
    public JwtToken buildJwt() throws JwtException, InvalidBuilderException {
        // Create JWT here
        // TODO check for default claims?
        // Resolve the builder configuration, then let TokenImpl assemble and
        // sign the token from this builder's state.
        JwtConfig config = getConfig(configId);
        JwtToken jwt = new TokenImpl(this, config);
        return jwt;
    }
}
public class Job {

    /**
     * Invoke this method from within the {@code run} method of a <b>generator
     * job</b> in order to declare that some value will be provided asynchronously
     * by some external agent.
     *
     * @param <F> The type of the asynchronously provided value.
     * @param klass A {@link Class} object used to specify the type of the
     *        asynchronously provided value.
     * @return A {@code PromisedValue} that represents an empty value slot that
     *         will be filled at a later time when the external agent invokes
     *         {@link PipelineService#submitPromisedValue(String, Object)}. This
     *         may be passed in to further invocations of {@code futureCall()} in
     *         order to specify a data dependency.
     * @deprecated Use {@code newPromise()} instead.
     */
    @Deprecated
    public <F> PromisedValue<F> newPromise(Class<F> klass) {
        // Create the promise slot bound to this pipeline, this job and the
        // current run attempt (so retries get fresh slots).
        PromisedValueImpl<F> promisedValue =
            new PromisedValueImpl<>(getPipelineKey(), thisJobRecord.getKey(), currentRunGUID);
        // Register the slot with the non-transactional update group so it is
        // persisted when the job's update spec is saved.
        updateSpec.getNonTransactionalGroup().includeSlot(promisedValue.getSlot());
        return promisedValue;
    }
}
public class ProviderRegistry { /** * Called when a Liberty user defined feature provider is set or unset * @ param providerService the provider if set , null if unset */ public AuthConfigProvider setProvider ( ProviderService providerService ) { } }
AuthConfigProvider authConfigProvider = null ; if ( providerService != null ) { authConfigProvider = providerService . getAuthConfigProvider ( this ) ; registerConfigProvider ( authConfigProvider , null , null , null ) ; } else { removeRegistration ( defaultRegistrationID . toString ( ) ) ; } return authConfigProvider ;
public class EnvLoader { /** * Returns the environment name . */ public static String getEnvironmentName ( ClassLoader loader ) { } }
for ( ; loader != null ; loader = loader . getParent ( ) ) { if ( loader instanceof EnvironmentClassLoader ) { String name = ( ( EnvironmentClassLoader ) loader ) . getId ( ) ; if ( name != null ) return name ; else return "" ; } } return Thread . currentThread ( ) . getContextClassLoader ( ) . toString ( ) ;
public class LoggingContext { /** * Gets the starting calling stack trace element of a given stack which * matches the given class name . Given the wrapper class name , the match * continues until the last stack trace element of the wrapper class is * matched . * @ param stackClass * - The class to be matched for . Get the last matching class * down the stack * @ return - StackTraceElement which denotes the calling point of given * class or wrapper class */ public StackTraceElement getStackTraceElement ( Class stackClass ) { } }
Stopwatch s = stackTraceTimer . start ( ) ; Throwable t = new Throwable ( ) ; StackTraceElement [ ] stArray = t . getStackTrace ( ) ; int stackSize = stArray . length ; StackTraceElement st = null ; for ( int i = 0 ; i < stackSize ; i ++ ) { boolean found = false ; while ( stArray [ i ] . getClassName ( ) . equals ( stackClass . getName ( ) ) ) { ++ i ; found = true ; } if ( found ) { st = stArray [ i ] ; } } s . stop ( ) ; return st ;
public class Wxs {

    /**
     * Verifies the signature of an XML payload returned by the WeChat payment
     * platform and, when valid, converts it into a map.
     *
     * @param xml the XML returned by the payment platform
     * @param key the merchant secret key
     * @return the validated map
     * @throws RuntimeException "e.wx.sign.invalid" when the signature check fails
     *         (raised by {@link #checkPayReturnMap(NutMap, String)}), or
     *         "e.wx.pay.re.error" when the XML cannot be parsed
     * @see #checkPayReturnMap(NutMap, String)
     * @see <a href="https://pay.weixin.qq.com/wiki/doc/api/jsapi.php?chapter=9_1">payment platform docs</a>
     */
    public static NutMap checkPayReturn(String xml, String key) {
        try {
            // Parse the XML string and flatten its root element into a NutMap.
            NutMap map = Xmls.asMap(
                xmls().parse(new InputSource(new StringReader(xml))).getDocumentElement());
            // Delegate the actual signature verification.
            return checkPayReturnMap(map, key);
        } catch (Exception e) {
            // NOTE(review): the original cause is dropped here; only the raw XML
            // is carried in the new exception's message.
            throw Lang.makeThrow("e.wx.pay.re.error : %s", xml);
        }
    }
}
public class VisOdomMonoPlaneInfinity {

    /**
     * Checks for motion which can't be caused by rotations along the y-axis alone.
     * This is done by adjusting the pointing vector in the plane reference frame so
     * that it has the same y component as when the track was spawned and its x-z
     * components are normalized to one (ensuring consistent units). That pointing
     * vector is then projected back into the image and a pixel difference computed.
     *
     * @param t        the tracked point whose current pixel location is compared against
     * @param pointing pointing vector of the observation in the plane reference frame
     * @return true when the reprojection error is below the pixel threshold, i.e.
     *         the motion is consistent with a pure rotation about the y-axis
     */
    protected boolean isRotationFromAxisY(PointTrack t, Vector3D_F64 pointing) {

        VoTrack p = t.getCookie();

        // remove rotations not along x-z plane: renormalize x-z to unit length
        // and restore the y component recorded when the track was spawned
        double normXZ = Math.sqrt(pointing.x * pointing.x + pointing.z * pointing.z);
        pointingAdj.set(pointing.x / normXZ, p.pointingY, pointing.z / normXZ);
        // Put pointing vector back into camera frame (transpose = inverse rotation)
        GeometryMath_F64.multTran(cameraToPlane.getR(), pointingAdj, pointingAdj);

        // compute normalized image coordinates
        n.x = pointingAdj.x / pointingAdj.z;
        n.y = pointingAdj.y / pointingAdj.z;

        // compute pixel of projected point
        normToPixel.compute(n.x, n.y, pixel);

        // compute error; compare squared distances to avoid the square root
        double error = pixel.distance2(t);

        return error < thresholdPixelError * thresholdPixelError;
    }
}
public class SplitMergeLineFitLoop { /** * Computes the distance between pairs of points which are separated by 1/2 the contour list . The index of the * first pixel in the pair with the greatest distance is returned * @ return Index of the first pixel which should be used to split the list . The other end is ret + N / 2 */ protected int selectFarthest ( List < Point2D_I32 > contour ) { } }
int bestIndex = - 1 ; int bestDistance = 0 ; int N = contour . size ( ) ; int half = N / 2 ; for ( int i = 0 ; i < half ; i ++ ) { int end = ( i + half ) % N ; Point2D_I32 a = contour . get ( i ) ; Point2D_I32 b = contour . get ( end ) ; int dist = UtilPoint2D_I32 . distanceSq ( a . x , a . y , b . x , b . y ) ; if ( bestDistance < dist ) { bestIndex = i ; bestDistance = dist ; } } // if ( bestIndex = = - 1 ) // System . out . println ( ) ; return bestIndex ;
public class InputLine { /** * Handle backspace . These are not control sequences , so must be handled separately * from those . * @ param ch The character code point . * @ return True if handled . */ private boolean handleBackSpace ( int ch ) { } }
if ( ch == Char . DEL || ch == Char . BS ) { // backspace . . . if ( before . length ( ) > 0 ) { before = before . substring ( 0 , before . length ( ) - 1 ) ; printInputLine ( ) ; } return true ; } return false ;
public class WImage {

    /**
     * Creates a dynamic URL that the image can be loaded from. In fact the URL
     * points to the main application servlet, but includes a non-null value for
     * the parameter associated with this WComponent (ie, its label). The
     * handleRequest method detects this when the browser requests the image.
     *
     * @return the url to load the image from
     */
    public String getTargetUrl() {
        // An explicitly configured external URL wins outright.
        if (getImageUrl() != null) {
            return getImageUrl();
        }

        // Internal resources know their own serving URL.
        Image image = getImage();
        if (image instanceof InternalResource) {
            return ((InternalResource) image).getTargetUrl();
        }

        // Otherwise build a servlet URL carrying this component's target id.
        Environment env = getEnvironment();
        Map<String, String> parameters = env.getHiddenParameters();
        parameters.put(Environment.TARGET_ID, getTargetId());

        if (Util.empty(getCacheKey())) {
            // Add some randomness to the URL to prevent caching
            String random = WebUtilities.generateRandom();
            parameters.put(Environment.UNIQUE_RANDOM_PARAM, random);
        } else {
            // Remove step counter as not required for cached content
            parameters.remove(Environment.STEP_VARIABLE);
            parameters.remove(Environment.SESSION_TOKEN_VARIABLE);
            // Add the cache key
            parameters.put(Environment.CONTENT_CACHE_KEY, getCacheKey());
        }

        // this variable needs to be set in the portlet environment.
        String url = env.getWServletPath();
        return WebUtilities.getPath(url, parameters, true);
    }
}
public class ConfigArgP { /** * Returns the default { @ link ConfigurationItem } for the passed name * which can be the item ' s key or cl - option * @ param name The item ' s key or cl - option * @ return The named ConfigurationItem or null if one was not found */ public ConfigurationItem getDefaultItem ( final String name ) { } }
if ( name == null ) throw new IllegalArgumentException ( "The passed name was null" ) ; return defaultConfItems . get ( name ) ;
public class OvercastProperties { /** * Get set of property names directly below path . */ public static Set < String > getOvercastPropertyNames ( final String path ) { } }
Config overcastConfig = getOvercastConfig ( ) ; if ( ! overcastConfig . hasPath ( path ) ) { return new HashSet < > ( ) ; } Config cfg = overcastConfig . getConfig ( path ) ; Set < String > result = new HashSet < > ( ) ; for ( Map . Entry < String , ConfigValue > e : cfg . entrySet ( ) ) { result . add ( ConfigUtil . splitPath ( e . getKey ( ) ) . get ( 0 ) ) ; } return result ;
public class TokVariable { /** * { @ inheritDoc } */ @ Override public boolean accepts ( final char s ) { } }
return Character . isLetter ( s ) || Character . isDigit ( s ) || s == '_' || s == '.' ;
public class WPartialDateField { /** * Returns the java date value , else null if the value cannot be parsed . * @ return the java date or null */ public Date getDate ( ) { } }
if ( getYear ( ) != null && getMonth ( ) != null && getDay ( ) != null ) { return DateUtilities . createDate ( getDay ( ) , getMonth ( ) , getYear ( ) ) ; } return null ;
public class KernelMatrix {

    /**
     * Get the kernel similarity for the given objects.
     *
     * @param id1 First object
     * @param id2 Second object
     * @return Similarity.
     */
    public double getSimilarity(DBIDRef id1, DBIDRef id2) {
        // Translate both DBIDs to matrix offsets and look up the precomputed value.
        return kernel[idmap.getOffset(id1)][idmap.getOffset(id2)];
    }
}
public class LoadBundleHandler {

    /**
     * Loads a resource bundle (using the view root's locale when available,
     * otherwise the default locale) and exposes it in the request map under the
     * name given by the {@code var} attribute. See taglib documentation.
     *
     * @see javax.faces.view.facelets.FaceletHandler#apply(javax.faces.view.facelets.FaceletContext,
     *      javax.faces.component.UIComponent)
     */
    public void apply(FaceletContext ctx, UIComponent parent)
            throws IOException, FacesException, FaceletException, ELException {
        UIViewRoot root = ComponentSupport.getViewRoot(ctx, parent);
        ResourceBundle bundle = null;
        try {
            String name = this.basename.getValue(ctx);
            ClassLoader cl = ClassUtils.getContextClassLoader();
            // Prefer the view's locale; fall back to the JVM default when the
            // view root (or its locale) is unavailable.
            if (root != null && root.getLocale() != null) {
                bundle = ResourceBundle.getBundle(name, root.getLocale(), cl);
            } else {
                bundle = ResourceBundle.getBundle(name, Locale.getDefault(), cl);
            }
        } catch (Exception e) {
            // Surface any lookup failure as a tag attribute error pointing at basename.
            throw new TagAttributeException(this.tag, this.basename, e);
        }
        // Wrap the bundle as a Map and publish it under the configured variable name.
        ResourceBundleMap map = new ResourceBundleMap(bundle);
        FacesContext faces = ctx.getFacesContext();
        faces.getExternalContext().getRequestMap().put(this.var.getValue(ctx), map);
    }
}
public class FileUtils {
    /**
     * Get a file path from a Uri. This will get the the path for Storage Access
     * Framework Documents, as well as the _data field for the MediaStore and
     * other file-based ContentProviders.
     *
     * @param context The context.
     * @param uri The Uri to query.
     * @author paulburke
     */
    @SuppressLint("NewApi")
    public static String getPath(final Context context, final Uri uri) {
        // Document URIs only exist on KitKat (API 19) and later.
        final boolean isKitKat = BuildUtils.isKitKat();
        // DocumentProvider
        if (isKitKat && DocumentsContract.isDocumentUri(context, uri)) {
            // ExternalStorageProvider
            if (isExternalStorageDocument(uri)) {
                // Document id has the form "<volume>:<relative path>".
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                if ("primary".equalsIgnoreCase(type)) {
                    return Environment.getExternalStorageDirectory() + "/" + split[1];
                }
                // TODO handle non-primary volumes
            }
            // DownloadsProvider
            else if (isDownloadsDocument(uri)) {
                // Document id is the numeric row id in the public downloads table.
                final String id = DocumentsContract.getDocumentId(uri);
                final Uri contentUri = ContentUris.withAppendedId(
                        Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
                return getDataColumn(context, contentUri, null, null);
            }
            // MediaProvider
            else if (isMediaDocument(uri)) {
                // Document id has the form "<media type>:<media row id>".
                final String docId = DocumentsContract.getDocumentId(uri);
                final String[] split = docId.split(":");
                final String type = split[0];
                Uri contentUri = null;
                if ("image".equals(type)) {
                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                } else if ("video".equals(type)) {
                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
                } else if ("audio".equals(type)) {
                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
                }
                // Resolve the _data column by the media row id.
                // NOTE(review): contentUri stays null for unknown media types — getDataColumn
                // presumably tolerates that; confirm.
                final String selection = "_id=?";
                final String[] selectionArgs = new String[] { split[1] };
                return getDataColumn(context, contentUri, selection, selectionArgs);
            }
        }
        // MediaStore (and general)
        else if ("content".equalsIgnoreCase(uri.getScheme())) {
            return getDataColumn(context, uri, null, null);
        }
        // File
        else if ("file".equalsIgnoreCase(uri.getScheme())) {
            return uri.getPath();
        }
        // Unknown scheme or unhandled document authority.
        return null;
    }
}
public class ApiOvhHostingprivateDatabase { /** * Get this object properties * REST : GET / hosting / privateDatabase / { serviceName } / user / { userName } / grant / { databaseName } * @ param serviceName [ required ] The internal name of your private database * @ param userName [ required ] User name used to connect to your databases * @ param databaseName [ required ] Database name where grant is set */ public OvhGrant serviceName_user_userName_grant_databaseName_GET ( String serviceName , String userName , String databaseName ) throws IOException { } }
String qPath = "/hosting/privateDatabase/{serviceName}/user/{userName}/grant/{databaseName}" ; StringBuilder sb = path ( qPath , serviceName , userName , databaseName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhGrant . class ) ;
public class LogObjectPrinter { /** * Builds log string for exception with stack trace . * @ param throwable the exception * @ return exception description string with stack trace */ public static String printExceptionWithStackInfo ( Throwable throwable ) { } }
StringBuilder out = new StringBuilder ( ) ; printExceptionWithStackInfo ( throwable , out ) ; return out . toString ( ) ;
public class TangoGroupCommand {
    /**
     * insert a value per command
     *
     * @param value The values to insert. Size must be equals to the number of commands
     * @throws DevFailed if the number of values does not match the group size,
     *         or a device/command query fails
     */
    public void insert(final Object... value) throws DevFailed {
        if (value.length == 1) {
            // Single value: reuse it for every device in the group.
            for (int i = 0; i < group.get_size(true); i++) {
                // Ask each device which input type its command expects.
                final int arginType = group.get_device(i + 1).command_query(commandName).in_type;
                // NOTE(review): the whole varargs array is forwarded here (not value[0]);
                // presumably InsertExtractUtils.insert unwraps a one-element array — confirm.
                InsertExtractUtils.insert(inData[i], arginType, value);
            }
        } else {
            // One value per device: sizes must line up exactly.
            if (value.length != group.get_size(true)) {
                throw DevFailedUtils.newDevFailed(TANGO_WRONG_DATA_ERROR, group.get_size(true) + " values must be provided");
            }
            for (int i = 0; i < group.get_size(true); i++) {
                final int arginType = group.get_device(i + 1).command_query(commandName).in_type;
                InsertExtractUtils.insert(inData[i], arginType, value[i]);
            }
        }
    }
}
public class CreateEventSubscriptionRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param createEventSubscriptionRequest the request to serialize; must not be null
     * @param protocolMarshaller the protocol-specific marshaller receiving each field
     * @throws SdkClientException if the request is null or any field fails to marshall
     */
    public void marshall(CreateEventSubscriptionRequest createEventSubscriptionRequest, ProtocolMarshaller protocolMarshaller) {
        if (createEventSubscriptionRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Emit each request field under its pre-built protocol binding.
            protocolMarshaller.marshall(createEventSubscriptionRequest.getSubscriptionName(), SUBSCRIPTIONNAME_BINDING);
            protocolMarshaller.marshall(createEventSubscriptionRequest.getSnsTopicArn(), SNSTOPICARN_BINDING);
            protocolMarshaller.marshall(createEventSubscriptionRequest.getSourceType(), SOURCETYPE_BINDING);
            protocolMarshaller.marshall(createEventSubscriptionRequest.getEventCategories(), EVENTCATEGORIES_BINDING);
            protocolMarshaller.marshall(createEventSubscriptionRequest.getSourceIds(), SOURCEIDS_BINDING);
            protocolMarshaller.marshall(createEventSubscriptionRequest.getEnabled(), ENABLED_BINDING);
            protocolMarshaller.marshall(createEventSubscriptionRequest.getTags(), TAGS_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class Humanize { /** * Same as { @ link # nanoTime ( Number ) } for the specified locale . * @ param value * Number of nanoseconds * @ param locale * Target locale * @ return The number preceded by the corresponding SI symbol */ @ Expose public static String nanoTime ( final Number value , final Locale locale ) { } }
return withinLocale ( new Callable < String > ( ) { public String call ( ) { return prefix ( value , 1000 , nanoTimePrefixes ) ; } } , locale ) ;
public class CholeskySparseFactorization {
    /**
     * Solves Q.x = b
     *
     * Uses the stored Cholesky factor L (Q = L.L[T], held column-wise in
     * LcolumnsValues) via forward substitution followed by back substitution.
     * When a rescaler is present the system was factorized in scaled form, so
     * b is scaled on entry and the solution is unscaled on exit.
     *
     * @param b right-hand side vector; must have length dim
     * @return the solution vector x
     * @throws RuntimeException if b has the wrong dimension
     */
    public DoubleMatrix1D solve(DoubleMatrix1D b) {
        if (b.size() != dim) {
            log.error("wrong dimension of vector b: expected " + dim + ", actual " + b.size());
            throw new RuntimeException("wrong dimension of vector b: expected " + dim + ", actual " + b.size());
        }
        // with scaling, we must solve U.Q.U.z = U.b, after that we have x = U.z
        if (this.rescaler != null) {
            // b = ALG.mult(this.U, b);
            b = ColtUtils.diagonalMatrixMult(this.U, b);
        }
        final double[] y = new double[dim];
        // copy b into the working vector y
        System.arraycopy(b.toArray(), 0, y, 0, dim);
        // Solve L.y = b (forward substitution, column-oriented)
        for (int j = 0; j < dim; j++) {
            final double[] LTJ = LcolumnsValues[j];
            y[j] /= LTJ[0]; // the diagonal of the matrix L
            final double yJ = y[j];
            // eliminate column j from the remaining entries;
            // LTJ[i - j] is the sub-diagonal entry L[i][j]
            for (int i = j + 1; i < dim; i++) {
                y[i] -= yJ * LTJ[i - j];
            }
        }
        // Solve L[T].x = y (back substitution, bottom-up)
        final DoubleMatrix1D x = F1.make(dim);
        for (int i = dim - 1; i > -1; i--) {
            final double[] LTI = LcolumnsValues[i];
            double sum = 0;
            for (int j = dim - 1; j > i; j--) {
                sum += LTI[j - i] * x.getQuick(j);
            }
            x.setQuick(i, (y[i] - sum) / LTI[0]);
        }
        if (this.rescaler != null) {
            // return ALG.mult(this.U, x);
            // undo the scaling: x = U.z
            return ColtUtils.diagonalMatrixMult(this.U, x);
        } else {
            return x;
        }
    }
}
public class Parser {
    /**
     * Also inlines the call expression productions
     *
     * Parses a LeftHandSideExpression: a NewExpression optionally followed by
     * any number of call suffixes — argument lists, bracketed member lookups,
     * dotted member accesses, or tagged template literals.
     */
    @SuppressWarnings("incomplete-switch")
    private ParseTree parseLeftHandSideExpression() {
        SourcePosition start = getTreeStartLocation();
        ParseTree operand = parseNewExpression();
        // this test is equivalent to is member expression
        // (a NewExpressionTree without arguments is a bare `new` form and
        // must not absorb call suffixes)
        if (!(operand instanceof NewExpressionTree) || ((NewExpressionTree) operand).arguments != null) {
            // The Call expression productions: keep folding suffixes left-to-right
            while (peekCallSuffix()) {
                switch (peekType()) {
                case OPEN_PAREN:
                    // f(args)
                    ArgumentListTree arguments = parseArguments();
                    operand = new CallExpressionTree(getTreeLocation(start), operand, arguments);
                    break;
                case OPEN_SQUARE:
                    // obj[expr]
                    eat(TokenType.OPEN_SQUARE);
                    ParseTree member = parseExpression();
                    eat(TokenType.CLOSE_SQUARE);
                    operand = new MemberLookupExpressionTree(getTreeLocation(start), operand, member);
                    break;
                case PERIOD:
                    // obj.name (keywords are allowed as property names)
                    eat(TokenType.PERIOD);
                    IdentifierToken id = eatIdOrKeywordAsId();
                    operand = new MemberExpressionTree(getTreeLocation(start), operand, id);
                    break;
                case NO_SUBSTITUTION_TEMPLATE:
                case TEMPLATE_HEAD:
                    // tag`template` — tagged template literal
                    operand = parseTemplateLiteral(operand);
                    break;
                default:
                    // peekCallSuffix() returned true, so one of the cases above must match
                    throw new AssertionError("unexpected case: " + peekType());
                }
            }
        }
        return operand;
    }
}
public class CmsWebdavServlet { /** * Return the relative path associated with this servlet . < p > * @ param request the servlet request we are processing * @ return the relative path of the resource */ private String getRelativePath ( HttpServletRequest request ) { } }
String result = request . getPathInfo ( ) ; if ( result == null ) { // result = request . getServletPath ( ) ; } if ( ( result == null ) || ( result . equals ( "" ) ) ) { result = "/" ; } return ( result ) ;
public class SQLRecoverableUnitSectionImpl {
    /**
     * Forces to the underlying recovery log any information in the target recoverable
     * unit section that has not already been written by a previous call. This ensures
     * that the recovery log contains an up to date copy of the information retained
     * in the target recoverable unit section.
     *
     * The information is written to the underlying recovery log and forced
     * through to persisent storage. After this call, the information is
     * guaranteed to be retrieved during any post-failure recovery processing.
     *
     * This call my be used as part of an optomization when several recoverable unit
     * sections need to be pushed to disk. For example, the following sequence will
     * ensure that recoverable unit sections 1 through 4 are all persisted to
     * physical storage:-
     * <ul>
     * <li>RecoverableUnitSection1.writeSection()</li>
     * <li>RecoverableUnitSection2.writeSection()</li>
     * <li>RecoverableUnitSection3.writeSection()</li>
     * <li>RecoverableUnitSection4.forceSection()</li>
     * </ul>
     *
     * @exception InternalLogException An unexpected error has occured.
     */
    public void force() throws InternalLogException {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "force", this);
        // If the parent recovery log instance has experienced a serious internal error then prevent
        // this operation from executing.
        if (_recLog.failed()) {
            if (tc.isEntryEnabled())
                Tr.exit(tc, "force", this);
            throw new InternalLogException(null);
        }
        try {
            // Delegate the actual persistence to write(); force() is the
            // entry point that adds failure-state checking and tracing.
            this.write();
        } catch (InternalLogException exc) {
            // Record first-failure data capture before re-throwing unchanged.
            FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.SQLRecoverableUnitSectionImpl.force", "509", this);
            if (tc.isEntryEnabled())
                Tr.exit(tc, "force", exc);
            throw exc;
        }
        if (tc.isEntryEnabled())
            Tr.exit(tc, "force");
    }
}
public class POSCrossValidator {
    /**
     * Cross validate when no separate testset is available.
     *
     * Runs k-fold cross validation over the training samples and prints the
     * word accuracy to stdout. On an IO failure while reading the training
     * set the whole process terminates via System.exit(1).
     *
     * @param params the training parameters
     */
    public final void crossValidate(final TrainingParameters params) {
        POSTaggerCrossValidator validator = null;
        try {
            validator = getPOSTaggerCrossValidator(params);
            validator.evaluate(this.trainSamples, this.folds);
        } catch (final IOException e) {
            System.err.println("IO error while loading training set!");
            e.printStackTrace();
            // NOTE(review): exits the JVM from library code; callers cannot recover.
            System.exit(1);
        } finally {
            try {
                // Always release the sample stream, even after a failed evaluation.
                this.trainSamples.close();
            } catch (final IOException e) {
                System.err.println("IO error with the train samples!");
            }
        }
        if (this.detailedListener == null) {
            System.out.println(validator.getWordAccuracy());
        } else {
            // TODO add detailed evaluation here
            // (both branches currently print the same summary accuracy)
            System.out.println(validator.getWordAccuracy());
        }
    }
}
public class CheckOdd {
    /**
     * Java method to verify if a number is odd utilizing bitwise operators.
     *
     * The least-significant bit of an integer is 1 exactly when the number is
     * odd — including negative values, since Java uses two's complement.
     *
     * >>> check_odd(5)
     * True
     * >>> check_odd(6)
     * False
     * >>> check_odd(7)
     * True
     *
     * @param num the value to test
     * @return true if {@code num} is odd, false if it is even
     */
    public static boolean checkOdd(int num) {
        // Idiomatic low-bit test; replaces the obscure but equivalent
        // `(num ^ 1) == (num - 1)` trick.
        return (num & 1) != 0;
    }
}
public class BrokerHelper { /** * Check if the user of the given PBKey was < code > null < / code > , if so we try to * get user / password from the jdbc - connection - descriptor matching the given * PBKey . getAlias ( ) . */ public static PBKey crossCheckPBKey ( PBKey key ) { } }
if ( key . getUser ( ) == null ) { PBKey defKey = MetadataManager . getInstance ( ) . connectionRepository ( ) . getStandardPBKeyForJcdAlias ( key . getAlias ( ) ) ; if ( defKey != null ) { return defKey ; } } return key ;
public class DDPStateSingleton { /** * Handles deleting a document in a collection . * Override if you want to use your own collection data store . * @ param collName collection name * @ param docId document ID * @ return true if doc was deleted , false otherwise */ public boolean removeDoc ( String collName , String docId ) { } }
if ( mCollections . containsKey ( collName ) ) { // remove IDs from collection Map < String , Map < String , Object > > collection = mCollections . get ( collName ) ; if ( BuildConfig . DEBUG ) { log . debug ( "Removed doc: " + docId ) ; } collection . remove ( docId ) ; return true ; } else { log . warn ( "Received invalid removed msg for collection " + collName ) ; return false ; }
public class ReUtil { /** * 给定内容是否匹配正则 * @ param regex 正则 * @ param content 内容 * @ return 正则为null或者 " " 则不检查 , 返回true , 内容为null返回false */ public static boolean isMatch ( String regex , CharSequence content ) { } }
if ( content == null ) { // 提供null的字符串为不匹配 return false ; } if ( StrUtil . isEmpty ( regex ) ) { // 正则不存在则为全匹配 return true ; } // Pattern pattern = Pattern . compile ( regex , Pattern . DOTALL ) ; final Pattern pattern = PatternPool . get ( regex , Pattern . DOTALL ) ; return isMatch ( pattern , content ) ;
public class MultiCameraToEquirectangular {
    /**
     * Adds a camera and attempts to compute the mask from the provided distortion model. if a pixel is rendered
     * outside the bounds in the input image then it is masked out. If the forwards/backwards transform is too
     * different then it is masked out.
     *
     * @param cameraToCommon Rigid body transform from this camera to the common frame the equirectangular image
     * is in
     * @param factory Distortion model
     * @param width Input image width
     * @param height Input image height
     */
    public void addCamera(Se3_F32 cameraToCommon, LensDistortionWideFOV factory, int width, int height) {
        // Forward (pixel -> unit sphere) and inverse (sphere -> pixel) mappings.
        Point2Transform3_F32 p2s = factory.undistortPtoS_F32();
        Point3Transform2_F32 s2p = factory.distortStoP_F32();
        // Maps equirectangular coordinates into this camera's image plane.
        EquiToCamera equiToCamera = new EquiToCamera(cameraToCommon.getR(), s2p);
        // Mask: 1 where this camera contributes a valid pixel, 0 elsewhere.
        GrayF32 equiMask = new GrayF32(equiWidth, equHeight);
        // Cache the per-pixel transform since it is evaluated for every render.
        PixelTransform<Point2D_F32> transformEquiToCam = new PixelTransformCached_F32(equiWidth, equHeight,
                new PointToPixelTransform_F32(equiToCamera));
        Point3D_F32 p3b = new Point3D_F32();
        Point2D_F32 p2 = new Point2D_F32();
        for (int row = 0; row < equHeight; row++) {
            for (int col = 0; col < equiWidth; col++) {
                // Project this equirectangular pixel into the camera image.
                equiToCamera.compute(col, row, p2);
                int camX = (int) (p2.x + 0.5f);
                int camY = (int) (p2.y + 0.5f);
                // Outside the camera image (or numerically invalid): leave masked out.
                if (Double.isNaN(p2.x) || Double.isNaN(p2.y) || camX < 0 || camY < 0 || camX >= width || camY >= height)
                    continue;
                // Map the pixel back onto the unit sphere for the consistency check.
                p2s.compute(p2.x, p2.y, p3b);
                if (Double.isNaN(p3b.x) || Double.isNaN(p3b.y) || Double.isNaN(p3b.z))
                    continue;
                // Accept only if the round-tripped direction agrees with the original
                // ray within the configured angular tolerance.
                double angle = UtilVector3D_F32.acute(equiToCamera.unitCam, p3b);
                if (angle < maskToleranceAngle) {
                    equiMask.set(col, row, 1);
                }
            }
        }
        cameras.add(new Camera(equiMask, transformEquiToCam));
    }
}
public class ResultSetIterator { public void close ( ) throws SQLException { } }
more = false ; if ( ! exclusive ) return ; if ( rs != null ) { rs . close ( ) ; rs = null ; } if ( stmt != null ) { stmt . close ( ) ; stmt = null ; }
public class ServerSentEventService { /** * Removes all URI resources for a given URL * @ param uri The URI resource for the connection */ public void removeConnections ( String uri ) { } }
Objects . requireNonNull ( uri , Required . URI . toString ( ) ) ; this . cache . remove ( Default . SSE_CACHE_PREFIX . toString ( ) + uri ) ;
public class MemoryFS { private void checkContract ( Path path ) { } }
if ( ! path . isAbsolute ( ) ) { throw new IllegalArgumentException ( "EightFileSystem only works with absolute paths " + path ) ; } if ( ( ( EightyFileSystem ) path . getFileSystem ( ) ) . get80 ( ) != this ) { throw new IllegalArgumentException ( "path called on incorrect 80" ) ; }
public class GattSerialMessage { /** * Create a GattSerialMessage from a byte array payload * In this case , " payload " means the properly packed message ID and message payload . * Example : For the message CC _ LED _ WRITE _ ALL ( 0x2001) * GattSerialMessage . fromPayload ( new byte [ ] { * 0x20 , 0x01 , < - - - Message ID big endian * 0x00 , 0x00 , 0x00 , < - - - Payload data little endian * @ param payload Byte array of the message payload */ public static GattSerialMessage fromPayload ( byte [ ] payload ) { } }
Buffer buffer = new Buffer ( ) ; byte [ ] header = new byte [ 2 ] ; header [ 0 ] = ( byte ) ( payload . length & 0xff ) ; header [ 1 ] = 0 ; int crc = computeCRC16 ( header , 0 , header . length ) ; crc = computeCRC16 ( crc , payload , 0 , payload . length ) ; buffer . write ( header ) ; buffer . write ( payload ) ; buffer . writeByte ( crc & 0xff ) ; buffer . writeByte ( ( crc >> 8 ) & 0xff ) ; return new GattSerialMessage ( buffer ) ;
public class JsonDBTemplate {
    /**
     * /* (non-Javadoc)
     *
     * @see org.jsondb.JsonDBOperations#remove(java.util.Collection, java.lang.String)
     *
     * Removes a batch of objects from the named collection: ids are resolved
     * via the collection's @Id getter, persisted removal happens first in the
     * backing JSON file, and only on success are the objects evicted from the
     * in-memory map. The whole operation runs under the collection write lock.
     */
    @Override
    public <T> List<T> remove(Collection<? extends T> batchToRemove, String collectionName) {
        if (null == batchToRemove) {
            throw new InvalidJsonDbApiUsageException("Null Object batch cannot be removed from DB");
        }
        CollectionMetaData cmd = cmdMap.get(collectionName);
        cmd.getCollectionLock().writeLock().lock();
        try {
            @SuppressWarnings("unchecked")
            Map<Object, T> collection = (Map<Object, T>) collectionsRef.get().get(collectionName);
            if (null == collection) {
                throw new InvalidJsonDbApiUsageException("Collection by name '" + collectionName + "' not found. Create collection first.");
            }
            // Collect the ids of batch members that actually exist in the collection.
            Set<Object> removeIds = new HashSet<Object>();
            for (T o : batchToRemove) {
                Object id = Util.getIdForEntity(o, cmd.getIdAnnotatedFieldGetterMethod());
                if (collection.containsKey(id)) {
                    removeIds.add(id);
                }
            }
            // Nothing in the batch is present: null signals "nothing removed".
            if (removeIds.size() < 1) {
                return null;
            }
            JsonWriter jw;
            try {
                jw = new JsonWriter(dbConfig, cmd, collectionName, fileObjectsRef.get().get(collectionName));
            } catch (IOException ioe) {
                logger.error("Failed to obtain writer for " + collectionName, ioe);
                throw new JsonDBException("Failed to save " + collectionName, ioe);
            }
            // Persist the removal to the JSON file before touching the in-memory map,
            // so memory is only mutated once the file write has succeeded.
            boolean substractResult = jw.removeFromJsonFile(collection, removeIds);
            List<T> removedObjects = null;
            if (substractResult) {
                removedObjects = new ArrayList<T>();
                for (Object id : removeIds) {
                    // Don't need to clone it, this object no more exists in the collection
                    removedObjects.add(collection.remove(id));
                }
            }
            return removedObjects;
        } finally {
            cmd.getCollectionLock().writeLock().unlock();
        }
    }
}
public class MonitorableRegistry { /** * Retrieves or creates a centrally - accessible named instance , identified * uniquely by the provided String . This is a convenience method to bridge * between the old - style ' single registry ' model ( see * { @ link # DEFAULT _ REGISTRY } ) and having to pass a MonitorableRegistry down * to the very depths of your class hierarchy . This is especially useful * when instrumenting third - party code which cannot easily get access to a * given MonitorableRegistry from a non - static context . * @ param name * @ return */ public static MonitorableRegistry getNamedInstance ( String name ) { } }
MonitorableRegistry instance = NAMED_INSTANCES . get ( name ) ; if ( instance == null ) { instance = new MonitorableRegistry ( ) ; MonitorableRegistry existing = NAMED_INSTANCES . putIfAbsent ( name , instance ) ; if ( existing != null ) { return existing ; } } return instance ;
public class DeleteAppReplicationConfigurationRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param deleteAppReplicationConfigurationRequest the request to serialize; must not be null
     * @param protocolMarshaller the protocol-specific marshaller receiving the field
     * @throws SdkClientException if the request is null or the field fails to marshall
     */
    public void marshall(DeleteAppReplicationConfigurationRequest deleteAppReplicationConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteAppReplicationConfigurationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // The request carries a single field: the application id.
            protocolMarshaller.marshall(deleteAppReplicationConfigurationRequest.getAppId(), APPID_BINDING);
        } catch (Exception e) {
            // Wrap any failure in the SDK client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}