signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class AppendableVec { /** * not called distributed . */ synchronized void closeChunk ( int cidx , int len ) { } }
// The Parser will pre - allocate the _ tmp _ espc large enough ( the Parser // knows how many final Chunks there will be up front ) . Other users are // encouraged to set a " large enough " espc - and a shared one at that - to // avoid these copies . // Set the length into the temp ESPC at the Chunk index ( accounting for _ chunkOff ) cidx -= _chunkOff ; while ( cidx >= _tmp_espc . length ) // should not happen if espcs are preallocated and shared ! _tmp_espc = Arrays . copyOf ( _tmp_espc , _tmp_espc . length << 1 ) ; _tmp_espc [ cidx ] = len ;
public class JobsInner { /** * Retrieve the job identified by job id . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ param jobId The job id . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < JobInner > getAsync ( String resourceGroupName , String automationAccountName , UUID jobId , final ServiceCallback < JobInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( getWithServiceResponseAsync ( resourceGroupName , automationAccountName , jobId ) , serviceCallback ) ;
public class WstxInputFactory { /** * / / / / Stream reader factory methods */ @ Override public XMLStreamReader createXMLStreamReader ( InputStream in ) throws XMLStreamException { } }
// false for auto - close , since caller has access to the input stream return createSR ( null , in , null , false , false ) ;
public class LinearAlgebra { /** * Pseudo - Inverse of a matrix calculated in the least square sense . * @ param matrix The given matrix A . * @ return pseudoInverse The pseudo - inverse matrix P , such that A * P * A = A and P * A * P = P */ public static double [ ] [ ] pseudoInverse ( double [ ] [ ] matrix ) { } }
if ( isSolverUseApacheCommonsMath ) { // Use LU from common math SingularValueDecomposition svd = new SingularValueDecomposition ( new Array2DRowRealMatrix ( matrix ) ) ; double [ ] [ ] matrixInverse = svd . getSolver ( ) . getInverse ( ) . getData ( ) ; return matrixInverse ; } else { return org . jblas . Solve . pinv ( new org . jblas . DoubleMatrix ( matrix ) ) . toArray2 ( ) ; }
public class CommerceAccountUserRelLocalServiceUtil { /** * Returns the commerce account user rel with the primary key . * @ param commerceAccountUserRelPK the primary key of the commerce account user rel * @ return the commerce account user rel * @ throws PortalException if a commerce account user rel with the primary key could not be found */ public static com . liferay . commerce . account . model . CommerceAccountUserRel getCommerceAccountUserRel ( com . liferay . commerce . account . service . persistence . CommerceAccountUserRelPK commerceAccountUserRelPK ) throws com . liferay . portal . kernel . exception . PortalException { } }
return getService ( ) . getCommerceAccountUserRel ( commerceAccountUserRelPK ) ;
public class CreateFlowLogsResult { /** * Information about the flow logs that could not be created successfully . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setUnsuccessful ( java . util . Collection ) } or { @ link # withUnsuccessful ( java . util . Collection ) } if you want to * override the existing values . * @ param unsuccessful * Information about the flow logs that could not be created successfully . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateFlowLogsResult withUnsuccessful ( UnsuccessfulItem ... unsuccessful ) { } }
if ( this . unsuccessful == null ) { setUnsuccessful ( new com . amazonaws . internal . SdkInternalList < UnsuccessfulItem > ( unsuccessful . length ) ) ; } for ( UnsuccessfulItem ele : unsuccessful ) { this . unsuccessful . add ( ele ) ; } return this ;
public class DescribeParametersRequest { /** * Filters to limit the request results . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setParameterFilters ( java . util . Collection ) } or { @ link # withParameterFilters ( java . util . Collection ) } if you * want to override the existing values . * @ param parameterFilters * Filters to limit the request results . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeParametersRequest withParameterFilters ( ParameterStringFilter ... parameterFilters ) { } }
if ( this . parameterFilters == null ) { setParameterFilters ( new com . amazonaws . internal . SdkInternalList < ParameterStringFilter > ( parameterFilters . length ) ) ; } for ( ParameterStringFilter ele : parameterFilters ) { this . parameterFilters . add ( ele ) ; } return this ;
public class ComposableFutures { /** * sends a callable task to the default thread pool and returns a ComposableFuture that represent the result . * @ param task the task to run . * @ param < T > the future type * @ return a future representing the result . */ public static < T > ComposableFuture < T > submit ( final Callable < T > task ) { } }
return submit ( false , task ) ;
public class BufferedFileImageSequence { /** * Loads the next image into a BufferedImage and returns it . The same instance * or a new instance of a BufferedImage might be returned each time . Don ' t rely * on either behavior being consistent . * @ return A BufferedImage containing the next image . */ public T next ( ) { } }
if ( loop ) { if ( forwards ) { if ( index >= images . length ) { index = images . length - 1 ; forwards = false ; } } else { if ( index < 0 ) { index = 0 ; forwards = true ; } } } this . imageGUI = orig [ index ] ; if ( forwards ) return images [ index ++ ] ; else return images [ index -- ] ;
public class ColVals { /** * < p > Put column with Float val . < / p > * @ param pNm column name * @ param pVal column val */ public final void put ( final String pNm , final Float pVal ) { } }
if ( this . floats == null ) { this . floats = new HashMap < String , Float > ( ) ; } this . floats . put ( pNm , pVal ) ;
public class HealthStatusHttpMapper { /** * Map the specified { @ link Status } to an HTTP status code . * @ param status the health { @ link Status } * @ return the corresponding HTTP status code */ public int mapStatus ( Status status ) { } }
String code = getUniformValue ( status . getCode ( ) ) ; if ( code != null ) { return this . statusMapping . entrySet ( ) . stream ( ) . filter ( ( entry ) -> code . equals ( getUniformValue ( entry . getKey ( ) ) ) ) . map ( Map . Entry :: getValue ) . findFirst ( ) . orElse ( WebEndpointResponse . STATUS_OK ) ; } return WebEndpointResponse . STATUS_OK ;
public class ContentElement { /** * Create an automatic style for this TableCellStyle and this type of cell . * Do not produce any effect if the type is Type . STRING or Type . VOID . * @ param style the style of the cell ( color , data style , etc . ) * @ param type the type of the cell * @ return the created style , or style if the type is Type . STRING or Type . VOID */ public TableCellStyle addChildCellStyle ( final TableCellStyle style , final TableCell . Type type ) { } }
final TableCellStyle newStyle ; final DataStyle dataStyle = this . format . getDataStyle ( type ) ; if ( dataStyle == null ) { newStyle = style ; } else { newStyle = this . stylesContainer . addChildCellStyle ( style , dataStyle ) ; } return newStyle ;
public class UnitResponse { /** * No matter context pretty attribute is true or not , this method formats the json string you want . * @ param pretty pretty or not . * @ return formatted json string . */ public String toVoJSONString ( boolean pretty ) { } }
if ( pretty ) { return JSON . toJSONStringWithDateFormat ( toVoJSONObject ( ) , Constant . DATE_SERIALIZE_FORMAT , SerializerFeature . PrettyFormat ) ; } else { return JSONObject . toJSONStringWithDateFormat ( toVoJSONObject ( ) , Constant . DATE_SERIALIZE_FORMAT ) ; }
public class TaskTextViewerPanel { /** * Displays the error message . * @ param failedTaskReport error message */ public void setText ( FailedTaskReport failedTaskReport ) { } }
final String failedTaskReportString = failedTaskReport == null ? "Failed Task Report is null" : failedTaskReport . toString ( ) ; setText ( failedTaskReportString ) ;
public class CmsDialog { /** * Builds a block with 3D border and optional subheadline in the dialog content area . < p > * @ param segment the HTML segment ( START / END ) * @ param headline the headline String for the block * @ param error if true , an error block will be created * @ return 3D block start / end segment */ public String dialogBlock ( int segment , String headline , boolean error ) { } }
if ( segment == HTML_START ) { StringBuffer result = new StringBuffer ( 512 ) ; String errorStyle = "" ; if ( error ) { errorStyle = " dialogerror" ; } result . append ( "<!-- 3D block start -->\n" ) ; result . append ( "<fieldset class=\"dialogblock\">\n" ) ; if ( CmsStringUtil . isNotEmpty ( headline ) ) { result . append ( "<legend>" ) ; result . append ( "<span class=\"textbold" ) ; result . append ( errorStyle ) ; result . append ( "\" unselectable=\"on\">" ) ; result . append ( headline ) ; result . append ( "</span></legend>\n" ) ; } return result . toString ( ) ; } else { return "</fieldset>\n<!-- 3D block end -->\n" ; }
public class ProducerPool {

    /**
     * Selects either a synchronous or an asynchronous producer, for the
     * specified broker id, and calls the send API on the selected producer
     * to publish the data to the specified broker partition.
     *
     * @param ppd the producer pool request object (topic, partition, payload list)
     */
    public void send(ProducerPoolData<V> ppd) {
        if (logger.isDebugEnabled()) {
            logger.debug("send message: " + ppd);
        }
        if (sync) {
            // Synchronous path: serialize the whole batch, wrap it in a single
            // ProducerRequest, and send it on the broker's sync producer.
            Message[] messages = new Message[ppd.data.size()];
            int index = 0;
            for (V v : ppd.data) {
                messages[index] = serializer.toMessage(v);
                index++;
            }
            ByteBufferMessageSet bbms = new ByteBufferMessageSet(config.getCompressionCodec(), messages);
            ProducerRequest request = new ProducerRequest(ppd.topic, ppd.partition.partId, bbms);
            SyncProducer producer = syncProducers.get(ppd.partition.brokerId);
            // A missing producer means the pool was never initialized for this broker.
            if (producer == null) {
                throw new UnavailableProducerException("Producer pool has not been initialized correctly. " + "Sync Producer for broker " + ppd.partition.brokerId + " does not exist in the pool");
            }
            producer.send(request.topic, request.partition, request.messages);
        } else {
            // Asynchronous path: hand each element to the broker's async producer.
            // NOTE(review): unlike the sync path, a missing async producer is not
            // checked here and would surface as a NullPointerException — confirm
            // whether the pool guarantees initialization for async brokers.
            AsyncProducer<V> asyncProducer = asyncProducers.get(ppd.partition.brokerId);
            for (V v : ppd.data) {
                asyncProducer.send(ppd.topic, v, ppd.partition.partId);
            }
        }
    }
}
public class ZoneRules {

    /**
     * Finds the offset info for a local date-time relative to a transition.
     *
     * <p>For a gap, local times before the transition keep the "before" offset,
     * times inside the gap map to the transition itself, and later times get the
     * "after" offset. For an overlap the roles are mirrored.
     *
     * @param dt    the date-time, not null
     * @param trans the transition, not null
     * @return the offset info: a {@code ZoneOffset} or the transition itself, not null
     */
    private Object findOffsetInfo(LocalDateTime dt, ZoneOffsetTransition trans) {
        LocalDateTime transitionStart = trans.getDateTimeBefore();
        if (trans.isGap()) {
            // Gap: [before, after) of local time does not exist.
            if (dt.isBefore(transitionStart)) {
                return trans.getOffsetBefore();
            }
            if (dt.isBefore(trans.getDateTimeAfter())) {
                // Inside the gap: the transition object itself describes it.
                return trans;
            }
            return trans.getOffsetAfter();
        }
        // Overlap: [after, before) of local time occurs twice.
        if (!dt.isBefore(transitionStart)) {
            return trans.getOffsetAfter();
        }
        if (dt.isBefore(trans.getDateTimeAfter())) {
            return trans.getOffsetBefore();
        }
        // Inside the overlap: the transition object itself describes it.
        return trans;
    }
}
public class SpringMvcEndpointGeneratorMojo {

    /**
     * Resolves the base path for generated endpoints.
     *
     * @return the configuration property &lt;baseUri&gt; (if set) or the baseUri from
     *         the RAML spec; a single "/" is collapsed to the empty string
     */
    private String getBasePath(RamlRoot loadRamlFromFile) {
        // Plugin configuration wins; otherwise fall back to the RAML spec's baseUri.
        String basePath = (baseUri != null) ? baseUri : loadRamlFromFile.getBaseUri();

        // Maven cannot express an empty-string parameter, so a single "/" is the
        // documented way to overrule the RAML spec. Collapse it to "": the leading
        // slash is generated by the RAML endpoints themselves.
        if ("/".equals(basePath)) {
            basePath = "";
        }
        return basePath;
    }
}
public class ExpressionEvaluator { /** * Utility method to evaluate both if expressions and statements * @ param node * @ param ifLocation * @ param testExp * @ param thenNode * @ param elseIfNodeList * @ param elseNode * @ param ctxt * @ return * @ throws AnalysisException */ protected Value evalIf ( INode node , ILexLocation ifLocation , PExp testExp , INode thenNode , List < ? extends INode > elseIfNodeList , INode elseNode , Context ctxt ) throws AnalysisException { } }
BreakpointManager . getBreakpoint ( node ) . check ( ifLocation , ctxt ) ; try { if ( testExp . apply ( VdmRuntime . getStatementEvaluator ( ) , ctxt ) . boolValue ( ctxt ) ) { return thenNode . apply ( VdmRuntime . getStatementEvaluator ( ) , ctxt ) ; } else { for ( INode elseif : elseIfNodeList ) { Value r = elseif . apply ( VdmRuntime . getStatementEvaluator ( ) , ctxt ) ; if ( r != null ) { return r ; } } if ( elseNode != null ) { return elseNode . apply ( VdmRuntime . getStatementEvaluator ( ) , ctxt ) ; } return new VoidValue ( ) ; } } catch ( ValueException e ) { return VdmRuntimeError . abort ( ifLocation , e ) ; }
public class JsonUtils { /** * Decode a JSON string as Java map . The string must represent a JSON Object * @ param json The JSON to decode * @ return a map representing the JSON Object */ @ SuppressWarnings ( "unchecked" ) public static Map < String , Object > decodeAsMap ( String json ) { } }
return decode ( json , HashMap . class ) ;
public class Ix {

    /**
     * Prints the elements of this sequence to the console, separated
     * by the given separator and with a line break after roughly the
     * given charsPerLine amount.
     *
     * <p>"Roughly" because the break is inserted only after the running
     * length (including the separator just printed) exceeds the limit,
     * so lines can overshoot by up to one separator plus one element.
     *
     * @param separator    the characters to separate the elements
     * @param charsPerLine indicates how long a line should be
     */
    public final void print(CharSequence separator, int charsPerLine) {
        boolean first = true;
        // Approximate number of characters already emitted on the current line.
        int len = 0;
        for (T v : this) {
            String s = String.valueOf(v);
            if (first) {
                // First element: no leading separator, never wrapped.
                System.out.print(s);
                len += s.length();
                first = false;
            } else {
                // Separator always goes on the current line, even if wrapping follows.
                System.out.print(separator);
                len += separator.length();
                if (len > charsPerLine) {
                    // Line budget exhausted: break, then start the new line with this element.
                    System.out.println();
                    System.out.print(s);
                    len = s.length();
                } else {
                    System.out.print(s);
                    len += s.length();
                }
            }
        }
    }
}
public class HilOut {

    /**
     * Scan function performs a sequential scan over the data, tightening each
     * point's outlier-weight bounds and updating the candidate heaps.
     *
     * @param hf the hilbert features (points ordered along the Hilbert curve)
     * @param k0 neighborhood scan radius; up to {@code 2*k0} neighbors are examined
     */
    private void scan(HilbertFeatures hf, int k0) {
        // Cap the neighbor count at N-1 (a point is never its own neighbor).
        final int mink0 = Math.min(2 * k0, capital_n - 1);
        if (LOG.isDebuggingFine()) {
            LOG.debugFine("Scanning with k0=" + k0 + " (" + mink0 + ")" + " N*=" + capital_n_star);
        }
        for (int i = 0; i < hf.pf.length; i++) {
            // Points whose upper bound is already below the cutoff cannot be outliers.
            if (hf.pf[i].ubound < omega_star) {
                continue;
            }
            // Bounds not yet converged: try to tighten them.
            if (hf.pf[i].lbound < hf.pf[i].ubound) {
                // Cheap bound first; only fall through to the full inner scan if needed.
                double omega = hf.fastUpperBound(i);
                if (omega < omega_star) {
                    hf.pf[i].ubound = omega;
                } else {
                    int maxcount;
                    // capital_n - 1 instead of capital_n: all, except self
                    if (hf.top.contains(hf.pf[i])) {
                        maxcount = capital_n - 1;
                    } else {
                        maxcount = mink0;
                    }
                    innerScan(hf, i, maxcount);
                }
            }
            // Feed the updated bounds into the outlier (OUT) and weight-lower-bound (WLB) heaps.
            if (hf.pf[i].ubound > 0) {
                hf.updateOUT(i);
            }
            if (hf.pf[i].lbound > 0) {
                hf.updateWLB(i);
            }
            // With n candidates collected, the weakest lower bound raises the global cutoff.
            if (hf.wlb.size() >= n) {
                omega_star = Math.max(omega_star, hf.wlb.peek().lbound);
            }
        }
    }
}
public class PrecompileExceptDef { /** * Sets the base - directory */ public void setDir ( final File dir ) throws BuildException { } }
if ( this . localSet == null ) { this . localSet = new ConditionalFileSet ( ) ; this . owner . appendExceptFileSet ( this . localSet ) ; } this . localSet . setDir ( dir ) ;
public class ComicChatOverlay { /** * Clear chat bubbles , either all of them or just the place - oriented ones . */ protected void clearBubbles ( boolean all ) { } }
for ( Iterator < BubbleGlyph > iter = _bubbles . iterator ( ) ; iter . hasNext ( ) ; ) { ChatGlyph rec = iter . next ( ) ; if ( all || isPlaceOrientedType ( rec . getType ( ) ) ) { _target . abortAnimation ( rec ) ; iter . remove ( ) ; } }
public class FieldMetaData { /** * 获取动态属性值 * @ param value * @ param charset * @ return */ @ SuppressWarnings ( "unchecked" ) private byte [ ] getDynamicFieldByteValue ( Object value , Charset charset ) { } }
switch ( dynamicFieldType ) { // 如果是打包剩余的所有Byte case allRestByte : return BytesUtil . objString2Byte ( ( String ) value , charset ) ; // 如果是文件metadata case metadata : return MetadataMapper . toByte ( ( Set < MetaData > ) value , charset ) ; default : return BytesUtil . objString2Byte ( ( String ) value , charset ) ; }
public class TermMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Term term , ProtocolMarshaller protocolMarshaller ) { } }
if ( term == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( term . getSourceText ( ) , SOURCETEXT_BINDING ) ; protocolMarshaller . marshall ( term . getTargetText ( ) , TARGETTEXT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AbstractResourcePool {

    /**
     * {@inheritDoc}
     *
     * @throws IllegalArgumentException {@inheritDoc}
     */
    @Override
    public void validateUpdate(ResourcePool newPool) {
        // Replacement must be of the same ResourceType.
        if (!this.getType().equals(newPool.getType())) {
            throw new IllegalArgumentException("ResourceType " + newPool.getType() + " can not replace " + this.getType());
        }
        // Replacement must have the same persistence.
        if (this.isPersistent() != newPool.isPersistent()) {
            throw new IllegalArgumentException("ResourcePool for " + newPool.getType() + " with isPersistent=" + newPool.isPersistent() + " can not replace isPersistent=" + this.isPersistent());
        }
    }
}
public class DTBuilder { /** * behavior undefined unless normalized . * If you ' re not sure whether it ' s appropriate to use this method , use * < code > toDateValue ( ) . compareTo ( dv ) < / code > instead . */ public int compareTo ( DateValue dv ) { } }
long dvComparable = ( ( ( ( ( long ) dv . year ( ) ) << 4 ) + dv . month ( ) ) << 5 ) + dv . day ( ) ; long dtbComparable = ( ( ( ( long ) year << 4 ) + month << 5 ) ) + day ; if ( dv instanceof TimeValue ) { TimeValue tv = ( TimeValue ) dv ; dvComparable = ( ( ( ( ( dvComparable << 5 ) + tv . hour ( ) ) << 6 ) + tv . minute ( ) ) << 6 ) + tv . second ( ) ; dtbComparable = ( ( ( ( ( dtbComparable << 5 ) + hour ) << 6 ) + minute ) << 6 ) + second ; } long delta = dtbComparable - dvComparable ; return delta < 0 ? - 1 : delta == 0 ? 0 : 1 ;
public class JournalDriverImpl { /** * @ Override * public void setMaxCount ( int maxCount ) * _ maxCount = maxCount ; * @ Override * public void setDelay ( long timeout ) * _ timeout = timeout ; * @ Override * public long getDelay ( ) * return _ timeout ; */ @ Override public JournalAmp open ( String name ) // , int maxCount , long timeout ) { } }
JournalSystem system = JournalSystem . getCurrent ( ) ; /* if ( maxCount < 0 ) { maxCount = _ maxCount ; if ( timeout < 0 ) { timeout = _ timeout ; */ if ( system != null ) { return new JournalImpl ( system . openJournal ( name ) ) ; // , maxCount , timeout ) ; } else { throw new ServiceException ( L . l ( "Journals are not supported on this system." ) ) ; }
public class DefaultDataStore { /** * Internal API used by table DAOs that supports a LimitCounter instead of a long limit . */ @ Override public Iterator < Map < String , Object > > scan ( String tableName , @ Nullable String fromKeyExclusive , LimitCounter limit , ReadConsistency consistency ) { } }
return scan ( tableName , fromKeyExclusive , limit , false , consistency ) ;
public class AbstractTraceFactory { /** * ( non - Javadoc ) * @ see com . ibm . ws . objectManager . utils . TraceFactory # getTrace ( java . lang . Class , java . lang . String ) */ public Trace getTrace ( Class sourceClass , String traceGroup ) { } }
synchronized ( activeTrace ) { TraceImpl traceImpl = new TraceImpl ( sourceClass , traceGroup , this ) ; activeTrace . put ( sourceClass , traceImpl ) ; try { applyActiveTrace ( ) ; } catch ( java . io . IOException exception ) { System . out . println ( cclass + ":getTrace() exception" + exception ) ; exception . printStackTrace ( ) ; } // try . return traceImpl ; } // synchronized ( activeTrace ) .
public class ExampleUtils {

    /**
     * Tears down external resources that can be deleted upon the example's completion:
     * deletes the Pub/Sub topic and subscription (best-effort) and queues messages
     * reminding the user about the BigQuery table, which is NOT deleted automatically.
     */
    private void tearDown() {
        pendingMessages.add("*************************Tear Down*************************");
        ExamplePubsubTopicAndSubscriptionOptions pubsubOptions = options.as(ExamplePubsubTopicAndSubscriptionOptions.class);
        if (!pubsubOptions.getPubsubTopic().isEmpty()) {
            try {
                deletePubsubTopic(pubsubOptions.getPubsubTopic());
                pendingMessages.add("The Pub/Sub topic has been deleted: " + pubsubOptions.getPubsubTopic());
            } catch (IOException e) {
                // Best-effort cleanup: record the failure instead of aborting tear-down.
                pendingMessages.add("Failed to delete the Pub/Sub topic : " + pubsubOptions.getPubsubTopic());
            }
            // NOTE(review): subscription cleanup only runs when a topic is configured;
            // a subscription configured without a topic is never deleted — confirm intended.
            if (!pubsubOptions.getPubsubSubscription().isEmpty()) {
                try {
                    deletePubsubSubscription(pubsubOptions.getPubsubSubscription());
                    pendingMessages.add("The Pub/Sub subscription has been deleted: " + pubsubOptions.getPubsubSubscription());
                } catch (IOException e) {
                    pendingMessages.add("Failed to delete the Pub/Sub subscription : " + pubsubOptions.getPubsubSubscription());
                }
            }
        }
        ExampleBigQueryTableOptions bigQueryTableOptions = options.as(ExampleBigQueryTableOptions.class);
        if (bigQueryTableOptions.getBigQueryDataset() != null && bigQueryTableOptions.getBigQueryTable() != null && bigQueryTableOptions.getBigQuerySchema() != null) {
            // The BigQuery table is deliberately left in place; only warn the user.
            pendingMessages.add("The BigQuery table might contain the example's output, " + "and it is not deleted automatically: " + bigQueryTableOptions.getProject() + ":" + bigQueryTableOptions.getBigQueryDataset() + "." + bigQueryTableOptions.getBigQueryTable());
            pendingMessages.add("Please go to the Developers Console to delete it manually." + " Otherwise, you may be charged for its usage.");
        }
    }
}
public class EpicsApi { /** * Gets all issues that are assigned to an epic and the authenticated user has access to * using the specified page and per page setting . * < pre > < code > GitLab Endpoint : GET / groups / : id / epics / : epic _ iid / issues < / code > < / pre > * @ param groupIdOrPath the group ID , path of the group , or a Group instance holding the group ID or path * @ param epicIid the IID of the epic to get issues for * @ param page the page to get * @ param perPage the number of issues per page * @ return a list of all epic issues belonging to the specified epic in the specified range * @ throws GitLabApiException if any exception occurs */ public List < Epic > getEpicIssues ( Object groupIdOrPath , Integer epicIid , int page , int perPage ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , getPageQueryParams ( page , perPage ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "epics" , epicIid , "issues" ) ; return ( response . readEntity ( new GenericType < List < Epic > > ( ) { } ) ) ;
public class Matth {

    /**
     * Computes the inverse hyperbolic tangent of a number.
     *
     * <p>Uses the exact log formula for |a| &gt; 0.15 and progressively shorter
     * odd Taylor series (atanh x = x + x^3/3 + x^5/5 + ...) for smaller inputs,
     * where the log formula would lose precision.
     *
     * @param a number on which evaluation is done
     * @return inverse hyperbolic tangent of a
     */
    public static double atanh(double a) {
        // Taylor coefficients 1/(2k+1), bound locally so the method is self-contained.
        final double F_1_3 = 1d / 3d;
        final double F_1_5 = 1d / 5d;
        final double F_1_7 = 1d / 7d;
        final double F_1_9 = 1d / 9d;
        final double F_1_11 = 1d / 11d;
        final double F_1_13 = 1d / 13d;
        final double F_1_15 = 1d / 15d;
        final double F_1_17 = 1d / 17d;

        // atanh is odd: evaluate on |a| and restore the sign at the end.
        final boolean negative = a < 0;
        double x = negative ? -a : a;

        final double absAtanh;
        if (x > 0.15) {
            // Large enough argument: the closed form is accurate.
            absAtanh = 0.5 * Math.log((1 + x) / (1 - x));
        } else {
            final double x2 = x * x;
            // Pick the shortest series that still reaches full double precision.
            if (x > 0.087) {
                absAtanh = x * (1 + x2 * (F_1_3 + x2 * (F_1_5 + x2 * (F_1_7 + x2 * (F_1_9 + x2 * (F_1_11 + x2 * (F_1_13 + x2 * (F_1_15 + x2 * F_1_17))))))));
            } else if (x > 0.031) {
                absAtanh = x * (1 + x2 * (F_1_3 + x2 * (F_1_5 + x2 * (F_1_7 + x2 * (F_1_9 + x2 * (F_1_11 + x2 * F_1_13))))));
            } else if (x > 0.003) {
                absAtanh = x * (1 + x2 * (F_1_3 + x2 * (F_1_5 + x2 * (F_1_7 + x2 * F_1_9))));
            } else {
                absAtanh = x * (1 + x2 * (F_1_3 + x2 * F_1_5));
            }
        }
        return negative ? -absAtanh : absAtanh;
    }
}
public class device_profile { /** * < pre > * Use this operation to modify device profile . * < / pre > */ public static device_profile update ( nitro_service client , device_profile resource ) throws Exception { } }
resource . validate ( "modify" ) ; return ( ( device_profile [ ] ) resource . update_resource ( client ) ) [ 0 ] ;
public class ByteBuddy {

    /**
     * Creates a new configuration where any {@link MethodDescription} that matches the provided method matcher is excluded
     * from instrumentation. Any previous matcher for ignored methods is replaced. By default, Byte Buddy ignores any
     * synthetic method (bridge methods are handled automatically) and the {@link Object#finalize()} method. Using a latent
     * matcher gives opportunity to resolve an {@link ElementMatcher} based on the instrumented type before applying the matcher.
     *
     * @param ignoredMethods A matcher for identifying methods to be excluded from instrumentation.
     * @return A new Byte Buddy instance that excludes any method from instrumentation if it is matched by the supplied matcher.
     */
    public ByteBuddy ignore(LatentMatcher<? super MethodDescription> ignoredMethods) {
        // ByteBuddy is immutable: return a copy with every field carried over
        // except the ignored-methods matcher, which is replaced.
        return new ByteBuddy(classFileVersion,
                namingStrategy,
                auxiliaryTypeNamingStrategy,
                annotationValueFilterFactory,
                annotationRetention,
                implementationContextFactory,
                methodGraphCompiler,
                instrumentedTypeFactory,
                typeValidation,
                visibilityBridgeStrategy,
                classWriterStrategy,
                ignoredMethods);
    }
}
public class WriterPoolMember {

    /**
     * Generate a new basename by interpolating values in the configured
     * template. Values come from local state, other configured values, and
     * global system properties. The recommended default template will
     * generate a unique basename under reasonable assumptions.
     */
    protected void generateNewBasename() {
        Properties localProps = new Properties();
        localProps.setProperty("prefix", settings.getPrefix());
        // Class-wide lock: ensure that serialNo and timestamp are minted together
        // (never inverted sort order) across all pool members.
        synchronized (this.getClass()) {
            String paddedSerialNumber = WriterPoolMember.serialNoFormatter.format(serialNo.getAndIncrement());
            String timestamp17 = ArchiveUtils.getUnique17DigitDate();
            String timestamp14 = ArchiveUtils.getUnique14DigitDate();
            currentTimestamp = timestamp17;
            localProps.setProperty("serialno", paddedSerialNumber);
            localProps.setProperty("timestamp17", timestamp17);
            localProps.setProperty("timestamp14", timestamp14);
        }
        // System properties act as a fallback for template keys not set locally.
        currentBasename = PropertyUtils.interpolateWithProperties(settings.getTemplate(), localProps, System.getProperties());
    }
}
public class VorbisFile { /** * clear out the OggVorbis _ File struct */ int clear ( ) { } }
vb . clear ( ) ; vd . clear ( ) ; os . clear ( ) ; if ( vi != null && links != 0 ) { for ( int i = 0 ; i < links ; i ++ ) { vi [ i ] . clear ( ) ; vc [ i ] . clear ( ) ; } vi = null ; vc = null ; } if ( dataoffsets != null ) { dataoffsets = null ; } if ( pcmlengths != null ) { pcmlengths = null ; } if ( serialnos != null ) { serialnos = null ; } if ( offsets != null ) { offsets = null ; } oy . clear ( ) ; return ( 0 ) ;
public class IpRuleItemMarshaller { /** * Marshall the given parameter object . */ public void marshall ( IpRuleItem ipRuleItem , ProtocolMarshaller protocolMarshaller ) { } }
if ( ipRuleItem == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( ipRuleItem . getIpRule ( ) , IPRULE_BINDING ) ; protocolMarshaller . marshall ( ipRuleItem . getRuleDesc ( ) , RULEDESC_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class AbstractClientOptionsBuilder { /** * Sets the specified HTTP header . */ public B setHttpHeader ( CharSequence name , Object value ) { } }
requireNonNull ( name , "name" ) ; requireNonNull ( value , "value" ) ; httpHeaders . setObject ( HttpHeaderNames . of ( name ) , value ) ; return self ( ) ;
public class RiakMap { /** * Returns a RiakCounter to which the specified key is mapped , or { @ literal null } * if no RiakCounter is present . * @ param key key whose associated RiakCounter is to be returned . * @ return a RiakCounter , or null if one is not present . */ public RiakCounter getCounter ( BinaryValue key ) { } }
if ( entries . containsKey ( key ) ) { for ( RiakDatatype dt : entries . get ( key ) ) { if ( dt . isCounter ( ) ) { return dt . getAsCounter ( ) ; } } } return null ;
public class SummernoteImageUploadEvent { /** * Fires a summernote image upload event on all registered handlers in the * handler manager . If no such handlers exist , this method will do nothing . * @ param source the source of the handlers */ public static void fire ( final HasSummernoteImageUploadHandlers source , JsArray < ImageFile > images ) { } }
if ( TYPE != null ) { SummernoteImageUploadEvent event = new SummernoteImageUploadEvent ( images ) ; source . fireEvent ( event ) ; }
public class AbstractSphere3F { /** * { @ inheritDoc } */ @ Pure @ Override public double distanceSquared ( Point3D p ) { } }
double d = FunctionalPoint3D . distanceSquaredPointPoint ( getX ( ) , getY ( ) , getZ ( ) , p . getX ( ) , p . getY ( ) , p . getZ ( ) ) - getRadius ( ) ; return MathUtil . max ( 0. , d ) ;
public class RegistriesInner { /** * Creates a container registry with the specified parameters . * @ param resourceGroupName The name of the resource group to which the container registry belongs . * @ param registryName The name of the container registry . * @ param registry The parameters for creating a container registry . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the RegistryInner object if successful . */ public RegistryInner create ( String resourceGroupName , String registryName , RegistryInner registry ) { } }
return createWithServiceResponseAsync ( resourceGroupName , registryName , registry ) . toBlocking ( ) . last ( ) . body ( ) ;
public class MonitorHolder { /** * Perform the scan on monitored resources . This method checks the * destroy flag before the scan begins , and after the scan completes , * to ensure that any scheduled destruction is carried out as soon as * reasonable . * This is not a trivial method - - but we use the trival annotation to prevent * entry / exit trace for every invocation . */ @ Trivial @ FFDCIgnore ( InterruptedException . class ) void scheduledScan ( ) { } }
// Don't perform a scheduled scan if this monitor holder is paused.
if (isStopped)
    return;
// 152229: the monitor type is read here via monitorRef rather than cached at
// construction in some variants; reading it each scan avoids staleness but the
// unit tests' mocked FileMonitor can interfere -- see the defect for history.
// Don't do anything if this is an external monitor (externally driven scans).
if (FileMonitor.MONITOR_TYPE_EXTERNAL.equals(monitorRef.getProperty(FileMonitor.MONITOR_TYPE))) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(this, tc, "scheduledScan - RETURN early - external monitor");
    }
    return;
}
// Don't do anything if the framework is stopping. Allow normal component
// cleanup to deactivate/clean up the scheduled tasks, but make this a no-op
// if the server is shutting down.
if (FrameworkState.isStopping()) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(this, tc, "scheduledScan - RETURN early - framework stopping");
    }
    return;
}
// Single-scan gate: only proceed if we can flip scanInProgress false->true.
// Monitor notification must happen outside of the scan lock to prevent
// deadlocks, so a CAS flag (not the lock) guards against overlapping scans.
if (!scanInProgress.compareAndSet(false, true)) {
    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
        Tr.debug(this, tc, "scheduledScan - RETURN early - already scan in progress?");
    }
    return;
}
try {
    Set<File> created = new HashSet<File>();
    Set<File> deleted = new HashSet<File>();
    Set<File> modified = new HashSet<File>();
    // Try to obtain the scan lock -- this might fail if the monitor
    // configuration is being updated; in that case skip this cycle entirely.
    if (scanLock.tryLock()) {
        try {
            // Always try destroy when we obtain the lock: it returns true if this
            // holder is in destroy/destroyed state. Only scan while ACTIVE.
            if (!doDestroy() && (monitorState.get() == MonitorState.ACTIVE.ordinal())) {
                if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(this, tc, "File monitor scan: begin", updateMonitors);
                }
                scanForUpdates(created, deleted, modified);
                if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) {
                    // Changes were seen: re-scan until we get two consecutive
                    // "quiet" intervals, so files mid-copy are not reported while
                    // still being written. (Two clean 100ms go-rounds required.)
                    boolean oneClean = false;
                    boolean twoClean = false;
                    List<File> createdCheck = new ArrayList<File>();
                    List<File> deletedCheck = new ArrayList<File>();
                    List<File> modifiedCheck = new ArrayList<File>();
                    do {
                        // Wait before checking again to give in-flight copies time to
                        // finish. This may not help for large FTP-style copies or an OS
                        // that pre-sets size/lastModified, but covers small files and
                        // the test environment.
                        try {
                            Thread.sleep(TIME_TO_WAIT_FOR_COPY_TO_COMPLETE);
                        } catch (InterruptedException ex) {
                            // best-effort delay; an interrupt just shortens the wait
                        }
                        // Clear the per-iteration lists: each loop must observe a clean
                        // ("update free") interval, while created/deleted/modified keep
                        // accumulating the resolved state across iterations.
                        createdCheck.clear();
                        deletedCheck.clear();
                        modifiedCheck.clear();
                        scanForUpdates(createdCheck, deletedCheck, modifiedCheck);
                        resolveChangesForScheduledScan(created, deleted, modified, createdCheck, deletedCheck, modifiedCheck);
                        if (createdCheck.isEmpty() && modifiedCheck.isEmpty() && deletedCheck.isEmpty()) {
                            // This run was clean -- hooray!
                            if (oneClean) {
                                twoClean = true; // <-- loop exit condition
                            } else {
                                oneClean = true; // <-- hopefully only one more time through
                            }
                        } else {
                            oneClean = false; // bummer: changes seen, start counting again
                        }
                        // Keep going until we have two quiet intervals
                        // (AND the runtime/framework is still happy).
                    } while (!twoClean && FrameworkState.isValid());
                }
            }
        } catch (RuntimeException e) {
            // TODO: MUST CATCH exceptions here (to at least get FFDC)... ick
        } finally {
            try {
                doDestroy(); // always attempt destroy while we hold the lock
            } finally {
                scanLock.unlock();
            }
        }
        if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) {
            // Changes were discovered: trace & call the registered file monitor.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                Tr.debug(this, tc, "File monitor scan: end; resources changed", created.size() + " created", modified.size() + " modified", deleted.size() + " deleted", "running=" + FrameworkState.isValid());
            }
            // Even if we do get into a scan, make sure the framework is still good
            // before pushing the notification -- avoid propagating changes while
            // components that might react to them are being shut down.
            if (FrameworkState.isValid()) {
                try {
                    monitor.onChange(created, modified, deleted);
                    // The monitor handled the call cleanly: reset our exception count.
                    exceptionCount = 0;
                } catch (RuntimeException e) {
                    // FFDC instrumentation will go here.
                    // Catch the exception so it doesn't kill the whole scheduler.
                    exceptionCount++;
                    Tr.warning(tc, "fileMonitorException", created, modified, deleted, monitor.getClass(), e.getLocalizedMessage());
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                        Tr.debug(this, tc, "scheduledScan - exceptionCount=" + exceptionCount);
                    }
                    // If the monitor keeps throwing, abandon monitoring for it.
                    if (exceptionCount >= NUMBER_OF_EXCEPTIONS_BEFORE_DISABLING_MONITOR) {
                        Tr.warning(tc, "fileMonitorDisabled", NUMBER_OF_EXCEPTIONS_BEFORE_DISABLING_MONITOR, monitor.getClass());
                        // Reset the count in case we get re-enabled by outside forces.
                        exceptionCount = 0;
                        destroy();
                    }
                }
            } else {
                // No framework: clean up instead of notifying.
                stop();
            }
        } else if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            // Super-detailed trace: record the begin/end of all (no-op) file scans.
            Tr.debug(this, tc, "File monitor scan: end; no changes");
        }
    } // end if tryLock
} finally {
    // Always release the scan gate, even on unexpected exits.
    scanInProgress.set(false);
}
public class NetworkSadnessTransformer { /** * http : / / diveintodata . org / 2009/09 / zipf - distribution - generator - in - java / */ public int nextZipfDelay ( ) { } }
// Rejection-sample a Zipf-distributed rank in [1, size].
//
// FIX: the original drew value = rand.nextInt(size), which can be 0.
// Math.pow(0, skew) == 0 makes the acceptance frequency 1/0 = Infinity,
// so 0 was *always* accepted when drawn -- over-representing an invalid
// rank (Zipf ranks start at 1) and distorting the distribution.
// Candidate ranks are therefore drawn from 1..size.
//
// frequency = (1 / value^skew) / bottom is the normalized Zipf probability
// of the candidate rank ("bottom" is assumed to be the normalization sum);
// the candidate is accepted when a uniform draw falls under it.
int value;
double frequency;
double dice;
do {
    value = rand.nextInt(size) + 1; // rank in [1, size], never 0
    frequency = (1.0d / Math.pow(value, this.skew)) / this.bottom;
    dice = rand.nextDouble();
} while (dice >= frequency);
return value;
public class DescribeFolderContentsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeFolderContentsRequest describeFolderContentsRequest , ProtocolMarshaller protocolMarshaller ) { } }
// Fail fast on a null request: the marshaller cannot build a partial payload.
if ( describeFolderContentsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try {
    // Bind each request field to its protocol location via the generated bindings.
    protocolMarshaller . marshall ( describeFolderContentsRequest . getAuthenticationToken ( ) , AUTHENTICATIONTOKEN_BINDING ) ; protocolMarshaller . marshall ( describeFolderContentsRequest . getFolderId ( ) , FOLDERID_BINDING ) ; protocolMarshaller . marshall ( describeFolderContentsRequest . getSort ( ) , SORT_BINDING ) ; protocolMarshaller . marshall ( describeFolderContentsRequest . getOrder ( ) , ORDER_BINDING ) ; protocolMarshaller . marshall ( describeFolderContentsRequest . getLimit ( ) , LIMIT_BINDING ) ; protocolMarshaller . marshall ( describeFolderContentsRequest . getMarker ( ) , MARKER_BINDING ) ; protocolMarshaller . marshall ( describeFolderContentsRequest . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( describeFolderContentsRequest . getInclude ( ) , INCLUDE_BINDING ) ; } catch ( Exception e ) {
    // Wrap any marshalling failure in the SDK client exception, preserving the cause.
    throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class PropertyAdapter { /** * Retrieves the exclusiveMaximum value of a property * @ return the exclusiveMaximum value of the property */ public boolean getExclusiveMax ( ) { } }
// Only numeric properties carry an exclusiveMaximum; everything else is false.
if (!(property instanceof AbstractNumericProperty)) {
    return false;
}
AbstractNumericProperty numeric = (AbstractNumericProperty) property;
// BooleanUtils.isTrue maps a null exclusiveMaximum to false.
return BooleanUtils.isTrue(numeric.getExclusiveMaximum());
public class GinBridgeClassLoader { /** * @ inheritDoc * Gin class loading implementation , making sure that classes are loaded consistently and can be * GWT generated or super - source classes . See description { @ link GinBridgeClassLoader above } . */ @ Override protected Class < ? > loadClass ( String name , boolean resolve ) throws ClassNotFoundException { } }
// Check whether this loader already loaded the class (standard first step).
Class < ? > clazz = findLoadedClass ( name ) ; if ( clazz == null ) { if ( inExceptedPackage ( name ) ) {
    // Excepted packages are always delegated straight to the parent loader.
    clazz = super . loadClass ( name , false ) ; } else { try {
    // Child-first: try this loader's own class path so GWT-generated and
    // super-source classes win over any parent copies.
    clazz = findClass ( name ) ; } catch ( ClassNotFoundException e ) {
    // Fall back to the parent loader; warn when the class is not a GWT
    // client class (annotations are exempt -- see inline comment).
    clazz = super . loadClass ( name , false ) ; if ( ! clazz . isAnnotation ( ) ) { // Annotations are always safe to load
    logger . log ( Type . WARN , String . format ( "Class %s is used in Gin, but not available in GWT client code." , name ) ) ; } } } } if ( resolve ) { resolveClass ( clazz ) ; } return clazz ;
public class JobLauncherUtils { /** * Cleanup the staging data for a list of Gobblin tasks . This method calls the * { @ link # cleanTaskStagingData ( State , Logger ) } method . * @ param states a { @ link List } of { @ link State } s that need their staging data cleaned */ public static void cleanStagingData ( List < ? extends State > states , Logger logger ) throws IOException { } }
// Clean each task's staging data in turn; any IOException aborts the loop
// and propagates to the caller, matching the method's throws contract.
for (State taskState : states) {
    cleanTaskStagingData(taskState, logger);
}
public class MoreLikeThis { /** * Return a query that will return docs like the passed lucene document ID . * @ param docNum the documentID of the lucene doc to generate the ' More Like This " query for . * @ return a query that will return docs like the passed lucene document ID . */ public Query like ( int docNum ) throws IOException { } }
// Lazily derive the set of fields to consider if the caller never set them.
if ( fieldNames == null ) { setFieldNames ( ) ; }
// Extract the "interesting" terms from the target document and build the query.
return createQuery ( retrieveTerms ( docNum ) ) ;
public class Rasterlite2Coverage { /** * Extract an image from the database . * @ param geom the image bounding box geometry . * @ param width the pixel width of the expected image . * @ param height the pixel height of the expected image . * @ return the image bytes . * @ throws Exception */ public byte [ ] getRL2Image ( Geometry geom , String geomEpsg , int width , int height ) throws Exception { } }
// Build the geometry expression first: when a source EPSG is given, the
// geometry is reprojected into the coverage's SRID; otherwise it is used as-is.
String rasterName = getName();
String geomSql;
if (geomEpsg != null) {
    geomSql = "ST_Transform(ST_GeomFromText('" + geom.toText() + "', " + geomEpsg + "), " + srid + ")";
} else {
    geomSql = "ST_GeomFromText('" + geom.toText() + "')";
}
// Single rendering call; parameters past the size are style/format defaults.
final String sql = "select GetMapImageFromRaster('" + rasterName + "', " + geomSql
        + " , " + width + " , " + height + ", 'default', 'image/png', '#ffffff', 0, 80, 1 )";
return database.execOnConnection(connection -> {
    try (IHMStatement statement = connection.createStatement()) {
        IHMResultSet result = statement.executeQuery(sql);
        // First column of the first row holds the rendered PNG bytes.
        return result.next() ? result.getBytes(1) : null;
    }
});
public class MutableArray { /** * Inserts a Number object at the given index . * @ param index the index . This value must not exceed the bounds of the array . * @ param value the Number object * @ return The self object */ @ NonNull @ Override public MutableArray insertNumber ( int index , Number value ) { } }
// Numbers share the generic value-insertion path; returns this for chaining.
return insertValue ( index , value ) ;
public class Gradient { /** * Set the values of all the knots . * This version does not require the " extra " knots at - 1 and 256 * @ param x the knot positions * @ param rgb the knot colors * @ param types the knot types */ public void setKnots ( int [ ] x , int [ ] rgb , byte [ ] types ) { } }
// Rebuild the knot arrays with two sentinel slots (index 0 and numKnots-1)
// around the caller-supplied knots, then re-sort and regenerate the gradient.
//
// FIX: both fallback loops used "i > bound" instead of "i < bound", so they
// never executed -- when x or types was null the default knot positions /
// knot types were left as zeros instead of being initialized.
numKnots = rgb.length + 2;
xKnots = new int[numKnots];
yKnots = new int[numKnots];
knotTypes = new byte[numKnots];
if (x != null)
    System.arraycopy(x, 0, xKnots, 1, numKnots - 2);
else
    // No positions supplied: spread the interior knots evenly over 0..255.
    for (int i = 1; i < numKnots - 1; i++)
        xKnots[i] = 255 * i / (numKnots - 2);
System.arraycopy(rgb, 0, yKnots, 1, numKnots - 2);
if (types != null)
    System.arraycopy(types, 0, knotTypes, 1, numKnots - 2);
else
    // No types supplied: default every knot to RGB interpolation with splines.
    for (int i = 0; i < numKnots; i++)
        knotTypes[i] = RGB | SPLINE;
sortKnots();
rebuildGradient();
public class Block { /** * Checks the block contents * @ param height block height , if known , or - 1 otherwise . If valid , used * to validate the coinbase input script of v2 and above blocks . * @ param flags flags to indicate which tests should be applied ( i . e . * whether to test for height in the coinbase transaction ) . * @ throws VerificationException if there was an error verifying the block . */ public void verifyTransactions ( final int height , final EnumSet < VerifyFlag > flags ) throws VerificationException { } }
// Now we need to check that the body of the block actually matches the headers . The network won ' t generate // an invalid block , but if we didn ' t validate this then an untrusted man - in - the - middle could obtain the next // valid block from the network and simply replace the transactions in it with their own fictional // transactions that reference spent or non - existent inputs . if ( transactions . isEmpty ( ) ) throw new VerificationException ( "Block had no transactions" ) ; if ( this . getOptimalEncodingMessageSize ( ) > MAX_BLOCK_SIZE ) throw new VerificationException ( "Block larger than MAX_BLOCK_SIZE" ) ; checkTransactions ( height , flags ) ; checkMerkleRoot ( ) ; checkSigOps ( ) ; for ( Transaction transaction : transactions ) transaction . verify ( ) ;
public class AbstractStreamExporterImpl { /** * Obtains an { @ link OutputStream } to the provided { @ link File } . * @ param target * @ param overwrite * Whether we may overwrite an existing file * @ return * @ throws FileExistsException * If the specified file exists and the overwrite flag is false * @ throws IllegalArgumentException * If the file target is not specified */ protected final OutputStream getOutputStreamToFile ( final File target , final boolean overwrite ) throws FileExistsException { } }
// Precondition checks if ( target == null ) { throw new IllegalArgumentException ( "Target file must be specified" ) ; } if ( target . isDirectory ( ) ) { throw new IllegalArgumentException ( "Cannot export a stream file to existing directory: " + target . getAbsolutePath ( ) ) ; } // If target exists and we ' re not allowed to overwrite it if ( target . exists ( ) && ! overwrite ) { throw new FileExistsException ( "Target exists and we haven't been flagged to overwrite it: " + target . getAbsolutePath ( ) ) ; } // Get Stream final OutputStream out ; try { out = new FileOutputStream ( target ) ; } catch ( final FileNotFoundException e ) { throw new ArchiveExportException ( "File could not be created: " + target ) ; } // Return return out ;
public class AbstractSecretColumnSqlFilter { /** * { @ inheritDoc } * パラメータが暗号化対象のパラメータ名と一致する場合 、 パラメータの値を暗号化する * @ see jp . co . future . uroborosql . filter . AbstractSqlFilter # doParameter ( jp . co . future . uroborosql . parameter . Parameter ) */ @ Override public Parameter doParameter ( final Parameter parameter ) { } }
// Pass through untouched when filtering is disabled or there is no parameter.
if ( skipFilter || parameter == null ) { return parameter ; } if ( Parameter . class . equals ( parameter . getClass ( ) ) ) { // plain (non-subclassed) parameter: the only kind eligible for encryption
String key = parameter . getParameterName ( ) ;
// Encrypt only parameters whose (camel-cased) name is registered as a secret column.
if ( getCryptParamKeys ( ) . contains ( CaseFormat . CAMEL_CASE . convert ( key ) ) ) { Object obj = parameter . getValue ( ) ; if ( obj != null && obj instanceof String ) { String objStr = obj . toString ( ) ; if ( StringUtils . isNotEmpty ( objStr ) ) { try {
// Cipher instances are not thread-safe, hence the synchronized block.
synchronized ( encryptCipher ) { return new Parameter ( key , encrypt ( encryptCipher , secretKey , objStr ) ) ; } } catch ( Exception ex ) {
// NOTE(review): encryption failures are silently swallowed and the plaintext
// parameter is returned -- presumably deliberate best-effort behavior, but
// confirm this is acceptable for secret columns.
return parameter ; } } } } } return parameter ;
public class ReactiveMongoOperationsSessionRepository { /** * Deletes the { @ link MongoSession } with the given { @ link MongoSession # getId ( ) } or does nothing if the * { @ link MongoSession } is not found . * @ param id the { @ link MongoSession # getId ( ) } to delete */ @ Override public Mono < Void > deleteById ( String id ) { } }
// Find the session document, remove it from the collection, convert the
// removed document back into a session (for the event payload), publish a
// SessionDeletedEvent on success, and complete. If no document is found the
// chain is empty and no event is published.
return findSession ( id ) . flatMap ( document -> this . mongoOperations . remove ( document , this . collectionName ) . then ( Mono . just ( document ) ) ) . map ( document -> convertToSession ( this . mongoSessionConverter , document ) ) . doOnSuccess ( mongoSession -> publishEvent ( new SessionDeletedEvent ( this , mongoSession ) ) ) . then ( ) ;
public class MainActivity { /** * Request to read and write external storage permissions . */ private void requestPermissionForInstallPackage ( ) { } }
// Request the STORAGE runtime permission group. On grant, write the bundled
// APK to disk on a background task and then launch the package installer;
// on denial, surface a failure toast (the APK cannot be staged without it).
AndPermission . with ( this ) . runtime ( ) . permission ( Permission . Group . STORAGE ) . rationale ( new RuntimeRationale ( ) ) . onGranted ( new Action < List < String > > ( ) { @ Override public void onAction ( List < String > data ) {
// Write the APK asynchronously; install once the file is on disk.
new WriteApkTask ( MainActivity . this , new Runnable ( ) { @ Override public void run ( ) { installPackage ( ) ; } } ) . execute ( ) ; } } ) . onDenied ( new Action < List < String > > ( ) { @ Override public void onAction ( List < String > data ) {
// Without storage access the install cannot proceed; tell the user.
toast ( R . string . message_install_failed ) ; } } ) . start ( ) ;
public class UcsApi { /** * Delete an existing contact * @ param id id of the Contact ( required ) * @ param deletelData Request parameters . ( optional ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > deleteContactWithHttpInfo ( String id , DeletelData deletelData ) throws ApiException { } }
// Validate the arguments and build the HTTP call, then execute it
// synchronously, deserializing the body as ApiSuccessResponse.
com.squareup.okhttp.Call httpCall = deleteContactValidateBeforeCall(id, deletelData, null, null);
Type responseType = new TypeToken<ApiSuccessResponse>() { }.getType();
return apiClient.execute(httpCall, responseType);
public class SquareCrossClustersIntoGrids { /** * Finds a seed with 1 or 2 edges . */ static SquareNode findSeedNode ( List < SquareNode > cluster ) { } }
// Return the first node with exactly 1 or 2 connections, or null when the
// cluster contains no such node.
for (SquareNode candidate : cluster) {
    int connections = candidate.getNumberOfConnections();
    if (connections >= 1 && connections <= 2) {
        return candidate;
    }
}
return null;
public class AbstractRefreshableDataStore { /** * Triggers the update of the < code > DataStore < / code > . When this action is executed , the current data URL will be * read in and the < code > DataReader < / code > parses and builds a new Data instance . Finally , the currently set * < code > Data < / code > reference will be replaced by the new one . * < b > Attention < / b > : This method is implemented as background task . You can not assume that you immediately get an * updated data store . */ @ Override public void refresh ( ) { } }
try { updateOperation . run ( ) ; } catch ( final CanNotOpenStreamException e ) {
// The data URL could not be opened; keep the previous data and just warn.
LOG . warn ( String . format ( MSG_URL_NOT_READABLE , e . getLocalizedMessage ( ) ) ) ; } catch ( final IllegalArgumentException e ) {
// The reader rejected the content; keep the previous data.
LOG . warn ( MSG_FAULTY_CONTENT + " " + e . getLocalizedMessage ( ) ) ; } catch ( final RuntimeException e ) {
// Unexpected failure: log with the full stack trace for diagnosis.
// NOTE(review): the javadoc says this runs as a background task -- presumably
// updateOperation.run() dispatches asynchronously; confirm against its impl.
LOG . warn ( MSG_FAULTY_CONTENT , e ) ; }
public class TelegramBot { /** * Use this method to start the update thread which will begin retrieving messages from the API and firing the * relevant events for you to process the data * @ param getPreviousUpdates Whether you want to retrieve any updates that haven ' t been processed before , but were * created prior to calling the startUpdates method * @ return True if the updater was started , otherwise False */ public boolean startUpdates ( boolean getPreviousUpdates ) { } }
// Lazily create the update manager on first use, then start it only when it
// is not already running. Returns whether this call actually started it.
if (updateManager == null) {
    updateManager = new RequestUpdatesManager(this, getPreviousUpdates);
}
if (updateManager.isRunning()) {
    return false;
}
updateManager.startUpdates();
return true;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcIndexedTriangleTextureMap ( ) { } }
// Lazily resolve the EClass from the registered package metadata (generated
// EMF accessor); index 323 is the classifier's fixed position.
if (ifcIndexedTriangleTextureMapEClass != null) {
    return ifcIndexedTriangleTextureMapEClass;
}
EPackage registered = EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI);
ifcIndexedTriangleTextureMapEClass = (EClass) registered.getEClassifiers().get(323);
return ifcIndexedTriangleTextureMapEClass;
public class DiskCacheSizeInfo { /** * Call this method to check and add volume . If current disk cache size in GB plus request of new volume is over limit , * it will return false . * @ param type - type of volume ( TYPE _ CACHE _ DATA , TYPE _ DEPENDENCY _ ID _ DATA , TYPE _ TEMPLATE _ DATA ) * @ param vol - volume * @ return boolean false means no more volume can be added . */ public synchronized boolean checkAddVolume ( int type , int vol ) { } }
final String methodName = "checkAddVolume()";
// A limit <= 0 means "unlimited": always accept without bookkeeping.
if (this.diskCacheSizeInGBLimit > 0) {
    boolean bCalculateHighAndLow = false;
    int minGB = 0;
    switch (type) {
        case TYPE_CACHE_DATA:
            // Volume already accounted for in the current GB count: nothing to do.
            if (this.currentDataGB >= (vol + 1)) {
                return true;
            }
            // Overflow mode bypasses the size accounting entirely.
            if (this.allowOverflow) {
                traceDebug(methodName, "cacheName=" + this.cacheName + " allow overflow for data file vol=" + vol);
                return true;
            }
            // Adding one more GB must keep the combined total within the limit.
            minGB = this.currentDataGB + this.currentDependencyIdGB + this.currentTemplateGB + 1;
            if (this.diskCacheSizeInGBLimit >= minGB) {
                this.currentDataGB++;
            } else {
                traceDebug(methodName, "data over limit cacheName=" + this.cacheName + " add type=" + type + " diskCacheSizeInGBLimit=" + this.diskCacheSizeInGBLimit + " currentDataGB=" + this.currentDataGB + " currentDependencyIdGB=" + this.currentDependencyIdGB + " currentTemplateGB=" + this.currentTemplateGB);
                return false;
            }
            break;
        case TYPE_DEPENDENCY_ID_DATA:
            if (this.currentDependencyIdGB >= (vol + 1)) {
                return true;
            }
            if (this.allowOverflow) {
                traceDebug(methodName, "cacheName=" + this.cacheName + " allow overflow for dependency file id vol=" + vol);
                return true;
            }
            minGB = this.currentDataGB + this.currentDependencyIdGB + this.currentTemplateGB + 1;
            if (this.diskCacheSizeInGBLimit >= minGB) {
                this.currentDependencyIdGB++;
                // Dependency/template growth shrinks the space available to data,
                // so the byte limits must be recomputed below.
                bCalculateHighAndLow = true;
            } else {
                traceDebug(methodName, "depId over limit cacheName=" + this.cacheName + " add type=" + type + " diskCacheSizeInGBLimit=" + this.diskCacheSizeInGBLimit + " currentDataGB=" + this.currentDataGB + " currentDependencyIdGB=" + this.currentDependencyIdGB + " currentTemplateGB=" + this.currentTemplateGB);
                return false;
            }
            break;
        case TYPE_TEMPLATE_DATA:
            if (this.currentTemplateGB >= (vol + 1)) {
                return true;
            }
            if (this.allowOverflow) {
                traceDebug(methodName, "cacheName=" + this.cacheName + " allow overflow for template file vol=" + vol);
                return true;
            }
            minGB = this.currentDataGB + this.currentDependencyIdGB + this.currentTemplateGB + 1;
            if (this.diskCacheSizeInGBLimit >= minGB) {
                this.currentTemplateGB++;
                bCalculateHighAndLow = true;
            } else {
                traceDebug(methodName, "template over limit cacheName=" + this.cacheName + " add type=" + type + " diskCacheSizeInGBLimit=" + this.diskCacheSizeInGBLimit + " currentDataGB=" + this.currentDataGB + " currentDependencyIdGB=" + this.currentDependencyIdGB + " currentTemplateGB=" + this.currentTemplateGB);
                return false;
            }
            break;
    }
    if (bCalculateHighAndLow) {
        // Recompute the data-area byte limit (overall limit minus the GB reserved
        // for dependency-id and template data) and the derived high/low-water marks.
        this.diskCacheSizeInBytesLimit = (diskCacheSizeInGBLimit - this.currentDependencyIdGB - this.currentTemplateGB) * GB_SIZE;
        this.diskCacheSizeInBytesHighLimit = (this.diskCacheSizeInBytesLimit * (long) this.highThreshold) / 100l;
        this.diskCacheSizeInBytesLowLimit = (this.diskCacheSizeInBytesLimit * (long) this.lowThreshold) / 100l;
        traceDebug(methodName, "new limit: cacheName=" + this.cacheName + " add type=" + type + " diskCacheSizeInBytesLimit=" + this.diskCacheSizeInBytesLimit + " diskCacheSizeInBytesHighLimit=" + this.diskCacheSizeInBytesHighLimit + " diskCacheSizeInBytesLowLimit=" + this.diskCacheSizeInBytesLowLimit);
    }
    traceDebug(methodName, "cacheName=" + this.cacheName + " add volume=" + vol + " type=" + type + " currentDataGB=" + this.currentDataGB + " currentDependencyIdGB=" + this.currentDependencyIdGB + " currentTemplateGB=" + this.currentTemplateGB);
}
return true;
public class DataTableInterceptor { /** * 获取 dao 返回类型是否是DataTable * @ param resultSetHandler * @ return */ protected boolean isDataTableResultType ( DefaultResultSetHandler resultSetHandler ) { } }
// The DAO returns a DataTable iff there is exactly one result map and its
// declared type is DataTable.
MappedStatement statement = reflect(resultSetHandler);
List<ResultMap> resultMaps = statement.getResultMaps();
return resultMaps.size() == 1 && resultMaps.get(0).getType().equals(DataTable.class);
public class GraphHopperGeocoding { /** * Perform a geocoding request . Both forward and revers are possible , just configure the < code > request < / code > * accordingly . * @ param request the request to send to the API * @ return found results for your request */ public GHGeocodingResponse geocode ( GHGeocodingRequest request ) { } }
// Build the request URL (forward or reverse, per the request's configuration),
// perform the HTTP call, and deserialize the JSON body.
String url = buildUrl(request);
try {
    Request okRequest = new Request.Builder().url(url).build();
    // FIX: manage the ResponseBody with try-with-resources so the underlying
    // connection is released even when deserialization throws. (bytes() closes
    // the body on the happy path, but a failure before/after it would leak.)
    try (ResponseBody rspBody = getClientForRequest(request).newCall(okRequest).execute().body()) {
        return objectMapper.readValue(rspBody.bytes(), GHGeocodingResponse.class);
    }
} catch (Exception ex) {
    throw new RuntimeException("Problem performing geocoding for " + url + ": " + ex.getMessage(), ex);
}
public class AmazonSimpleEmailServiceClient { /** * Enables or disables email sending for messages sent using a specific configuration set in a given AWS Region . You * can use this operation in conjunction with Amazon CloudWatch alarms to temporarily pause email sending for a * configuration set when the reputation metrics for that configuration set ( such as your bounce on complaint rate ) * exceed certain thresholds . * You can execute this operation no more than once per second . * @ param updateConfigurationSetSendingEnabledRequest * Represents a request to enable or disable the email sending capabilities for a specific configuration set . * @ return Result of the UpdateConfigurationSetSendingEnabled operation returned by the service . * @ throws ConfigurationSetDoesNotExistException * Indicates that the configuration set does not exist . * @ sample AmazonSimpleEmailService . UpdateConfigurationSetSendingEnabled * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / email - 2010-12-01 / UpdateConfigurationSetSendingEnabled " * target = " _ top " > AWS API Documentation < / a > */ @ Override public UpdateConfigurationSetSendingEnabledResult updateConfigurationSetSendingEnabled ( UpdateConfigurationSetSendingEnabledRequest request ) { } }
// Run the standard pre-execution hooks (handlers/metrics), then execute.
request = beforeClientExecution ( request ) ; return executeUpdateConfigurationSetSendingEnabled ( request ) ;
public class CmsADEManager { /** * Gets the detail page for a content element . < p > * @ param cms the CMS context * @ param pageRootPath the element ' s root path * @ param originPath the path in which the the detail page is being requested * @ param targetDetailPage the target detail page to use * @ return the detail page for the content element */ public String getDetailPage ( CmsObject cms , String pageRootPath , String originPath , String targetDetailPage ) { } }
// Resolve the element's resource type from the cached parent-folder mapping;
// without a type there can be no detail page.
boolean online = isOnline(cms);
String resourceType = getCacheState(online).getParentFolderType(pageRootPath);
if (resourceType == null) {
    return null;
}
// An explicitly requested target wins when it is a registered detail page.
if ((targetDetailPage != null) && getDetailPages(cms, resourceType).contains(targetDetailPage)) {
    return targetDetailPage;
}
String originRootPath = cms.getRequestContext().addSiteRoot(originPath);
CmsADEConfigData originConfig = lookupConfiguration(cms, originRootPath);
CmsADEConfigData targetConfig = lookupConfiguration(cms, pageRootPath);
// The target configuration may prefer its own detail pages over the origin's.
List<CmsADEConfigData> searchOrder = targetConfig.isPreferDetailPagesForLocalContents()
        ? Arrays.asList(targetConfig, originConfig)
        : Arrays.asList(originConfig, targetConfig);
for (CmsADEConfigData config : searchOrder) {
    List<CmsDetailPageInfo> pages = config.getDetailPagesForType(resourceType);
    if ((pages != null) && !pages.isEmpty()) {
        // First registered detail page for the type wins.
        return pages.get(0).getUri();
    }
}
return null;
public class SVGHyperCube { /** * Recursive helper for hypercube drawing . * @ param path path * @ param minx starting corner * @ param miny starting corner * @ param r _ edges edge vectors * @ param b bit set of drawn edges */ private static void recDrawEdges ( SVGPath path , double minx , double miny , List < double [ ] > r_edges , long [ ] b ) { } }
// Draw all " missing " edges for ( int i = 0 ; i < r_edges . size ( ) ; i ++ ) { if ( BitsUtil . get ( b , i ) ) { continue ; } final double [ ] edge = r_edges . get ( i ) ; final double x_i = minx + edge [ 0 ] , y_i = miny + edge [ 1 ] ; if ( ! isFinite ( x_i ) || ! isFinite ( y_i ) ) { continue ; } path . moveTo ( minx , miny ) . drawTo ( x_i , y_i ) ; // Recursion BitsUtil . setI ( b , i ) ; recDrawEdges ( path , x_i , y_i , r_edges , b ) ; BitsUtil . clearI ( b , i ) ; }
public class CmsEditUserAddInfoDialog { /** * Initializes the additional info bean to work with , depending on the dialog state and request parameters . < p > */ @ SuppressWarnings ( "unchecked" ) protected void initUserObject ( ) { } }
try { if ( CmsStringUtil . isEmpty ( getParamAction ( ) ) || CmsDialog . DIALOG_INITIAL . equals ( getParamAction ( ) ) ) { // edit an existing user, get the user object from db
m_user = getCms ( ) . readUser ( new CmsUUID ( getParamUserid ( ) ) ) ; if ( ! Boolean . valueOf ( getParamEditall ( ) ) . booleanValue ( ) ) { m_addInfoList = createAddInfoList ( m_user ) ; } else { setAddInfoMaps ( ) ; } return ; } else { // this is not the initial call, get the user object from session
m_user = getCms ( ) . readUser ( new CmsUUID ( getParamUserid ( ) ) ) ; if ( ! Boolean . valueOf ( getParamEditall ( ) ) . booleanValue ( ) ) {
// Not in "edit all" mode: the additional-info list was stashed as the dialog object.
m_addInfoList = ( List < CmsUserAddInfoBean > ) getDialogObject ( ) ; } else {
// "Edit all" mode: the dialog object holds the editable/read-only maps.
Map < String , SortedMap < String , Object > > dObj = ( Map < String , SortedMap < String , Object > > ) getDialogObject ( ) ; m_addInfoEditable = dObj . get ( "editable" ) ; m_addInfoReadOnly = dObj . get ( "readonly" ) ; } return ; } } catch ( Exception e ) {
// noop -- NOTE(review): any failure above (bad UUID, missing user, wrong
// dialog-object type) is deliberately swallowed and handled by the
// fallback below; confirm this is the intended recovery path.
}
// create a new user object (fallback when the normal paths above failed)
try { m_user = getCms ( ) . readUser ( new CmsUUID ( getParamUserid ( ) ) ) ; } catch ( CmsException e ) { // ignore
} if ( ! Boolean . valueOf ( getParamEditall ( ) ) . booleanValue ( ) ) { m_addInfoList = createAddInfoList ( m_user ) ; } else { setAddInfoMaps ( ) ; }
// Compacts the backing matrix after rows have been purged: copies the live
// row range [from..to] to the front of a freshly allocated, smaller matrix
// and advances 'offset' by the number of elements dropped. new_size is
// range * resize_factor (but at least range + 1) and never below the
// configured num_rows; the copy only happens when this actually shrinks the
// matrix. Caller must hold the lock (see @GuardedBy).
public class Table { /** * Moves the contents of matrix down by the number of purged rows and resizes the matrix accordingly . The * capacity of the matrix should be size * resize _ factor . Caller must hold the lock . */ @ GuardedBy ( "lock" ) protected void _compact ( ) { } }
// This is the range we need to copy into the new matrix ( including from and to ) int from = computeRow ( low ) , to = computeRow ( hr ) ; int range = to - from + 1 ; // e . g . from = 3 , to = 5 , new _ size has to be [ 3 . . 5 ] ( = 3) int new_size = ( int ) Math . max ( ( double ) range * resize_factor , ( double ) range + 1 ) ; new_size = Math . max ( new_size , num_rows ) ; // don ' t fall below the initial size defined if ( new_size < matrix . length ) { T [ ] [ ] new_matrix = ( T [ ] [ ] ) new Object [ new_size ] [ ] ; System . arraycopy ( matrix , from , new_matrix , 0 , range ) ; matrix = new_matrix ; offset += from * elements_per_row ; num_compactions ++ ; }
public class DiscoverItems { /** * Adds a collection of items to the discovered information . Does nothing if itemsToAdd is null * @ param itemsToAdd */ public void addItems ( Collection < Item > itemsToAdd ) { } }
if ( itemsToAdd == null ) return ; for ( Item i : itemsToAdd ) { addItem ( i ) ; }
// Resolves the named parameter to a File via StringToFile and validates it
// with the conjunction FileExists AND IsDirectory; "existing directory" is
// the human-readable description passed to get() — presumably used in the
// error message when validation fails (get() not visible here; verify).
public class Parameters { /** * Gets a directory which already exists . */ public File getExistingDirectory ( final String param ) { } }
return get ( param , new StringToFile ( ) , new And < > ( new FileExists ( ) , new IsDirectory ( ) ) , "existing directory" ) ;
public class FieldConstraintsBuilder { /** * Creates range constraints according to CronFieldName parameter . * @ param field - CronFieldName * @ return FieldConstraintsBuilder instance */ public FieldConstraintsBuilder forField ( final CronFieldName field ) { } }
switch ( field ) { case SECOND : case MINUTE : endRange = 59 ; return this ; case HOUR : endRange = 23 ; return this ; case DAY_OF_WEEK : stringMapping = daysOfWeekMapping ( ) ; endRange = 6 ; return this ; case DAY_OF_MONTH : startRange = 1 ; endRange = 31 ; return this ; case MONTH : stringMapping = monthsMapping ( ) ; startRange = 1 ; endRange = 12 ; return this ; case DAY_OF_YEAR : startRange = 1 ; endRange = 366 ; return this ; default : return this ; }
public class CassandraClientBase { /** * Compute entity via super columns . * @ param m * the m * @ param isRelation * the is relation * @ param relations * the relations * @ param entities * the entities * @ param qResults * the q results */ protected void computeEntityViaSuperColumns ( EntityMetadata m , boolean isRelation , List < String > relations , List < Object > entities , Map < ByteBuffer , List < SuperColumn > > qResults ) { } }
for ( ByteBuffer key : qResults . keySet ( ) ) { onSuperColumn ( m , isRelation , relations , entities , qResults . get ( key ) , key ) ; }
public class JGTProcessingRegion { /** * Transforms a GRASS resolution string in metric or degree to decimal . * @ param ewres the x resolution string . * @ param nsres the y resolution string . * @ return the array of x and y resolution doubles . */ private double [ ] xyResStringToNumbers ( String ewres , String nsres ) { } }
double xres = - 1.0 ; double yres = - 1.0 ; if ( ewres . indexOf ( ':' ) != - 1 ) { xres = degreeToNumber ( ewres ) ; } else { xres = Double . parseDouble ( ewres ) ; } if ( nsres . indexOf ( ':' ) != - 1 ) { yres = degreeToNumber ( nsres ) ; } else { yres = Double . parseDouble ( nsres ) ; } return new double [ ] { xres , yres } ;
// Numeric types delegate to getPrecision(); unbounded STRING/BINARY report
// Integer.MAX_VALUE; CHAR/VARCHAR use the declared maximum length from the
// type qualifiers; DATE is 10 characters and TIMESTAMP with maximum
// fractional-second precision is 29 characters. Every other type has no
// applicable column size and yields null.
public class TypeDescriptor { /** * The column size for this type . * For numeric data this is the maximum precision . * For character data this is the length in characters . * For datetime types this is the length in characters of the String representation * ( assuming the maximum allowed precision of the fractional seconds component ) . * For binary data this is the length in bytes . * Null is returned for data types where the column size is not applicable . */ public Integer getColumnSize ( ) { } }
if ( type . isNumericType ( ) ) { return getPrecision ( ) ; } switch ( type ) { case STRING_TYPE : case BINARY_TYPE : return Integer . MAX_VALUE ; case CHAR_TYPE : case VARCHAR_TYPE : return typeQualifiers . getCharacterMaximumLength ( ) ; case DATE_TYPE : return 10 ; case TIMESTAMP_TYPE : return 29 ; default : return null ; }
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcSpatialZoneTypeEnum createIfcSpatialZoneTypeEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcSpatialZoneTypeEnum result = IfcSpatialZoneTypeEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
// Persists a lock for this item link. Only legal from STATE_LOCKED: under the
// instance monitor it records the transaction's persistent id, declares the
// item not-discardable and transitions to STATE_PERSISTING_LOCK. Any other
// state is traced and rejected with a StateException (after exiting trace).
// _declareNotReleasable() is deliberately invoked after leaving the
// synchronized block rather than under the lock. A null transaction is a
// no-op apart from trace entry/exit.
public class AbstractItemLink { /** * Use this to lock persistently . Invoke indirectly by the API . * @ param transaction */ final void cmdPersistLock ( final PersistentTransaction transaction ) throws SevereMessageStoreException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "cmdPersistLock" , new Object [ ] { "Item Link: " + this , "Stream Link: " + _owningStreamLink , "Transaction: " + transaction } ) ; if ( null != transaction ) { boolean hasBecomeNonReleasable = false ; synchronized ( this ) { if ( ItemLinkState . STATE_LOCKED == _itemLinkState ) { _transactionId = transaction . getPersistentTranId ( ) ; hasBecomeNonReleasable = _declareNotDiscardable ( _getAndAssertItem ( ) ) ; _itemLinkState = ItemLinkState . STATE_PERSISTING_LOCK ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) SibTr . event ( this , tc , "Invalid Item state: " + _itemLinkState ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "cmdPersistLock" ) ; throw new StateException ( _itemLinkState . toString ( ) ) ; } } if ( hasBecomeNonReleasable ) { _declareNotReleasable ( ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "cmdPersistLock" ) ;
// Renders four hidden inputs (alpha/beta/gamma angles plus a throttle timer)
// and an inline script that listens for the browser 'deviceorientation'
// event, rounds the new angles, skips the update when nothing changed,
// rate-limits updates to at most one per 100 ms via the timer field, and
// then fires the AJAX call generated for the "rotation" event (retargeted at
// the alpha hidden input instead of 'this'). Nothing is rendered when the
// component is not rendered. The commented-out block at the end is the
// disabled 'compassneedscalibration' handler.
public class GyroscopeRenderer { /** * This methods generates the HTML code of the current b : gyroscope . * @ param context the FacesContext . * @ param component the current b : gyroscope . * @ throws IOException thrown if something goes wrong when writing the HTML code . */ @ Override public void encodeBegin ( FacesContext context , UIComponent component ) throws IOException { } }
if ( ! component . isRendered ( ) ) { return ; } Gyroscope gyroscope = ( Gyroscope ) component ; ResponseWriter rw = context . getResponseWriter ( ) ; String clientId = gyroscope . getClientId ( ) ; rw . startElement ( "input" , component ) ; rw . writeAttribute ( "id" , clientId + ".alpha" , null ) ; rw . writeAttribute ( "name" , clientId + ".alpha" , null ) ; rw . writeAttribute ( "type" , "hidden" , null ) ; rw . endElement ( "input" ) ; rw . startElement ( "input" , component ) ; rw . writeAttribute ( "id" , clientId + ".beta" , null ) ; rw . writeAttribute ( "name" , clientId + ".beta" , null ) ; rw . writeAttribute ( "type" , "hidden" , null ) ; rw . endElement ( "input" ) ; rw . startElement ( "input" , component ) ; rw . writeAttribute ( "id" , clientId + ".gamma" , null ) ; rw . writeAttribute ( "name" , clientId + ".gamma" , null ) ; rw . writeAttribute ( "type" , "hidden" , null ) ; rw . endElement ( "input" ) ; rw . startElement ( "input" , component ) ; rw . writeAttribute ( "id" , clientId + ".timer" , null ) ; rw . writeAttribute ( "name" , clientId + ".timer" , null ) ; rw . writeAttribute ( "value" , "0" , null ) ; rw . writeAttribute ( "type" , "hidden" , null ) ; rw . endElement ( "input" ) ; rw . startElement ( "script" , component ) ; rw . writeAttribute ( "id" , clientId , null ) ; StringBuilder jsCode = new StringBuilder ( ) ; // Render Ajax Capabilities AJAXRenderer . generateAJAXCallForASingleEvent ( FacesContext . getCurrentInstance ( ) , gyroscope , rw , null , null , null , false , "rotation" , jsCode , null ) ; String js = jsCode . toString ( ) . replace ( "callAjax(this," , "callAjax(document.getElementById('" + clientId + ".alpha')," ) ; rw . write ( "window.addEventListener('deviceorientation', function(event) {\n" ) ; rw . write ( " var oldAlpha = document.getElementById('" + clientId + ".alpha').value;" ) ; rw . write ( " var alpha = Math.round(event.alpha);" ) ; rw . 
write ( " var oldBeta = document.getElementById('" + clientId + ".beta').value;" ) ; rw . write ( " var beta = Math.round(event.beta);" ) ; rw . write ( " var oldGamma = document.getElementById('" + clientId + ".gamma').value;" ) ; rw . write ( " var gamma = Math.round(event.gamma);" ) ; rw . write ( " if (alpha==oldAlpha && beta == oldBeta && gamma == oldGamma) return;" ) ; rw . write ( " if (new Date().getTime() < document.getElementById('" + clientId + ".timer').value) return;" ) ; rw . write ( " document.getElementById('" + clientId + ".alpha').value = alpha;" ) ; rw . write ( " document.getElementById('" + clientId + ".beta').value = beta;" ) ; rw . write ( " document.getElementById('" + clientId + ".gamma').value = gamma;" ) ; rw . write ( " document.getElementById('" + clientId + ".timer').value = new Date().getTime()+100;" ) ; rw . write ( js ) ; rw . write ( "}, true);\n" ) ; // rw . write ( " window . addEventListener ( ' compassneedscalibration ' , function ( event ) { \ n " ) ; // rw . write ( " alert ( ' Your compass needs calibrating ! Wave your device in a figure - eight motion ' ) ; " ) ; // rw . write ( " event . preventDefault ( ) ; " ) ; // rw . write ( " } , true ) ; \ n " ) ; rw . endElement ( "script" ) ;
public class BinarySparseDataset { /** * Returns the value at entry ( i , j ) by binary search . * @ param i the row index . * @ param j the column index . */ public int get ( int i , int j ) { } }
if ( i < 0 || i >= size ( ) ) { throw new IllegalArgumentException ( "Invalid index: i = " + i ) ; } int [ ] x = get ( i ) . x ; if ( x . length == 0 ) { return 0 ; } int low = 0 ; int high = x . length - 1 ; int mid = ( low + high ) / 2 ; while ( j != x [ mid ] && low <= high ) { mid = ( low + high ) / 2 ; if ( j < x [ mid ] ) high = mid - 1 ; else low = mid + 1 ; } if ( j == x [ mid ] ) { return 1 ; } else { return 0 ; }
// Drops the cached measurement for dataIndex so the item is measured again on
// the next layout pass; synchronizes on mMeasuredChildren to stay consistent
// with concurrent measuring, and logs the invalidation.
// NOTE(review): whether remove(dataIndex) removes by index or by key depends
// on the declared type of mMeasuredChildren, which is not visible here — verify.
public class Layout { /** * Invalidate the item in layout * @ param dataIndex data index */ public void invalidate ( final int dataIndex ) { } }
synchronized ( mMeasuredChildren ) { Log . d ( Log . SUBSYSTEM . LAYOUT , TAG , "invalidate [%d]" , dataIndex ) ; mMeasuredChildren . remove ( dataIndex ) ; }
public class SimpleSynchronousNotificationExecutor { /** * Fire notification of an entry that was just removed from the registry . * @ param removeKey key identifying the entry removed from the registry . * @ param removeValue value of the entry in the registry . */ public void fireRemoveNotification ( Iterator < RegistryListener < K , V > > listeners , K removeKey , V removeValue ) { } }
while ( listeners . hasNext ( ) ) { listeners . next ( ) . onRemoveEntry ( removeKey , removeValue ) ; }
// Pre-order recursive print: writes the indent (one fixed-width unit per
// level), then "<shortName>: <targetPatterns>", then each child on its own
// line one level deeper.
public class GrammaticalRelation { /** * Returns a < code > String < / code > representation of this * < code > GrammaticalRelation < / code > and the hierarchy below * it , with one node per line , indented according to * < code > indentLevel < / code > . * @ param indentLevel how many levels to indent ( 0 for root node ) */ private void toPrettyString ( int indentLevel , StringBuilder buf ) { } }
for ( int i = 0 ; i < indentLevel ; i ++ ) { buf . append ( " " ) ; } buf . append ( shortName ) . append ( ": " ) . append ( targetPatterns ) ; for ( GrammaticalRelation child : children ) { buf . append ( '\n' ) ; child . toPrettyString ( indentLevel + 1 , buf ) ; }
// Saves the bank statement and, when a CSV file was uploaded, imports its
// rows as BankStatementLine records according to the selected BankCsvMethod:
// 1-based column indices for date/amount/description/status, a SimpleDateFormat
// pattern for the date column, and an amount format of two comma-separated
// separator placeholders (SPACE/COMMA/NONE) — the first replaced with '.'
// (decimal point), the second stripped (grouping). Status words may map the
// line to OTHER (not in acceptedWords) or VOIDED (in voidedWords), and the
// status text is appended to the description. Configuration problems raise
// CONFIGURATION_MISTAKE, bad cell values raise WRONG_PARAMETER. The header
// row is skipped when the method declares one, the reader is closed in the
// finally block, and without an uploaded file the entity is simply inserted
// or updated.
public class PrcBankStatementSave { /** * < p > Process entity request . < / p > * @ param pAddParam additional param , e . g . return this line ' s * document in " nextEntity " for farther process * @ param pRequestData Request Data * @ param pEntity Entity to process * @ return Entity processed for farther process or null * @ throws Exception - an exception */ @ Override public final BankStatement process ( final Map < String , Object > pAddParam , final BankStatement pEntity , final IRequestData pRequestData ) throws Exception { } }
// if exist file name : String fileToUploadName = ( String ) pRequestData . getAttribute ( "fileToUploadName" ) ; if ( fileToUploadName != null ) { String bankCsvMethodId = pRequestData . getParameter ( "bankCsvMethod" ) ; BankCsvMethod bankCsvMethod = getSrvOrm ( ) . retrieveEntityById ( pAddParam , BankCsvMethod . class , Long . parseLong ( bankCsvMethodId ) ) ; bankCsvMethod . getCsvMethod ( ) . setColumns ( getSrvOrm ( ) . retrieveListWithConditions ( pAddParam , CsvColumn . class , "where ITSOWNER=" + bankCsvMethod . getCsvMethod ( ) . getItsId ( ) ) ) ; pEntity . setSourceName ( fileToUploadName + "/" + bankCsvMethod . getItsName ( ) ) ; if ( pEntity . getIsNew ( ) ) { getSrvOrm ( ) . insertEntity ( pAddParam , pEntity ) ; } else { getSrvOrm ( ) . updateEntity ( pAddParam , pEntity ) ; } InputStreamReader reader = null ; SimpleDateFormat sdf = null ; try { sdf = new SimpleDateFormat ( bankCsvMethod . getDateCol ( ) . getDataFormat ( ) ) ; } catch ( Exception ee ) { throw new ExceptionWithCode ( ExceptionWithCode . CONFIGURATION_MISTAKE , "Wrong date format! Format: " + bankCsvMethod . getDateCol ( ) . getDataFormat ( ) , ee ) ; } String [ ] seps = null ; if ( bankCsvMethod . getAmountCol ( ) . getDataFormat ( ) != null ) { try { seps = bankCsvMethod . getAmountCol ( ) . getDataFormat ( ) . split ( "," ) ; for ( int i = 0 ; i < 2 ; i ++ ) { if ( "SPACE" . equals ( seps [ i ] ) ) { seps [ i ] = " " ; } else if ( "COMMA" . equals ( seps [ i ] ) ) { seps [ i ] = "," ; } } } catch ( Exception ee ) { throw new ExceptionWithCode ( ExceptionWithCode . CONFIGURATION_MISTAKE , "Wrong amount format! Format: " + bankCsvMethod . getAmountCol ( ) . getDataFormat ( ) , ee ) ; } } try { InputStream ins = ( InputStream ) pRequestData . getAttribute ( "fileToUploadInputStream" ) ; reader = new InputStreamReader ( ins , Charset . forName ( bankCsvMethod . getCsvMethod ( ) . getCharsetName ( ) ) . newDecoder ( ) ) ; List < String > csvRow ; int r = 0 ; while ( ( csvRow = this . 
csvReader . readNextRow ( pAddParam , reader , bankCsvMethod . getCsvMethod ( ) ) ) != null ) { r ++ ; if ( r == 1 && bankCsvMethod . getCsvMethod ( ) . getHasHeader ( ) ) { continue ; } BankStatementLine bsl = new BankStatementLine ( ) ; bsl . setIdDatabaseBirth ( getSrvOrm ( ) . getIdDatabase ( ) ) ; bsl . setItsOwner ( pEntity ) ; String dateStr = csvRow . get ( bankCsvMethod . getDateCol ( ) . getItsIndex ( ) - 1 ) ; try { bsl . setItsDate ( sdf . parse ( dateStr ) ) ; } catch ( Exception ee ) { throw new ExceptionWithCode ( ExceptionWithCode . WRONG_PARAMETER , "Wrong date value! Value/Format: " + dateStr + "/" + bankCsvMethod . getDateCol ( ) . getDataFormat ( ) , ee ) ; } String amountStr = csvRow . get ( bankCsvMethod . getAmountCol ( ) . getItsIndex ( ) - 1 ) ; try { if ( seps != null ) { if ( ! "NONE" . equals ( seps [ 0 ] ) ) { amountStr = amountStr . replace ( seps [ 0 ] , "." ) ; } if ( ! "NONE" . equals ( seps [ 1 ] ) ) { amountStr = amountStr . replace ( seps [ 1 ] , "" ) ; } } bsl . setItsAmount ( new BigDecimal ( amountStr ) ) ; } catch ( Exception ee ) { throw new ExceptionWithCode ( ExceptionWithCode . CONFIGURATION_MISTAKE , "Wrong amount value! Value/Format: " + amountStr + "/" + bankCsvMethod . getAmountCol ( ) . getDataFormat ( ) , ee ) ; } String descr = null ; if ( bankCsvMethod . getDescriptionCol ( ) != null ) { descr = csvRow . get ( bankCsvMethod . getDescriptionCol ( ) . getItsIndex ( ) - 1 ) ; } if ( bankCsvMethod . getStatusCol ( ) != null ) { String statusStr = csvRow . get ( bankCsvMethod . getStatusCol ( ) . getItsIndex ( ) - 1 ) ; if ( descr == null ) { descr = statusStr ; } else { descr += "/" + statusStr ; } if ( bankCsvMethod . getAcceptedWords ( ) != null && ! bankCsvMethod . getAcceptedWords ( ) . contains ( statusStr ) ) { bsl . setItsStatus ( EBankEntryStatus . OTHER ) ; } if ( bankCsvMethod . getVoidedWords ( ) != null && bankCsvMethod . getVoidedWords ( ) . contains ( statusStr ) ) { bsl . 
setItsStatus ( EBankEntryStatus . VOIDED ) ; } } bsl . setDescriptionStatus ( descr ) ; getSrvOrm ( ) . insertEntity ( pAddParam , bsl ) ; } } finally { if ( reader != null ) { reader . close ( ) ; } } } else { if ( pEntity . getIsNew ( ) ) { getSrvOrm ( ) . insertEntity ( pAddParam , pEntity ) ; } else { getSrvOrm ( ) . updateEntity ( pAddParam , pEntity ) ; } } return pEntity ;
public class EncryptKit { /** * HmacSHA256加密 * @ param data 明文字符串 * @ param key 秘钥 * @ return 16进制密文 */ public static String hmacSHA256 ( String data , String key ) { } }
return hmacSHA256 ( data . getBytes ( ) , key . getBytes ( ) ) ;
// Registers the extension type against its installer class in the holder,
// then either delegates the actual binding to a BindingInstaller or, for
// non-lazy extensions, creates a default untargetted Guice binding. (The
// garbled inline comment below means: when the installer does not install
// the binding manually, the 'lazy' flag simply disables the automatic
// registration.)
public class InstallerModule { /** * Bind extension to guice context . * @ param item extension item descriptor * @ param installer detected extension installer * @ param holder extensions holder bean */ @ SuppressWarnings ( "unchecked" ) private void bindExtension ( final ExtensionItemInfo item , final FeatureInstaller installer , final ExtensionsHolder holder ) { } }
final Class < ? extends FeatureInstaller > installerClass = installer . getClass ( ) ; final Class < ? > type = item . getType ( ) ; holder . register ( installerClass , type ) ; if ( installer instanceof BindingInstaller ) { ( ( BindingInstaller ) installer ) . install ( binder ( ) , type , item . isLazy ( ) ) ; } else if ( ! item . isLazy ( ) ) { // if installer isn ' t install binding manually , lazy simply disable registration binder ( ) . bind ( type ) ; }
public class HighlightGenerator { /** * Create the shape which will highlight the provided atom . * @ param atom the atom to highlight * @ param radius the specified radius * @ return the shape which will highlight the atom */ private static Shape createAtomHighlight ( IAtom atom , double radius ) { } }
double x = atom . getPoint2d ( ) . x ; double y = atom . getPoint2d ( ) . y ; return new RoundRectangle2D . Double ( x - radius , y - radius , 2 * radius , 2 * radius , 2 * radius , 2 * radius ) ;
public class IdentityHashSet { /** * Serialize this Object in a manner which is binary - compatible with the * JDK . */ private void writeObject ( ObjectOutputStream s ) throws IOException { } }
Iterator < E > it = iterator ( ) ; s . writeInt ( size ( ) * 2 ) ; // expectedMaxSize s . writeInt ( size ( ) ) ; while ( it . hasNext ( ) ) s . writeObject ( it . next ( ) ) ;
// Broadcasts the network parameters and the JSON network configuration once,
// then scores every partition with ScoreExamplesFunction using the given
// batch size, producing one score per example.
public class SparkDl4jMultiLayer { /** * Score the examples individually , using a specified batch size . Unlike { @ link # calculateScore ( JavaRDD , boolean ) } , * this method returns a score for each example separately . If scoring is needed for specific examples use either * { @ link # scoreExamples ( JavaPairRDD , boolean ) } or { @ link # scoreExamples ( JavaPairRDD , boolean , int ) } which can have * a key for each example . * @ param data Data to score * @ param includeRegularizationTerms If true : include the l1 / l2 regularization terms with the score ( if any ) * @ param batchSize Batch size to use when doing scoring * @ return A JavaDoubleRDD containing the scores of each example * @ see MultiLayerNetwork # scoreExamples ( DataSet , boolean ) */ public JavaDoubleRDD scoreExamples ( JavaRDD < DataSet > data , boolean includeRegularizationTerms , int batchSize ) { } }
return data . mapPartitionsToDouble ( new ScoreExamplesFunction ( sc . broadcast ( network . params ( ) ) , sc . broadcast ( conf . toJson ( ) ) , includeRegularizationTerms , batchSize ) ) ;
// Filters the query by historic external task log id. A null id is rejected
// with NotValidException before it is stored; returns this query for chaining.
public class HistoricExternalTaskLogQueryImpl { /** * query parameter / / / / / */ @ Override public HistoricExternalTaskLogQuery logId ( String historicExternalTaskLogId ) { } }
ensureNotNull ( NotValidException . class , "historicExternalTaskLogId" , historicExternalTaskLogId ) ; this . id = historicExternalTaskLogId ; return this ;
public class BlobStoreUtils { /** * no need to synchronize , since EventManager will execute sequentially */ public static void downloadDistributeStormCode ( Map conf , String topologyId , String masterCodeDir ) throws IOException , TException { } }
String tmpToot = null ; try { // STORM _ LOCAL _ DIR / supervisor / tmp / ( UUID ) tmpToot = StormConfig . supervisorTmpDir ( conf ) + File . separator + UUID . randomUUID ( ) . toString ( ) ; // STORM _ LOCAL _ DIR / supervisor / stormdist / topologyId String stormRoot = StormConfig . supervisor_stormdist_root ( conf , topologyId ) ; // JStormServerUtils . downloadCodeFromMaster ( conf , tmproot , masterCodeDir , topologyId , true ) ; JStormServerUtils . downloadCodeFromBlobStore ( conf , tmpToot , topologyId ) ; // tmproot / stormjar . jar String localFileJarTmp = StormConfig . stormjar_path ( tmpToot ) ; // extract dir from jar JStormUtils . extractDirFromJar ( localFileJarTmp , StormConfig . RESOURCES_SUBDIR , tmpToot ) ; File srcDir = new File ( tmpToot ) ; File destDir = new File ( stormRoot ) ; try { FileUtils . moveDirectory ( srcDir , destDir ) ; } catch ( FileExistsException e ) { FileUtils . copyDirectory ( srcDir , destDir ) ; FileUtils . deleteQuietly ( srcDir ) ; } } finally { if ( tmpToot != null ) { File srcDir = new File ( tmpToot ) ; FileUtils . deleteQuietly ( srcDir ) ; } }
public class Assert { /** * Formats the specified { @ link String message } with the given { @ link Object arguments } . * @ param message { @ link String } containing the message to format . * @ param arguments array of { @ link Object arguments } used when formatting the { @ link String message } . * @ return the { @ link String message } formatted with the { @ link Object arguments } . * @ see # messageFormat ( String , Object . . . ) * @ see # stringFormat ( String , Object . . . ) */ private static String format ( String message , Object ... arguments ) { } }
return stringFormat ( messageFormat ( message , arguments ) , arguments ) ;
public class ChromeCast { /** * < p > Loads and starts playing media in specified URL < / p > * < p > If no application is running at the moment then exception is thrown . < / p > * @ param url media url * @ return The new media status that resulted from loading the media . * @ throws IOException */ public final MediaStatus load ( String url ) throws IOException { } }
return load ( url . substring ( url . lastIndexOf ( '/' ) + 1 , url . lastIndexOf ( '.' ) ) , null , url , null ) ;
public class ClassUtil { /** * loads a class from a String classname * @ param className * @ param defaultValue * @ return matching Class */ public static Class loadClass ( String className , Class defaultValue ) { } }
// OSGI env Class clazz = _loadClass ( new OSGiBasedClassLoading ( ) , className , null , null ) ; if ( clazz != null ) return clazz ; // core classloader clazz = _loadClass ( new ClassLoaderBasedClassLoading ( SystemUtil . getCoreClassLoader ( ) ) , className , null , null ) ; if ( clazz != null ) return clazz ; // loader classloader clazz = _loadClass ( new ClassLoaderBasedClassLoading ( SystemUtil . getLoaderClassLoader ( ) ) , className , null , null ) ; if ( clazz != null ) return clazz ; return defaultValue ;
public class LocalPDBDirectory { /** * Download a structure , but don ' t parse it yet or store it in memory . * Used to pre - fetch large numbers of structures . * @ param pdbId * @ throws IOException */ public void prefetchStructure ( String pdbId ) throws IOException { } }
if ( pdbId . length ( ) != 4 ) throw new IOException ( "The provided ID does not look like a PDB ID : " + pdbId ) ; // Check existing File file = downloadStructure ( pdbId ) ; if ( ! file . exists ( ) ) { throw new IOException ( "Structure " + pdbId + " not found and unable to download." ) ; }
// Android-modified path: resources are not localized; the message is built
// directly from the XSLT bundle via createMsg with the given key and
// replacement arguments.
public class XSLMessages { /** * Creates a message from the specified key and replacement * arguments , localized to the given locale . * @ param msgKey The key for the message text . * @ param args The arguments to be used as replacement text * in the message created . * @ return The formatted message string . */ public static final String createMessage ( String msgKey , Object args [ ] ) // throws Exception { } }
// BEGIN android - changed // don ' t localize resources return createMsg ( XSLTBundle , msgKey , args ) ; // END android - changed
// Boots the framework: resolves a classloader (the configured framework
// classloader, falling back to our own), initializes the log provider,
// creates the FrameworkManager and releases managerLatch so waiters can
// obtain it, applies the framework configuration and performs the launch.
// LaunchException and RuntimeException propagate unchanged; anything else is
// wrapped in a RuntimeException. The countDown in the finally block is a
// no-op when the latch was already released, but guarantees waiters are not
// blocked forever if an error occurs before the manager is created.
public class LauncherDelegateImpl { /** * { @ inheritDoc } */ @ Override public void launchFramework ( ) { } }
ClassLoader loader = config . getFrameworkClassloader ( ) ; if ( loader == null ) loader = this . getClass ( ) . getClassLoader ( ) ; try { // initialize RAS LogProvider provider = getLogProviderImpl ( loader , config ) ; // get framework / platform manager manager = new FrameworkManager ( ) ; managerLatch . countDown ( ) ; // Update framework configuration FrameworkConfigurator . configure ( config ) ; doFrameworkLaunch ( provider ) ; } catch ( LaunchException le ) { throw le ; } catch ( RuntimeException re ) { throw re ; } catch ( Throwable e ) { throw new RuntimeException ( e ) ; } finally { // In case an error occurs before launching . managerLatch . countDown ( ) ; }
// After the put command executes, writes the entry through to the cache
// store — unless the command carries the ROLLING_UPGRADE flag, the store is
// disabled for this command, we are inside a transaction scope, the command
// was unsuccessful, or this node is not the proper writer for the key. The
// command's return value is always passed through unchanged.
public class DistCacheWriterInterceptor { /** * - - - - WRITE commands */ @ Override public Object visitPutKeyValueCommand ( InvocationContext ctx , PutKeyValueCommand command ) throws Throwable { } }
return invokeNextThenApply ( ctx , command , ( rCtx , rCommand , rv ) -> { PutKeyValueCommand putKeyValueCommand = ( PutKeyValueCommand ) rCommand ; Object key = putKeyValueCommand . getKey ( ) ; if ( ! putKeyValueCommand . hasAnyFlag ( FlagBitSets . ROLLING_UPGRADE ) && ( ! isStoreEnabled ( putKeyValueCommand ) || rCtx . isInTxScope ( ) || ! putKeyValueCommand . isSuccessful ( ) ) ) return rv ; if ( ! isProperWriter ( rCtx , putKeyValueCommand , putKeyValueCommand . getKey ( ) ) ) return rv ; storeEntry ( rCtx , key , putKeyValueCommand ) ; return rv ; } ) ;