signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SegmentTransactionalInsertAction { /** * Behaves similarly to * { @ link org . apache . druid . indexing . overlord . IndexerMetadataStorageCoordinator # announceHistoricalSegments ( Set , DataSourceMetadata , DataSourceMetadata ) } . */ @ Override public SegmentPublishResult perform ( Task task , TaskActionToolbox toolbox ) { } }
TaskActionPreconditions . checkLockCoversSegments ( task , toolbox . getTaskLockbox ( ) , segments ) ; final SegmentPublishResult retVal ; try { retVal = toolbox . getTaskLockbox ( ) . doInCriticalSection ( task , segments . stream ( ) . map ( DataSegment :: getInterval ) . collect ( Collectors . toList ( ) ) , CriticalAction . < SegmentPublishResult > builder ( ) . onValidLocks ( ( ) -> toolbox . getIndexerMetadataStorageCoordinator ( ) . announceHistoricalSegments ( segments , startMetadata , endMetadata ) ) . onInvalidLocks ( ( ) -> SegmentPublishResult . fail ( "Invalid task locks. Maybe they are revoked by a higher priority task." + " Please check the overlord log for details." ) ) . build ( ) ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } // Emit metrics final ServiceMetricEvent . Builder metricBuilder = new ServiceMetricEvent . Builder ( ) ; IndexTaskUtils . setTaskDimensions ( metricBuilder , task ) ; if ( retVal . isSuccess ( ) ) { toolbox . getEmitter ( ) . emit ( metricBuilder . build ( "segment/txn/success" , 1 ) ) ; } else { toolbox . getEmitter ( ) . emit ( metricBuilder . build ( "segment/txn/failure" , 1 ) ) ; } // getSegments ( ) should return an empty set if announceHistoricalSegments ( ) failed for ( DataSegment segment : retVal . getSegments ( ) ) { metricBuilder . setDimension ( DruidMetrics . INTERVAL , segment . getInterval ( ) . toString ( ) ) ; toolbox . getEmitter ( ) . emit ( metricBuilder . build ( "segment/added/bytes" , segment . getSize ( ) ) ) ; } return retVal ;
public class CmsContentService { /** * Decodes the newlink request parameter if possible . < p > * @ param newLink the parameter to decode * @ return the decoded value */ protected String decodeNewLink ( String newLink ) { } }
String result = newLink ; if ( result == null ) { return null ; } try { result = CmsEncoder . decode ( result ) ; try { result = CmsEncoder . decode ( result ) ; } catch ( Throwable e ) { LOG . info ( e . getLocalizedMessage ( ) , e ) ; } } catch ( Throwable e ) { LOG . info ( e . getLocalizedMessage ( ) , e ) ; } return result ;
public class AbstractHibernateCriteriaBuilder { /** * Adds a sql projection to the criteria * @ param sql SQL projecting * @ param columnAliases List of column aliases for the projected values * @ param types List of types for the projected values */ protected void sqlProjection ( String sql , List < String > columnAliases , List < Type > types ) { } }
projectionList . add ( Projections . sqlProjection ( sql , columnAliases . toArray ( new String [ columnAliases . size ( ) ] ) , types . toArray ( new Type [ types . size ( ) ] ) ) ) ;
public class UpdateScheduleRequest { /** * Rotations of schedule */ public List < ScheduleRotation > getRotations ( ) { } }
if ( getTimeZone ( ) != null && rotations != null ) for ( ScheduleRotation scheduleRotation : rotations ) scheduleRotation . setScheduleTimeZone ( getTimeZone ( ) ) ; return rotations ;
public class JaegerConfiguration { /** * Sets the reporter configuration . * @ param reporterConfiguration The reporter configuration */ @ Inject public void setReporterConfiguration ( @ Nullable JaegerReporterConfiguration reporterConfiguration ) { } }
if ( reporterConfiguration != null ) { configuration . withReporter ( reporterConfiguration . configuration ) ; }
public class AbstractX509FileSystemStore { /** * Return a unique identifier appropriate for a file name . If the certificate as a subject key identifier , the * result is this encoded identifier . Else , use the concatenation of the certificate serial number and the issuer * name . * @ param publicKey the certificate . * @ return a unique identifier . * @ throws java . io . IOException on error . */ protected String getCertIdentifier ( X509CertifiedPublicKey publicKey ) throws IOException { } }
byte [ ] keyId = publicKey . getSubjectKeyIdentifier ( ) ; if ( keyId != null ) { return this . hex . encode ( keyId ) ; } return publicKey . getSerialNumber ( ) . toString ( ) + ", " + publicKey . getIssuer ( ) . getName ( ) ;
public class NLPSeg { /** * internal method to define the composed entity * for numeric and unit word composed word * @ paramnumeric * @ paramunitWord * @ returnIWord */ private IWord getNumericUnitComposedWord ( String numeric , IWord unitWord ) { } }
IStringBuffer sb = new IStringBuffer ( ) ; sb . clear ( ) . append ( numeric ) . append ( unitWord . getValue ( ) ) ; IWord wd = new Word ( sb . toString ( ) , IWord . T_CJK_WORD ) ; String [ ] entity = unitWord . getEntity ( ) ; int eIdx = ArrayUtil . startsWith ( Entity . E_TIME_P , entity ) ; if ( eIdx > - 1 ) { sb . clear ( ) . append ( entity [ eIdx ] . replace ( "time." , "datetime." ) ) ; } else { sb . clear ( ) . append ( Entity . E_NUC_PREFIX ) . append ( unitWord . getEntity ( 0 ) ) ; } wd . setEntity ( new String [ ] { sb . toString ( ) } ) ; wd . setPartSpeech ( IWord . QUANTIFIER ) ; sb . clear ( ) ; sb = null ; return wd ;
public class ProgramMeta { /** * The program type definition * @ return Optional of the < code > program < / code > field value . */ @ javax . annotation . Nonnull public java . util . Optional < net . morimekta . providence . model . ProgramType > optionalProgram ( ) { } }
return java . util . Optional . ofNullable ( mProgram ) ;
public class Cob2Xsd { /** * Parses a COBOL source into an in - memory model . * @ param cobolReader reads the raw COBOL source * @ return a list of root COBOL data items * @ throws RecognizerException if COBOL recognition fails */ public List < CobolDataItem > toModel ( final Reader cobolReader ) throws RecognizerException { } }
return emitModel ( parse ( lex ( clean ( cobolReader ) ) ) ) ;
public class DefaultGroovyMethods { /** * Creates a new List by inserting all of the elements in the given Iterable * to the elements from this List at the specified index . * @ param self an original list * @ param additions an Iterable containing elements to be merged with the elements from the original List * @ param index index at which to insert the first element from the given additions Iterable * @ return the new list * @ since 1.8.7 * @ see # plus ( List , int , List ) */ public static < T > List < T > plus ( List < T > self , int index , Iterable < T > additions ) { } }
return plus ( self , index , toList ( additions ) ) ;
public class AccessPoint { /** * If closest * @ param currRequest * @ param revisitRecord * @ param closest * @ return the payload resource * @ throws ResourceNotAvailableException * @ throws ConfigurationException * @ throws AccessControlException * @ throws BadQueryException * @ throws ResourceNotInArchiveException * @ throws ResourceIndexNotAvailableException * @ see WARCRevisitAnnotationFilter */ protected CaptureSearchResult retrievePayloadForIdenticalContentRevisit ( WaybackRequest currRequest , Resource revisitRecord , CaptureSearchResult closest ) throws ResourceIndexNotAvailableException , ResourceNotInArchiveException , BadQueryException , AccessControlException , ConfigurationException , ResourceNotAvailableException { } }
if ( ! closest . isRevisitDigest ( ) ) { LOGGER . warning ( "Revisit: record is not a revisit by identical content digest " + closest . getCaptureTimestamp ( ) + " " + closest . getOriginalUrl ( ) ) ; return null ; } CaptureSearchResult payloadLocation = null ; // Revisit from same url - - should have been found by the loader if ( closest . getDuplicatePayloadFile ( ) != null && closest . getDuplicatePayloadOffset ( ) != null ) { payloadLocation = new CaptureSearchResult ( ) ; payloadLocation . setFile ( closest . getDuplicatePayloadFile ( ) ) ; payloadLocation . setOffset ( closest . getDuplicatePayloadOffset ( ) ) ; payloadLocation . setCompressedLength ( closest . getDuplicatePayloadCompressedLength ( ) ) ; return payloadLocation ; } // Url Agnostic Revisit with target - uri and refers - to - date String payloadUri = revisitRecord . getRefersToTargetURI ( ) ; String payloadTimestamp = revisitRecord . getRefersToDate ( ) ; if ( payloadUri != null && payloadTimestamp != null ) { WaybackRequest wbr = currRequest . clone ( ) ; wbr . setReplayTimestamp ( payloadTimestamp ) ; wbr . setAnchorTimestamp ( payloadTimestamp ) ; wbr . setTimestampSearchKey ( true ) ; wbr . setRequestUrl ( payloadUri ) ; // experimental parameter to tell EmbeddedCDXServerIndex // that it ' s looking up the payload of URL - agnostic revisit . // EmbeddedCDXServerIndex will include soft - blocked captures // in the result . wbr . put ( EmbeddedCDXServerIndex . REQUEST_REVISIT_LOOKUP , "true" ) ; CaptureSearchResults payloadCaptureResults = searchCaptures ( wbr ) ; // closest may not be the one pointed by payloadTimestamp ReplayCaptureSelector captureSelector = new DefaultReplayCaptureSelector ( getReplay ( ) ) ; captureSelector . setRequest ( wbr ) ; captureSelector . setCaptures ( payloadCaptureResults ) ; payloadLocation = captureSelector . 
next ( ) ; // closest will not be the one pointed by payloadTimestamp if revisited // capture is missing ( can happen for many reasons ; not indexed yet , archive // has gone missing , for example ) . // TODO : this is pretty inefficient . should have a method for searching // just one capture at specific timestamp . Perhaps timestampSearchKey // is meant for this purpose , but it ' s not working as expected , apparently . if ( payloadLocation != null ) { String captureTimestamp = payloadLocation . getCaptureTimestamp ( ) ; // not supporting captureTimestamp less than 14 digits . if ( ! captureTimestamp . equals ( payloadTimestamp ) ) payloadLocation = null ; } } // if ( payloadLocation ! = null ) { // return payloadLocation ; // Less common less recommended revisit with specific warc / filename // WarcResource wr = ( WarcResource ) revisitRecord ; // warcHeaders = wr . getWarcHeaders ( ) . getHeaderFields ( ) ; // String payloadWarcFile = ( String ) warcHeaders . get ( " WARC - Refers - To - Filename " ) ; // String offsetStr = ( String ) warcHeaders . get ( " WARC - Refers - To - File - Offset " ) ; // if ( payloadWarcFile ! = null & & offsetStr ! = null ) { // payloadLocation = new CaptureSearchResult ( ) ; // payloadLocation . setFile ( payloadWarcFile ) ; // payloadLocation . setOffset ( Long . parseLong ( offsetStr ) ) ; return payloadLocation ;
public class CDIUtils { /** * public static BeanManager getBeanManagerFromJNDI ( ) * try * in an application server * return ( BeanManager ) InitialContext . doLookup ( " java : comp / BeanManager " ) ; * catch ( NamingException e ) * silently ignore * try * in a servlet container * return ( BeanManager ) InitialContext . doLookup ( " java : comp / env / BeanManager " ) ; * catch ( NamingException e ) * silently ignore * return null ; */ @ SuppressWarnings ( "unchecked" ) public static < T > T lookup ( BeanManager bm , Class < T > clazz ) { } }
Iterator < Bean < ? > > iter = bm . getBeans ( clazz ) . iterator ( ) ; if ( ! iter . hasNext ( ) ) { throw new IllegalStateException ( "CDI BeanManager cannot find an instance of requested type " + clazz . getName ( ) ) ; } Bean < T > bean = ( Bean < T > ) iter . next ( ) ; CreationalContext < T > ctx = bm . createCreationalContext ( bean ) ; T dao = ( T ) bm . getReference ( bean , clazz , ctx ) ; return dao ;
public class ClassDescSupport { /** * インポート名を追加します 。 * @ param classDesc クラス記述 * @ param importedClass インポートされるクラス */ public void addImportName ( ClassDesc classDesc , ClassConstants importedClass ) { } }
String packageName = importedClass . getPackageName ( ) ; if ( isImportTargetPackage ( classDesc , packageName ) ) { classDesc . addImportName ( importedClass . getQualifiedName ( ) ) ; }
public class DumpProcessingController { /** * Processes the most recent dump of the given type using the given dump * processor . * @ see DumpProcessingController # processMostRecentMainDump ( ) * @ see DumpProcessingController # processAllRecentRevisionDumps ( ) * @ param dumpContentType * the type of dump to process * @ param dumpFileProcessor * the processor to use * @ deprecated Use { @ link # getMostRecentDump ( DumpContentType ) } and * { @ link # processDump ( MwDumpFile ) } instead ; method will vanish * in WDTK 0.5 */ @ Deprecated public void processMostRecentDump ( DumpContentType dumpContentType , MwDumpFileProcessor dumpFileProcessor ) { } }
MwDumpFile dumpFile = getMostRecentDump ( dumpContentType ) ; if ( dumpFile != null ) { processDumpFile ( dumpFile , dumpFileProcessor ) ; }
public class RedisClientFactory { /** * Gets or Creates a { @ link IRedisClient } object . * @ param host * @ param port * @ return */ public IRedisClient getRedisClient ( String host , int port ) { } }
return getRedisClient ( host , port , null , null ) ;
public class StoryRunner { /** * Runs a Story with the given configuration and steps , applying the given * meta filter . * @ param configuration the Configuration used to run story * @ param candidateSteps the List of CandidateSteps containing the candidate * steps methods * @ param story the Story to run * @ param filter the Filter to apply to the story Meta * @ throws Throwable if failures occurred and FailureStrategy dictates it to * be re - thrown . */ public void run ( Configuration configuration , List < CandidateSteps > candidateSteps , Story story , MetaFilter filter ) throws Throwable { } }
run ( configuration , candidateSteps , story , filter , null ) ;
public class FastMathCalc { /** * Recompute a split . * @ param a input / out array containing the split , changed * on output */ private static void resplit ( final double a [ ] ) { } }
final double c = a [ 0 ] + a [ 1 ] ; final double d = - ( c - a [ 0 ] - a [ 1 ] ) ; if ( c < 8e298 && c > - 8e298 ) { // MAGIC NUMBER double z = c * HEX_40000000 ; a [ 0 ] = ( c + z ) - z ; a [ 1 ] = c - a [ 0 ] + d ; } else { double z = c * 9.31322574615478515625E-10 ; a [ 0 ] = ( c + z - c ) * HEX_40000000 ; a [ 1 ] = c - a [ 0 ] + d ; }
public class ProjectMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Project project , ProtocolMarshaller protocolMarshaller ) { } }
if ( project == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( project . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( project . getArn ( ) , ARN_BINDING ) ; protocolMarshaller . marshall ( project . getDescription ( ) , DESCRIPTION_BINDING ) ; protocolMarshaller . marshall ( project . getSource ( ) , SOURCE_BINDING ) ; protocolMarshaller . marshall ( project . getSecondarySources ( ) , SECONDARYSOURCES_BINDING ) ; protocolMarshaller . marshall ( project . getArtifacts ( ) , ARTIFACTS_BINDING ) ; protocolMarshaller . marshall ( project . getSecondaryArtifacts ( ) , SECONDARYARTIFACTS_BINDING ) ; protocolMarshaller . marshall ( project . getCache ( ) , CACHE_BINDING ) ; protocolMarshaller . marshall ( project . getEnvironment ( ) , ENVIRONMENT_BINDING ) ; protocolMarshaller . marshall ( project . getServiceRole ( ) , SERVICEROLE_BINDING ) ; protocolMarshaller . marshall ( project . getTimeoutInMinutes ( ) , TIMEOUTINMINUTES_BINDING ) ; protocolMarshaller . marshall ( project . getQueuedTimeoutInMinutes ( ) , QUEUEDTIMEOUTINMINUTES_BINDING ) ; protocolMarshaller . marshall ( project . getEncryptionKey ( ) , ENCRYPTIONKEY_BINDING ) ; protocolMarshaller . marshall ( project . getTags ( ) , TAGS_BINDING ) ; protocolMarshaller . marshall ( project . getCreated ( ) , CREATED_BINDING ) ; protocolMarshaller . marshall ( project . getLastModified ( ) , LASTMODIFIED_BINDING ) ; protocolMarshaller . marshall ( project . getWebhook ( ) , WEBHOOK_BINDING ) ; protocolMarshaller . marshall ( project . getVpcConfig ( ) , VPCCONFIG_BINDING ) ; protocolMarshaller . marshall ( project . getBadge ( ) , BADGE_BINDING ) ; protocolMarshaller . marshall ( project . getLogsConfig ( ) , LOGSCONFIG_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class SourceStreamManager { /** * Process an Ack control message but use the specified ack prefix to decide if the * ack prefix can be advanced . * @ param ackMsg The Ack message * @ param ackPrefix The new ack prefix * @ return List of messages which are now completed and may be deleted * @ throws SIResourceException */ public List processAck ( ControlAck ackMsg , long ackPrefix ) throws SIRollbackException , SIConnectionLostException , SIResourceException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "processAck" , new Object [ ] { ackMsg , new Long ( ackPrefix ) } ) ; List indexList = null ; // Short circuit if incoming message has wrong stream ID if ( ! hasStream ( ackMsg . getGuaranteedStreamUUID ( ) ) ) { // Bogus stream , ignore the message // Return a null indexList to ensure no further processing is // attempted by the caller if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processAck" , new Object [ ] { "unknown stream ID - returning null array list(message ignored)" } ) ; return indexList ; } int priority = ackMsg . getPriority ( ) . intValue ( ) ; Reliability reliability = ackMsg . getReliability ( ) ; // There is no stream for BestEffort non persistent messages if ( reliability . compareTo ( Reliability . BEST_EFFORT_NONPERSISTENT ) > 0 ) { StreamSet streamSet = getStreamSet ( ) ; SourceStream sourceStream = ( SourceStream ) streamSet . getStream ( priority , reliability ) ; // If there ' s no source stream then priority was bogus so ignore the ack if ( sourceStream == null ) { // Return a null indexList to ensure no further processing is // attempted by the caller if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processAck" , new Object [ ] { "unknown priority - returning null array list(message ignored)" } ) ; return indexList ; } // If this increases the finality prefix then // update it and delete the acked messages from the ItemStream long completedPrefix = sourceStream . getAckPrefix ( ) ; if ( ackPrefix > completedPrefix ) { // Update the completedPrefix and the oack value for the stream // returns a lit of the itemStream ids of the newly Acked messages // which we can then remove from the itemStream indexList = sourceStream . 
writeAckPrefix ( ackPrefix ) ; } } else { // We didn ' t expect an Ack for a best effort message so write an entry // to the trace log indicating that it happened if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) SibTr . debug ( tc , "Unexpected Ack message for BEST_EFFORT_NONPERSISTENT message " ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "processAck" , indexList ) ; return indexList ;
public class SCoverageReportMojo { /** * Generates SCoverage report . * @ throws MojoExecutionException if unexpected problem occurs */ @ Override public void execute ( ) throws MojoExecutionException { } }
if ( ! canGenerateReport ( ) ) { getLog ( ) . info ( "Skipping SCoverage report generation" ) ; return ; } try { RenderingContext context = new RenderingContext ( outputDirectory , getOutputName ( ) + ".html" ) ; SiteRendererSink sink = new SiteRendererSink ( context ) ; Locale locale = Locale . getDefault ( ) ; generate ( sink , locale ) ; } catch ( MavenReportException e ) { String prefix = "An error has occurred in " + getName ( Locale . ENGLISH ) + " report generation" ; throw new MojoExecutionException ( prefix + ": " + e . getMessage ( ) , e ) ; }
public class HystrixPlugins { /** * Reset all of the HystrixPlugins to null . You may invoke this directly , or it also gets invoked via < code > Hystrix . reset ( ) < / code > */ public static void reset ( ) { } }
getInstance ( ) . notifier . set ( null ) ; getInstance ( ) . concurrencyStrategy . set ( null ) ; getInstance ( ) . metricsPublisher . set ( null ) ; getInstance ( ) . propertiesFactory . set ( null ) ; getInstance ( ) . commandExecutionHook . set ( null ) ; HystrixMetricsPublisherFactory . reset ( ) ;
public class ApiOvhOrder { /** * Retrieve configuration item * REST : GET / order / cart / { cartId } / item / { itemId } / configuration / { configurationId } * @ param cartId [ required ] Cart identifier * @ param itemId [ required ] Product item identifier * @ param configurationId [ required ] Configuration item identifier */ public OvhConfigurationItem cart_cartId_item_itemId_configuration_configurationId_GET ( String cartId , Long itemId , Long configurationId ) throws IOException { } }
String qPath = "/order/cart/{cartId}/item/{itemId}/configuration/{configurationId}" ; StringBuilder sb = path ( qPath , cartId , itemId , configurationId ) ; String resp = execN ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhConfigurationItem . class ) ;
public class URLEncoder { /** * Translates a string into < code > x - www - form - urlencoded < / code > format . This method uses the * platform ' s default encoding as the encoding scheme to obtain the bytes for unsafe characters . * @ param s < code > String < / code > to be translated . * @ deprecated The resulting string may vary depending on the platform ' s default encoding . Instead , * use the encode ( String , String ) method to specify the encoding . * @ return the translated < code > String < / code > . */ @ Deprecated public static String encode ( String s ) { } }
s = java . net . URLEncoder . encode ( s ) ; if ( s . indexOf ( '+' ) != - 1 ) s = StringUtil . replace ( s , "+" , "%20" , false ) ; return s ;
public class HttpPostMultipartRequestDecoder { /** * This getMethod will parse as much as possible data and fill the list and map * @ throws ErrorDataDecoderException * if there is a problem with the charset decoding or other * errors */ private void parseBody ( ) { } }
if ( currentStatus == MultiPartStatus . PREEPILOGUE || currentStatus == MultiPartStatus . EPILOGUE ) { if ( isLastChunk ) { currentStatus = MultiPartStatus . EPILOGUE ; } return ; } parseBodyMultipart ( ) ;
public class ImageLoader { /** * Checks if the item is available in the cache . * @ param requestUrl The url of the remote image * @ param maxWidth The maximum width of the returned image . * @ param maxHeight The maximum height of the returned image . * @ return True if the item exists in cache , false otherwise . */ public boolean isCached ( String requestUrl , int maxWidth , int maxHeight ) { } }
return isCached ( requestUrl , maxWidth , maxHeight , ScaleType . CENTER_INSIDE ) ;
public class ReportTaskProgressRequest { /** * Key - value pairs that define the properties of the ReportTaskProgressInput object . * @ param fields * Key - value pairs that define the properties of the ReportTaskProgressInput object . */ public void setFields ( java . util . Collection < Field > fields ) { } }
if ( fields == null ) { this . fields = null ; return ; } this . fields = new com . amazonaws . internal . SdkInternalList < Field > ( fields ) ;
public class ExamplesImpl { /** * Returns examples to be reviewed . * @ param appId The application ID . * @ param versionId The version ID . * @ param listOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorResponseException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the List & lt ; LabeledUtterance & gt ; object if successful . */ public List < LabeledUtterance > list ( UUID appId , String versionId , ListExamplesOptionalParameter listOptionalParameter ) { } }
return listWithServiceResponseAsync ( appId , versionId , listOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class ArrayUtils { /** * Counts the number of elements in the array matching the criteria ( rules ) defined by the { @ link Filter } . * @ param < T > Class type of the elements in the array . * @ param array array to search . * @ param filter { @ link Filter } used to match elements in the array and tally the count . * @ return an integer value indicating the number of elements in the array matching the criteria ( rules ) * defined by the { @ link Filter } . * @ throws IllegalArgumentException if { @ link Filter } is null . * @ see org . cp . elements . lang . Filter */ public static < T > long count ( T [ ] array , Filter < T > filter ) { } }
Assert . notNull ( filter , "Filter is required" ) ; return stream ( nullSafeArray ( array ) ) . filter ( filter :: accept ) . count ( ) ;
public class HandlerContainer { /** * Dispatch message received from the remote to proper event handler . * @ param value Remote message , encoded as byte [ ] . */ @ Override @ SuppressWarnings ( "checkstyle:diamondoperatorforvariabledefinition" ) public synchronized void onNext ( final RemoteEvent < byte [ ] > value ) { } }
LOG . log ( Level . FINER , "RemoteManager: {0} value: {1}" , new Object [ ] { this . name , value } ) ; final T decodedEvent = this . codec . decode ( value . getEvent ( ) ) ; final Class < ? extends T > clazz = ( Class < ? extends T > ) decodedEvent . getClass ( ) ; LOG . log ( Level . FINEST , "RemoteManager: {0} decoded event {1} :: {2}" , new Object [ ] { this . name , clazz . getCanonicalName ( ) , decodedEvent } ) ; // check remote identifier and message type final SocketRemoteIdentifier id = new SocketRemoteIdentifier ( ( InetSocketAddress ) value . remoteAddress ( ) ) ; final Tuple2 < RemoteIdentifier , Class < ? extends T > > tuple = new Tuple2 < RemoteIdentifier , Class < ? extends T > > ( id , clazz ) ; final EventHandler < ? super T > tupleHandler = this . tupleToHandlerMap . get ( tuple ) ; if ( tupleHandler != null ) { LOG . log ( Level . FINER , "Tuple handler: {0},{1}" , new Object [ ] { tuple . getT1 ( ) , tuple . getT2 ( ) . getCanonicalName ( ) } ) ; tupleHandler . onNext ( decodedEvent ) ; } else { final EventHandler < RemoteMessage < ? extends T > > messageHandler = this . msgTypeToHandlerMap . get ( clazz ) ; if ( messageHandler == null ) { final RuntimeException ex = new RemoteRuntimeException ( "Unknown message type in dispatch: " + clazz . getCanonicalName ( ) + " from " + id ) ; LOG . log ( Level . WARNING , "Unknown message type in dispatch." , ex ) ; throw ex ; } LOG . log ( Level . FINER , "Message handler: {0}" , clazz . getCanonicalName ( ) ) ; messageHandler . onNext ( new DefaultRemoteMessage < > ( id , decodedEvent ) ) ; }
public class OutputElementBase { /** * Method called to reuse a pooled instance . */ protected void relink ( OutputElementBase parent ) { } }
mNsMapping = parent . mNsMapping ; mNsMapShared = ( mNsMapping != null ) ; mDefaultNsURI = parent . mDefaultNsURI ; mRootNsContext = parent . mRootNsContext ;
public class AttributeList { /** * print detail */ public void print ( ) { } }
System . out . println ( "========================= attrs =========================" ) ; for ( Map . Entry < String , Attribute > entry : this . attributes . entrySet ( ) ) { Attribute attr = entry . getValue ( ) ; System . out . println ( entry . getKey ( ) + ": " + attr . getName ( ) + ": " + entry . getValue ( ) ) ; }
public class GroupHandlerImpl { /** * { @ inheritDoc } */ public Collection < Group > findGroups ( Group parent ) throws Exception { } }
Session session = service . getStorageSession ( ) ; try { return findGroups ( session , parent , false ) ; } finally { session . logout ( ) ; }
public class PerformanceLogger { /** * Log line of measured performance of single operation specifying by performance metrics parameter . * @ param marker log marker * @ param metrics PerformanceMetrics a result of measurement */ public static void log ( Marker marker , PerformanceMetrics metrics ) { } }
if ( log . isDebugEnabled ( ) ) { log . debug ( marker , getEndMetrics ( metrics ) ) ; }
public class HttpUtil { /** * Some calls in the Heroku API decode strings in a different way from URLEncoder . This is a method for handling those * special cases . First , urlencode ( ) is called . Then , . - * _ are replaced with their hexadecimal equivalent . * @ param toEncode string to encode * @ return A string representation of encoded parameters . */ public static String encodeIncludingSpecialCharacters ( String toEncode ) { } }
String encoded = urlencode ( toEncode , "Unable to urlencode " + toEncode ) ; for ( Map . Entry < String , String > s : specialChars . entrySet ( ) ) { encoded = encoded . replace ( s . getKey ( ) , s . getValue ( ) ) ; } return encoded ;
public class BucketConfig { /** * Returns an abbreviation for the Bucket ' s TimeUnit . */ public String getTimeUnitAbbreviation ( ) { } }
switch ( timeUnit ) { case DAYS : return "day" ; case HOURS : return "hr" ; case MICROSECONDS : return "\u00B5s" ; case MILLISECONDS : return "ms" ; case MINUTES : return "min" ; case NANOSECONDS : return "ns" ; case SECONDS : return "s" ; default : return "unkwn" ; }
public class JsonParser { /** * 获取String * @ param key 例如 : country . province [ 13 ] . name * @ return { @ link String } */ public String getStringUseEval ( String key ) { } }
Object object = eval ( key ) ; return Checker . isNull ( object ) ? null : object . toString ( ) ;
public class CassandraUtils { /** * Get a list of the existing keyspaces in Cassandra . * @ return { @ code List < String > } */ public List < String > getKeyspaces ( ) { } }
ArrayList < String > result = new ArrayList < String > ( ) ; this . metadata = this . cluster . getMetadata ( ) ; if ( ! ( metadata . getKeyspaces ( ) . isEmpty ( ) ) ) { for ( KeyspaceMetadata k : this . metadata . getKeyspaces ( ) ) { result . add ( k . getName ( ) ) ; } } return result ;
public class CmsListCsvExportDialog { /** * Generates the CSV file for the given list . < p > * @ return CSV file * @ throws ClassNotFoundException if the list dialog class is not found */ public String generateCsv ( ) throws ClassNotFoundException { } }
CmsHtmlList list = A_CmsListDialog . getListObject ( Class . forName ( getParamListclass ( ) ) , getSettings ( ) ) ; return list . listCsv ( ) ;
public class JCudaDriver { /** * Query attributes of a given memory range . < br > * < br > * Query attributes of the memory range starting at devPtr with a size of * count bytes . The memory range must refer to managed memory allocated via * cuMemAllocManaged or declared via _ _ managed _ _ variables . The attributes * array will be interpreted to have numAttributes entries . The dataSizes * array will also be interpreted to have numAttributes entries . The results * of the query will be stored in data . < br > * < br > * < br > * The list of supported attributes are given below . Please refer to * { @ link JCudaDriver # cuMemRangeGetAttribute } for attribute descriptions and * restrictions . * < ul > * < li > CU _ MEM _ RANGE _ ATTRIBUTE _ READ _ MOSTLY < / li > * < li > CU _ MEM _ RANGE _ ATTRIBUTE _ PREFERRED _ LOCATION < / li > * < li > CU _ MEM _ RANGE _ ATTRIBUTE _ ACCESSED _ BY < / li > * < li > CU _ MEM _ RANGE _ ATTRIBUTE _ LAST _ PREFETCH _ LOCATION < / li > * < / ul > * @ param data A two - dimensional array containing pointers to memory * locations where the result of each attribute query will be written * to . * @ param dataSizes Array containing the sizes of each result * @ param attributes An array of attributes to query ( numAttributes and the * number of attributes in this array should match ) * @ param numAttributes Number of attributes to query * @ param devPtr Start of the range to query * @ param count Size of the range to query * @ return CUDA _ SUCCESS , CUDA _ ERROR _ DEINITIALIZED , * CUDA _ ERROR _ INVALID _ CONTEXT , CUDA _ ERROR _ INVALID _ VALUE , * CUDA _ ERROR _ INVALID _ DEVICE */ public static int cuMemRangeGetAttributes ( Pointer data [ ] , long dataSizes [ ] , int attributes [ ] , long numAttributes , CUdeviceptr devPtr , long count ) { } }
// Thin wrapper over the JNI binding; checkResult converts non-success status codes per the library's exception policy.
return checkResult ( cuMemRangeGetAttributesNative ( data , dataSizes , attributes , numAttributes , devPtr , count ) ) ;
public class AtomContainer { /** * { @ inheritDoc } */ @ Override public void addSingleElectron ( ISingleElectron singleElectron ) { } }
// Grow the backing array if needed, append at the current count (post-incrementing it), then fire a change notification.
ensureElectronCapacity ( singleElectronCount + 1 ) ; singleElectrons [ singleElectronCount ++ ] = singleElectron ; notifyChanged ( ) ;
public class BufferedISPNCache { /** * { @ inheritDoc } */ public void putAll ( Map < ? extends CacheKey , ? extends Object > map , long lifespan , TimeUnit unit ) { } }
// Pure delegation to the wrapped parent cache with the caller-supplied lifespan; no buffering happens here.
parentCache . putAll ( map , lifespan , unit ) ;
public class ValidationUtilities { /** * Determine if the given value is valid as defined by the specified * { @ link AnnotationDefinition } . A valid value is one that is valid for the * specified domain of the { @ link AnnotationDefinition } . * @ param annoDef * a < code > non - null < / code > { @ link AnnotationDefinition } . Although * the { @ link AnnotationType } of the annoDef may be null , this * method will always evaluate to < code > false < / code > in such a * case . * @ param value * the value to validate , possibly < code > null < / code > * @ return whether the given value is valid as defined by the given * { @ link AnnotationDefinition } * @ throws PatternSyntaxException * if the { @ link AnnotationDefinition } used by the * { @ link Annotation } is of type * { @ value AnnotationType # REGULAR _ EXPRESSION } and the specified * pattern is invalid . * @ throws InvalidArgument * Thrown if the < tt > annoDef < / tt > argument is null */ public static boolean isValid ( AnnotationDefinition annoDef , String value ) throws PatternSyntaxException { } }
// A null annoDef is a programming error (InvalidArgument); a null *type* is merely "not validatable" and yields false.
// Only ENUMERATION and REGULAR_EXPRESSION types are validatable; every other type returns false.
if ( annoDef == null ) { throw new InvalidArgument ( "annoDef" , annoDef ) ; } if ( annoDef . getType ( ) == null ) { return false ; } switch ( annoDef . getType ( ) ) { case ENUMERATION : return validateEnumeration ( annoDef . getEnums ( ) , value ) ; case REGULAR_EXPRESSION : return validateRegExp ( annoDef . getValue ( ) , value ) ; default : return false ; }
public class LoggingHelper { /** * Enable logging using String . format internally only if debug level is * enabled . * @ param logger * the logger that will be used to log the message * @ param format * the format string ( the template string ) * @ param throwable * a throwable object that holds the throwable information * @ param converter * the converter used to convert the param arguments in case the * trace level is enabled * @ param params * the parameters to be formatted into it the string format */ public static void trace ( final Logger logger , final String format , final Throwable throwable , final AbstractLoggingHelperConverter converter , final Object ... params ) { } }
// Guarded logging: the (possibly expensive) converter and String.format only run when TRACE is actually enabled.
if ( logger . isTraceEnabled ( ) ) { Object [ ] formatParams = params ; if ( converter != null ) { formatParams = converter . convert ( params ) ; } final String message = String . format ( format , formatParams ) ; logger . trace ( message , throwable ) ; }
public class SingleLinePatternLayout { /** * Method to prevent log forging . * @ param logMsg * @ return Encoded message */ private String preventLogForging ( String logMsg ) { } }
String result = logMsg ; // use precompiled pattern for performance reasons result = LINEBREAK_PATTERN . matcher ( logMsg ) . replaceAll ( SingleLinePatternLayout . LINE_SEP ) ; return result ;
public class ConfigSettings { /** * ( non - Javadoc ) * @ see * nyla . solutions . core . util . Settings # setProperties ( java . util . Properties ) */ @ Override public synchronized void setProperties ( Map < Object , Object > properties ) { } }
// Lazily create the backing Properties on first use, then merge (not replace) all supplied entries; synchronized for thread safety.
if ( this . properties == null ) this . properties = new Properties ( ) ; this . properties . putAll ( properties ) ;
public class Postconditions { /** * < p > Evaluate all of the given { @ code conditions } using { @ code value } as * input . < / p > * < p > All of the conditions are evaluated and the function throws { @ link * PostconditionViolationException } if any of the conditions are false , or * raise an exception that is not of type { @ link Error } . Exceptions of type * { @ link Error } are propagated immediately , without any further contract * checking . < / p > * @ param value The value * @ param conditions The set of conditions * @ param < T > The type of values * @ return value * @ throws PostconditionViolationException If any of the conditions are false */ @ SafeVarargs public static < T > T checkPostconditions ( final T value , final ContractConditionType < T > ... conditions ) throws PostconditionViolationException { } }
// innerCheckAll returns null when every condition held; otherwise the accumulated violations are raised, else value passes through.
final Violations violations = innerCheckAll ( value , conditions ) ; if ( violations != null ) { throw failed ( null , value , violations ) ; } return value ;
public class TextFormat { /** * Like { @ code print ( ) } , but writes directly to a { @ code String } and * returns it . */ public static String printToString ( final MessageOrBuilder message ) { } }
try { final StringBuilder text = new StringBuilder ( ) ; print ( message , text ) ; return text . toString ( ) ; } catch ( IOException e ) { throw new IllegalStateException ( e ) ; }
public class StringMan { /** * Returns true if the string is ' TRUE ' or ' YES ' or ' 1 ' , case insensitive . * False for null , empty , etc . */ public static boolean isStringTrue ( String in ) { } }
if ( in == null ) return false ; return in . equalsIgnoreCase ( "TRUE" ) || in . equalsIgnoreCase ( "YES" ) || in . equals ( "1" ) ;
public class RDFReader { /** * create graph doc * return ContentPermission [ ] for the graph */ public ContentPermission [ ] insertGraphDoc ( String graph ) throws IOException { } }
// Appends a conditional sem:create-graph-document(...) call to the shared batched query buffer, flushing the
// batch first when MAXGRAPHSPERREQUEST is reached. Permissions come from defaultPerms, falling back to
// xdmp:default-permissions() when none are configured. The graph IRI is XML-escaped before embedding.
// NOTE(review): the local 'perms' list is never populated, so this method always returns an empty array —
// confirm whether defaultPerms (or the permissions actually written) should be returned instead.
ArrayList < ContentPermission > perms = new ArrayList < ContentPermission > ( ) ; ContentPermission [ ] permissions = defaultPerms ; StringBuilder sb = graphQry ; if ( countPerBatch >= MAXGRAPHSPERREQUEST ) { countPerBatch = 0 ; submitGraphQuery ( ) ; graphQry . setLength ( 0 ) ; } String escapedGraph = escapeXml ( graph ) ; sb . append ( "if(fn:empty(fn:doc(\"" ) . append ( escapedGraph ) . append ( "\"))) then sem:create-graph-document(sem:iri(\"" ) . append ( escapedGraph ) . append ( "\"),(" ) ; if ( permissions != null && permissions . length > 0 ) { for ( int i = 0 ; i < permissions . length ; i ++ ) { ContentPermission cp = permissions [ i ] ; if ( i > 0 ) sb . append ( "," ) ; sb . append ( "xdmp:permission(\"" ) ; sb . append ( cp . getRole ( ) ) ; sb . append ( "\",\"" ) ; sb . append ( cp . getCapability ( ) ) ; sb . append ( "\")" ) ; } sb . append ( ")" ) ; } else { sb . append ( "xdmp:default-permissions())" ) ; } sb . append ( ") else ();\n" ) ; countPerBatch ++ ; return perms . toArray ( new ContentPermission [ 0 ] ) ;
public class AbstractQueryProtocol { /** * Specific execution for batch rewrite that has specific query for memory . * @ param results result * @ param prepareResult prepareResult * @ param parameterList parameters * @ param rewriteValues is rewritable flag * @ throws SQLException exception */ private void executeBatchRewrite ( Results results , final ClientPrepareResult prepareResult , List < ParameterHolder [ ] > parameterList , boolean rewriteValues ) throws SQLException { } }
// Sends rewritten multi-value commands in chunks: sendRewriteCmd returns how far into parameterList it got,
// and the loop repeats until every parameter set is consumed. Thread interruption aborts the batch with an
// SQLException; SQLExceptions are re-thrown with the query attached, IOExceptions go through the connection's
// IO handler, and the rewritten flag is always recorded on the results in the finally block.
cmdPrologue ( ) ; ParameterHolder [ ] parameters ; int currentIndex = 0 ; int totalParameterList = parameterList . size ( ) ; try { do { currentIndex = ComQuery . sendRewriteCmd ( writer , prepareResult . getQueryParts ( ) , currentIndex , prepareResult . getParamCount ( ) , parameterList , rewriteValues ) ; getResult ( results ) ; if ( Thread . currentThread ( ) . isInterrupted ( ) ) { throw new SQLException ( "Interrupted during batch" , INTERRUPTED_EXCEPTION . getSqlState ( ) , - 1 ) ; } } while ( currentIndex < totalParameterList ) ; } catch ( SQLException sqlEx ) { throw logQuery . exceptionWithQuery ( sqlEx , prepareResult ) ; } catch ( IOException e ) { throw handleIoException ( e ) ; } finally { results . setRewritten ( rewriteValues ) ; }
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcSoundScaleEnum ( ) { } }
// EMF-generated lazy accessor: resolves the EEnum from the registered package by its fixed classifier index (899) and caches it.
if ( ifcSoundScaleEnumEEnum == null ) { ifcSoundScaleEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 899 ) ; } return ifcSoundScaleEnumEEnum ;
public class SignedJarBuilder { /** * Adds an entry to the output jar , and write its content from the { @ link InputStream } * @ param input The input stream from where to write the entry content . * @ param entry the entry to write in the jar . * @ throws IOException */ private void writeEntry ( InputStream input , JarEntry entry ) throws IOException { } }
// Streams the entry content into the jar while feeding the same bytes to the message digest (when signing is enabled),
// then records the base64 digest in this entry's manifest attributes.
// NOTE(review): the manifest branch calls mMessageDigest.digest() unguarded — it assumes mMessageDigest is
// non-null whenever mManifest is set; confirm that invariant holds at the call sites.
// add the entry to the jar archive mOutputJar . putNextEntry ( entry ) ; // read the content of the entry from the input stream , and write it into the archive . int count ; while ( ( count = input . read ( mBuffer ) ) != - 1 ) { mOutputJar . write ( mBuffer , 0 , count ) ; // update the digest if ( mMessageDigest != null ) { mMessageDigest . update ( mBuffer , 0 , count ) ; } } // close the entry for this file mOutputJar . closeEntry ( ) ; if ( mManifest != null ) { // update the manifest for this entry . Attributes attr = mManifest . getAttributes ( entry . getName ( ) ) ; if ( attr == null ) { attr = new Attributes ( ) ; mManifest . getEntries ( ) . put ( entry . getName ( ) , attr ) ; } attr . putValue ( DIGEST_ATTR , mBase64Encoder . encodeToString ( mMessageDigest . digest ( ) ) ) ; }
public class Template { /** * Merge this template . * @ param vars * @ param out * @ return Context * @ throws ScriptRuntimeException * @ throws ParseException */ public Context merge ( final Vars vars , final OutputStream out ) { } }
// Convenience overload: wraps the raw stream in an engine-aware OutputStreamOut and delegates to the main merge.
return merge ( vars , new OutputStreamOut ( out , engine ) ) ;
public class TrainingsImpl { /** * Get the list of exports for a specific iteration . * @ param projectId The project id * @ param iterationId The iteration id * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the List & lt ; Export & gt ; object */ public Observable < List < Export > > getExportsAsync ( UUID projectId , UUID iterationId ) { } }
// Unwraps the ServiceResponse envelope, exposing only the response body to the caller.
return getExportsWithServiceResponseAsync ( projectId , iterationId ) . map ( new Func1 < ServiceResponse < List < Export > > , List < Export > > ( ) { @ Override public List < Export > call ( ServiceResponse < List < Export > > response ) { return response . body ( ) ; } } ) ;
public class TaxServiceDeserializer { /** * { @ inheritDoc } } */ @ SuppressWarnings ( "deprecation" ) @ Override public TaxService deserialize ( JsonParser jp , DeserializationContext desContext ) throws IOException { } }
// Builds a TaxService by walking the JSON tree field-by-field with a locally configured, JAXB-aware ObjectMapper
// (unknown properties ignored). Recognized fields: Fault (set on both the IntuitResponse and the TaxService),
// TaxCode, TaxCodeId, and the TaxRateDetails array, whose elements are converted via getTaxRateDetails and
// collected before being attached to the result. Unrecognized fields are silently skipped.
ObjectMapper mapper = new ObjectMapper ( ) ; TaxService taxService = new TaxService ( ) ; // Make the mapper JAXB annotations aware AnnotationIntrospector primary = new JaxbAnnotationIntrospector ( ) ; AnnotationIntrospector secondary = new JacksonAnnotationIntrospector ( ) ; AnnotationIntrospector pair = new AnnotationIntrospectorPair ( primary , secondary ) ; mapper . setAnnotationIntrospector ( pair ) ; mapper . configure ( DeserializationFeature . FAIL_ON_UNKNOWN_PROPERTIES , false ) ; // mapper . setPropertyNamingStrategy ( PascalCaseStrategy ) ; // Read the QueryResponse as a tree JsonNode jn = jp . readValueAsTree ( ) ; // Iterate over the field names Iterator < String > ite = jn . fieldNames ( ) ; // Create the QueryResponse to be returned IntuitResponse qr = new IntuitResponse ( ) ; // List to store taxrateDetails List < TaxRateDetails > taxRateDetailsList = new ArrayList < TaxRateDetails > ( ) ; while ( ite . hasNext ( ) ) { String key = ite . next ( ) ; // Attributes if ( key . equalsIgnoreCase ( FAULT ) ) { qr . setFault ( mapper . treeToValue ( jn . get ( FAULT ) , Fault . class ) ) ; taxService . setFault ( mapper . treeToValue ( jn . get ( FAULT ) , Fault . class ) ) ; continue ; } else if ( key . equalsIgnoreCase ( "TaxCode" ) ) { taxService . setTaxCode ( mapper . treeToValue ( jn . get ( key ) , String . class ) ) ; } else if ( key . equalsIgnoreCase ( "TaxCodeId" ) ) { taxService . setTaxCodeId ( mapper . treeToValue ( jn . get ( key ) , String . class ) ) ; } else if ( key . equalsIgnoreCase ( "TaxRateDetails" ) ) { // add a loop to read all tax rate details JsonNode jn1 = jn . get ( key ) ; if ( jn1 . isArray ( ) ) { Iterator < JsonNode > iteJson = jn1 . iterator ( ) ; while ( iteJson . hasNext ( ) ) { JsonNode jn2 = iteJson . next ( ) ; TaxRateDetails taxRateDetails = getTaxRateDetails ( jn2 ) ; // mapper . readValue ( jn2 . get ( key ) , TaxRateDetails . class ) ; taxRateDetailsList . add ( taxRateDetails ) ; } } taxService . 
setTaxRateDetails ( taxRateDetailsList ) ; } } return taxService ;
public class LogicFile { /** * Set up the key areas . */ public void setupKeys ( ) { } }
// Defines three key areas: a unique ID key, a unique (class name, method name) key, and a
// non-unique sequence key ordered by class name, sequence, then method name.
KeyAreaInfo keyArea = null ; keyArea = new KeyAreaInfo ( this , Constants . UNIQUE , ID_KEY ) ; keyArea . addKeyField ( ID , Constants . ASCENDING ) ; keyArea = new KeyAreaInfo ( this , Constants . UNIQUE , METHOD_CLASS_NAME_KEY ) ; keyArea . addKeyField ( METHOD_CLASS_NAME , Constants . ASCENDING ) ; keyArea . addKeyField ( METHOD_NAME , Constants . ASCENDING ) ; keyArea = new KeyAreaInfo ( this , Constants . NOT_UNIQUE , SEQUENCE_KEY ) ; keyArea . addKeyField ( METHOD_CLASS_NAME , Constants . ASCENDING ) ; keyArea . addKeyField ( SEQUENCE , Constants . ASCENDING ) ; keyArea . addKeyField ( METHOD_NAME , Constants . ASCENDING ) ;
public class ModulePlaybackImpl { /** * { @ inheritDoc } */ public SetTimeSeekVodOperation buildSetTimeSeekVodOperation ( int hours , int minutes , int seconds ) { } }
// Simple factory delegation: the operation factory supplies the protocol plumbing for the new seek operation.
return new SetTimeSeekVodOperation ( getOperationFactory ( ) , hours , minutes , seconds ) ;
public class EtherNetIpShared { /** * Release / shutdown / cleanup any shared resources that were created . */ public static synchronized void releaseSharedResources ( ) { } }
// Shuts down each lazily created shared resource if present and nulls the reference so it can be recreated later.
// Synchronized to serialize with the lazy initializers that populate these fields.
if ( SHARED_EVENT_LOOP != null ) { SHARED_EVENT_LOOP . shutdownGracefully ( ) ; SHARED_EVENT_LOOP = null ; } if ( SHARED_WHEEL_TIMER != null ) { SHARED_WHEEL_TIMER . stop ( ) ; SHARED_WHEEL_TIMER = null ; } if ( SHARED_EXECUTOR != null ) { SHARED_EXECUTOR . shutdown ( ) ; SHARED_EXECUTOR = null ; } if ( SHARED_SCHEDULED_EXECUTOR != null ) { SHARED_SCHEDULED_EXECUTOR . shutdown ( ) ; SHARED_SCHEDULED_EXECUTOR = null ; }
public class CoreRepositorySetupService { /** * nodes */ protected Node makeNodeAvailable ( @ Nonnull final Session session , @ Nonnull final String path , @ Nonnull final String primaryType ) throws RepositoryException { } }
// Returns the node at 'path' (blank path resolves to the root "/"). On PathNotFound the method recursively
// ensures the parent exists — creating each missing ancestor with the same primaryType — then adds the leaf.
// Other repository errors are logged and rethrown.
Node node ; try { node = session . getNode ( StringUtils . isNotBlank ( path ) ? path : "/" ) ; } catch ( PathNotFoundException nf ) { LOG . info ( "createNode({},{})" , path , primaryType ) ; Node parent = makeNodeAvailable ( session , StringUtils . substringBeforeLast ( path , "/" ) , primaryType ) ; node = parent . addNode ( StringUtils . substringAfterLast ( path , "/" ) , primaryType ) ; } catch ( RepositoryException e ) { LOG . error ( "Error in makeNodeAvailable({},{}) : {}" , new Object [ ] { path , primaryType , e . toString ( ) } ) ; throw e ; } return node ;
public class JavaLexer { /** * $ ANTLR start " T _ _ 114" */ public final void mT__114 ( ) throws RecognitionException { } }
// ANTLR-generated lexer rule: matches the literal keyword 'transient' and records token type/channel in the lexer state.
try { int _type = T__114 ; int _channel = DEFAULT_TOKEN_CHANNEL ; // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 95:8 : ( ' transient ' ) // src / main / resources / org / drools / compiler / semantics / java / parser / Java . g : 95:10 : ' transient ' { match ( "transient" ) ; } state . type = _type ; state . channel = _channel ; } finally { // do for sure before leaving }
public class DM { /** * This method does training on previously unseen paragraph , and returns inferred vector * @ param sequence * @ param nr * @ param learningRate * @ return */ @ Override public INDArray inferSequence ( Sequence < T > sequence , long nr , double learningRate , double minLearningRate , int iterations ) { } }
// Infers a paragraph vector for an unseen sequence: initialise a small random vector (seeded deterministically
// from configuration seed and the sequence hash), then run 'iterations' passes of the DM training step over
// every element, annealing the learning rate toward minLearningRate. Empty sequences return null.
AtomicLong nextRandom = new AtomicLong ( nr ) ; // we probably don ' t want subsampling here // Sequence < T > seq = cbow . applySubsampling ( sequence , nextRandom ) ; // if ( sequence . getSequenceLabel ( ) = = null ) throw new IllegalStateException ( " Label is NULL " ) ; if ( sequence . isEmpty ( ) ) return null ; Random random = Nd4j . getRandomFactory ( ) . getNewRandomInstance ( configuration . getSeed ( ) * sequence . hashCode ( ) , lookupTable . layerSize ( ) + 1 ) ; INDArray ret = Nd4j . rand ( new int [ ] { 1 , lookupTable . layerSize ( ) } , random ) . subi ( 0.5 ) . divi ( lookupTable . layerSize ( ) ) ; log . info ( "Inf before: {}" , ret ) ; for ( int iter = 0 ; iter < iterations ; iter ++ ) { for ( int i = 0 ; i < sequence . size ( ) ; i ++ ) { nextRandom . set ( Math . abs ( nextRandom . get ( ) * 25214903917L + 11 ) ) ; dm ( i , sequence , ( int ) nextRandom . get ( ) % window , nextRandom , learningRate , null , true , ret , null ) ; } learningRate = ( ( learningRate - minLearningRate ) / ( iterations - iter ) ) + minLearningRate ; } finish ( ) ; return ret ;
public class ActionValidator { /** * similar logic is on action response reflector */ public static boolean cannotBeValidatable ( Object value ) { } }
// True for simple scalar-like values (String, Number, local dates, Boolean, CDef classification, primitives)
// that never carry nested validation annotations.
// NOTE(review): a null value would NPE at value.getClass() — callers presumably never pass null; confirm.
// called by e . g . ResponseBeanValidator return value instanceof String // yes - yes - yes || value instanceof Number // e . g . Integer || DfTypeUtil . isAnyLocalDate ( value ) // e . g . LocalDate || value instanceof Boolean // of course || value instanceof Classification // e . g . CDef || value . getClass ( ) . isPrimitive ( ) // probably no way , just in case ;
public class JSPExtensionFactory { /** * Inject an < code > WrapperExpressionFactory < / code > service instance . * @ param expressionFactoryService * an expressionFactory service to wrap the default ExpressionFactory */ @ Reference ( cardinality = ReferenceCardinality . OPTIONAL , policyOption = ReferencePolicyOption . GREEDY ) protected void setExpressionFactoryService ( ServiceReference < ELFactoryWrapperForCDI > expressionFactoryService ) { } }
// Declarative Services bind method: store the ServiceReference for lazy resolution of the wrapper ExpressionFactory.
this . expressionFactoryService . setReference ( expressionFactoryService ) ;
public class KeyTranslatorImpl { /** * Adds a mapping from a key release to an action command string . Overwrites any existing * mapping that may already have been registered . */ public void addReleaseCommand ( int keyCode , String command ) { } }
// getKeyRecord creates or fetches the record for this key code; assignment overwrites any prior release command.
KeyRecord krec = getKeyRecord ( keyCode ) ; krec . releaseCommand = command ;
public class XPathContext { /** * Get the ErrorListener where errors and warnings are to be reported . * @ return A non - null ErrorListener reference . */ public final ErrorListener getErrorListener ( ) { } }
// Resolution order: the explicitly set listener, then the owner's listener fetched reflectively, then a lazily
// created shared DefaultErrorHandler. The empty catch deliberately ignores reflection failures so the
// default-handler fallback always applies.
if ( null != m_errorListener ) return m_errorListener ; ErrorListener retval = null ; try { if ( null != m_ownerGetErrorListener ) retval = ( ErrorListener ) m_ownerGetErrorListener . invoke ( m_owner , new Object [ ] { } ) ; } catch ( Exception e ) { } if ( null == retval ) { if ( null == m_defaultErrorListener ) m_defaultErrorListener = new org . apache . xml . utils . DefaultErrorHandler ( ) ; retval = m_defaultErrorListener ; } return retval ;
public class WSubMenuRenderer { /** * Paints the given WSubMenu . * @ param component the WSubMenu to paint . * @ param renderContext the RenderContext to paint to . */ @ Override public void doRender ( final WComponent component , final WebXmlRenderContext renderContext ) { } }
// Emits the ui:submenu element with its state attributes (open for trees, disabled/hidden, accessKey only at
// top level, nested otherwise), maps the menu mode to the schema value (SERVER is deliberately rendered as
// "dynamic", see issue #687), paints the decorated label, then renders the ui:content wrapper — whose children
// are only painted when the mode is not EAGER or this menu is the current AJAX trigger.
WSubMenu menu = ( WSubMenu ) component ; XmlStringBuilder xml = renderContext . getWriter ( ) ; xml . appendTagOpen ( "ui:submenu" ) ; xml . appendAttribute ( "id" , component . getId ( ) ) ; xml . appendOptionalAttribute ( "class" , component . getHtmlClass ( ) ) ; xml . appendOptionalAttribute ( "track" , component . isTracking ( ) , "true" ) ; if ( isTree ( menu ) ) { xml . appendAttribute ( "open" , String . valueOf ( isOpen ( menu ) ) ) ; } xml . appendOptionalAttribute ( "disabled" , menu . isDisabled ( ) , "true" ) ; xml . appendOptionalAttribute ( "hidden" , menu . isHidden ( ) , "true" ) ; if ( menu . isTopLevelMenu ( ) ) { xml . appendOptionalAttribute ( "accessKey" , menu . getAccessKeyAsString ( ) ) ; } else { xml . appendAttribute ( "nested" , "true" ) ; } xml . appendOptionalAttribute ( "type" , getMenuType ( menu ) ) ; switch ( menu . getMode ( ) ) { case CLIENT : xml . appendAttribute ( "mode" , "client" ) ; break ; case LAZY : xml . appendAttribute ( "mode" , "lazy" ) ; break ; case EAGER : xml . appendAttribute ( "mode" , "eager" ) ; break ; case DYNAMIC : case SERVER : // mode server mapped to mode dynamic as per https : / / github . com / BorderTech / wcomponents / issues / 687 xml . appendAttribute ( "mode" , "dynamic" ) ; break ; default : throw new SystemException ( "Unknown menu mode: " + menu . getMode ( ) ) ; } xml . appendClose ( ) ; // Paint label menu . getDecoratedLabel ( ) . paint ( renderContext ) ; MenuMode mode = menu . getMode ( ) ; // Paint submenu items xml . appendTagOpen ( "ui:content" ) ; xml . appendAttribute ( "id" , component . getId ( ) + "-content" ) ; xml . appendClose ( ) ; // Render content if not EAGER Mode or is EAGER and is the current AJAX request if ( mode != MenuMode . EAGER || AjaxHelper . isCurrentAjaxTrigger ( menu ) ) { // Visibility of content set in prepare paint menu . paintMenuItems ( renderContext ) ; } xml . appendEndTag ( "ui:content" ) ; xml . appendEndTag ( "ui:submenu" ) ;
public class VTimeZone { /** * Writes the closing section of zone properties */ private static void endZoneProps ( Writer writer , boolean isDst ) throws IOException { } }
// Emits "END:DAYLIGHT" or "END:STANDARD" (per isDst) followed by a newline, closing the zone property section.
// END : STANDARD or END : DAYLIGHT writer . write ( ICAL_END ) ; writer . write ( COLON ) ; if ( isDst ) { writer . write ( ICAL_DAYLIGHT ) ; } else { writer . write ( ICAL_STANDARD ) ; } writer . write ( NEWLINE ) ;
public class TiffDocument { /** * Adds the tag . * @ param tagName the tag name * @ param tagValue the tag value * @ return true , if successful */ public boolean addTag ( String tagName , String tagValue ) { } }
boolean result = false ; if ( firstIFD != null ) { if ( firstIFD . containsTagId ( TiffTags . getTagId ( tagName ) ) ) { firstIFD . removeTag ( tagName ) ; } firstIFD . addTag ( tagName , tagValue ) ; createMetadataDictionary ( ) ; } return result ;
public class DefaultGroovyMethods { /** * Sorts all Iterable members into groups determined by the supplied mapping closure . * The closure should return the key that this item should be grouped by . The returned * LinkedHashMap will have an entry for each distinct key returned from the closure , * with each value being a list of items for that group . * Example usage : * < pre class = " groovyTestCase " > * assert [ 0 : [ 2,4,6 ] , 1 : [ 1,3,5 ] ] = = [ 1,2,3,4,5,6 ] . groupBy { it % 2 } * < / pre > * @ param self a collection to group * @ param closure a closure mapping entries on keys * @ return a new Map grouped by keys * @ since 2.2.0 */ public static < K , T > Map < K , List < T > > groupBy ( Iterable < T > self , @ ClosureParams ( FirstParam . FirstGenericType . class ) Closure < K > closure ) { } }
// Classic group-by: the closure derives each element's key, and groupAnswer appends the element to that key's
// list; LinkedHashMap preserves first-seen key order.
Map < K , List < T > > answer = new LinkedHashMap < K , List < T > > ( ) ; for ( T element : self ) { K value = closure . call ( element ) ; groupAnswer ( answer , element , value ) ; } return answer ;
public class SameSizeKMeansAlgorithm { /** * Compute the distances of each object to all means . Update * { @ link Meta # secondary } to point to the best cluster number except the * current cluster assignment * @ param relation Data relation * @ param means Means * @ param metas Metadata storage * @ param df Distance function */ protected void updateDistances ( Relation < V > relation , double [ ] [ ] means , final WritableDataStore < Meta > metas , NumberVectorDistanceFunction < ? super V > df ) { } }
// For every object: recompute its distance to all k means and track the closest cluster that is NOT its
// current (primary) assignment in c.secondary (-1 while unset), then write the updated Meta back to the store.
for ( DBIDIter id = relation . iterDBIDs ( ) ; id . valid ( ) ; id . advance ( ) ) { Meta c = metas . get ( id ) ; V fv = relation . get ( id ) ; // Update distances to means . c . secondary = - 1 ; for ( int i = 0 ; i < k ; i ++ ) { c . dists [ i ] = df . distance ( fv , DoubleVector . wrap ( means [ i ] ) ) ; if ( c . primary != i ) { if ( c . secondary < 0 || c . dists [ i ] < c . dists [ c . secondary ] ) { c . secondary = i ; } } } metas . put ( id , c ) ; // Changed . }
public class FrameMetadata { /** * count of rows with nas */ public long rowsWithNa ( ) { } }
// Memoized: -1 means "not computed yet". The count is derived by running (na.omit fr) in Rapids and
// subtracting the surviving row count from the original; the temporary frame is deleted afterwards.
if ( _rowsWithNa != - 1 ) return _rowsWithNa ; String x = String . format ( "(na.omit %s)" , _fr . _key ) ; Val res = Rapids . exec ( x ) ; Frame f = res . getFrame ( ) ; long cnt = _fr . numRows ( ) - f . numRows ( ) ; f . delete ( ) ; return ( _rowsWithNa = cnt ) ;
public class BlobServicesInner { /** * Sets the properties of a storage account ’ s Blob service , including properties for Storage Analytics and CORS ( Cross - Origin Resource Sharing ) rules . * @ param resourceGroupName The name of the resource group within the user ' s subscription . The name is case insensitive . * @ param accountName The name of the storage account within the specified resource group . Storage account names must be between 3 and 24 characters in length and use numbers and lower - case letters only . * @ param parameters The properties of a storage account ’ s Blob service , including properties for Storage Analytics and CORS ( Cross - Origin Resource Sharing ) rules . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the BlobServicePropertiesInner object */ public Observable < ServiceResponse < BlobServicePropertiesInner > > setServicePropertiesWithServiceResponseAsync ( String resourceGroupName , String accountName , BlobServicePropertiesInner parameters ) { } }
// Generated client method: fail fast on any missing required argument or client state, validate the payload,
// then invoke the REST call (blobServicesName is fixed to "default") and map the raw Retrofit response
// through setServicePropertiesDelegate, converting delegate failures into Observable errors.
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( accountName == null ) { throw new IllegalArgumentException ( "Parameter accountName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } if ( parameters == null ) { throw new IllegalArgumentException ( "Parameter parameters is required and cannot be null." ) ; } Validator . validate ( parameters ) ; final String blobServicesName = "default" ; return service . setServiceProperties ( resourceGroupName , accountName , this . client . subscriptionId ( ) , blobServicesName , this . client . apiVersion ( ) , parameters , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < BlobServicePropertiesInner > > > ( ) { @ Override public Observable < ServiceResponse < BlobServicePropertiesInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < BlobServicePropertiesInner > clientResponse = setServicePropertiesDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class HttpRedirectBindingUtil { /** * Validates a signature in the specified { @ link AggregatedHttpMessage } . */ private static void validateSignature ( Credential validationCredential , SamlParameters parameters , String messageParamName ) { } }
// Reconstructs the exact signed octet string from the request parameters (message, optional RelayState, SigAlg —
// in that order, as mandated by the SAML HTTP-Redirect binding), base64-decodes the Signature parameter, and
// verifies it against the credential; decode and verification failures surface as SamlException.
requireNonNull ( validationCredential , "validationCredential" ) ; requireNonNull ( parameters , "parameters" ) ; requireNonNull ( messageParamName , "messageParamName" ) ; final String signature = parameters . getFirstValue ( SIGNATURE ) ; final String sigAlg = parameters . getFirstValue ( SIGNATURE_ALGORITHM ) ; // The order is one of the followings : // - SAMLRequest = { value } & RelayState = { value } & SigAlg = { value } // - SAMLResponse = { value } & RelayState = { value } & SigAlg = { value } final QueryStringEncoder encoder = new QueryStringEncoder ( "" ) ; encoder . addParam ( messageParamName , parameters . getFirstValue ( messageParamName ) ) ; final String relayState = parameters . getFirstValueOrNull ( RELAY_STATE ) ; if ( relayState != null ) { encoder . addParam ( RELAY_STATE , relayState ) ; } encoder . addParam ( SIGNATURE_ALGORITHM , sigAlg ) ; final byte [ ] input = encoder . toString ( ) . substring ( 1 ) . getBytes ( StandardCharsets . UTF_8 ) ; try { final byte [ ] decodedSignature = Base64 . getMimeDecoder ( ) . decode ( signature ) ; if ( ! XMLSigningUtil . verifyWithURI ( validationCredential , sigAlg , decodedSignature , input ) ) { throw new SamlException ( "failed to validate a signature" ) ; } } catch ( IllegalArgumentException e ) { throw new SamlException ( "failed to decode a base64 signature string" , e ) ; } catch ( SecurityException e ) { throw new SamlException ( "failed to validate a signature" , e ) ; }
public class CmsParameterConfiguration { /** * Returns the integer associated with the given parameter , * or the default value in case there is no integer value for this parameter . < p > * @ param key the parameter to look up the value for * @ param defaultValue the default value * @ return the integer associated with the given parameter , * or the default value in case there is no integer value for this parameter */ public int getInteger ( String key , int defaultValue ) { } }
Object value = m_configurationObjects . get ( key ) ; if ( value instanceof Integer ) { return ( ( Integer ) value ) . intValue ( ) ; } else if ( value instanceof String ) { Integer i = new Integer ( ( String ) value ) ; m_configurationObjects . put ( key , i ) ; return i . intValue ( ) ; } else { return defaultValue ; }
public class CuratorUtils { /** * Create znode if it does not already exist . If it does already exist , update the payload ( but not the create mode ) . * If someone deletes the znode while we ' re trying to set it , just let it stay deleted . * @ param curatorFramework curator * @ param path path * @ param mode create mode * @ param rawBytes payload * @ param maxZnodeBytes maximum payload size * @ throws IllegalArgumentException if rawBytes . length > maxZnodeBytes * @ throws Exception if Curator throws an Exception */ public static void createOrSet ( CuratorFramework curatorFramework , String path , CreateMode mode , byte [ ] rawBytes , int maxZnodeBytes ) throws Exception { } }
verifySize ( path , rawBytes , maxZnodeBytes ) ; boolean created = false ; if ( curatorFramework . checkExists ( ) . forPath ( path ) == null ) { try { curatorFramework . create ( ) . creatingParentsIfNeeded ( ) . withMode ( mode ) . forPath ( path , rawBytes ) ; created = true ; } catch ( KeeperException . NodeExistsException e ) { log . debug ( "Path [%s] created while we were running, will setData instead." , path ) ; } } if ( ! created ) { try { curatorFramework . setData ( ) . forPath ( path , rawBytes ) ; } catch ( KeeperException . NoNodeException e ) { log . warn ( "Someone deleted path[%s] while we were trying to set it. Leaving it deleted." , path ) ; } }
public class Trestle { /** * Set multiple spans */ public static CharSequence getFormattedText ( List < Span > spans ) { } }
CharSequence formattedText = null ; if ( spans != null ) { int size = spans . size ( ) ; List < SpannableString > spannableStrings = new ArrayList < > ( size ) ; for ( Span span : spans ) { SpannableString ss = setUpSpannableString ( span ) ; spannableStrings . add ( ss ) ; } formattedText = TextUtils . concat ( spannableStrings . toArray ( new SpannableString [ size ] ) ) ; } return formattedText ;
public class FDBigInt {

    /**
     * Compare FDBigInt with another FDBigInt. Return an integer
     *  > 0 : this >  other
     *    0 : this == other
     *  < 0 : this <  other
     *
     * Words in {@code data} are treated as UNSIGNED 32-bit digits, so the
     * final word comparison cannot use plain signed subtraction in all cases.
     */
    public int cmp(FDBigInt other) {
        int i;
        if (this.nWords > other.nWords) {
            // if any of my high-order words is non-zero,
            // then the answer is evident
            int j = other.nWords - 1;
            for (i = this.nWords - 1; i > j; i--)
                if (this.data[i] != 0) return 1;
        } else if (this.nWords < other.nWords) {
            // if any of other's high-order words is non-zero,
            // then the answer is evident
            int j = this.nWords - 1;
            for (i = other.nWords - 1; i > j; i--)
                if (other.data[i] != 0) return -1;
        } else {
            // same word count: start comparing at the top word
            i = this.nWords - 1;
        }
        // scan downward to the first word where the two numbers differ
        // (or to word 0 if all higher words are equal)
        for (; i > 0; i--)
            if (this.data[i] != other.data[i]) break;
        // careful! want unsigned compare!
        // use brute force here.
        int a = this.data[i];
        int b = other.data[i];
        if (a < 0) {
            // a is really big, unsigned
            if (b < 0) {
                // both have the high bit set: signed subtraction gives the correct
                // unsigned ordering and cannot overflow for same-sign operands
                return a - b; // both big, negative
            } else {
                return 1; // b not big, answer is obvious;
            }
        } else {
            // a is not really big
            if (b < 0) {
                // but b is really big
                return -1;
            } else {
                // both non-negative: plain subtraction is safe
                return a - b;
            }
        }
    }
}
public class ArtifactCollector { /** * Get base name for artifact files for the specified test result . * < br > < br > * < b > NOTE < / b > : The base name is derived from the name of the current test . * If the method is parameterized , a hash code is computed from the parameter * values and appended to the base name as an 8 - digit hexadecimal integer . * @ return artifact file base name */ private String getArtifactBaseName ( ) { } }
int hashcode = getParameters ( ) . hashCode ( ) ; if ( hashcode != 0 ) { String hashStr = String . format ( "%08X" , hashcode ) ; return getDescription ( ) . getMethodName ( ) + "-" + hashStr ; } else { return getDescription ( ) . getMethodName ( ) ; }
public class TagFilter { /** * The tag values . * @ param tagValues * The tag values . */ public void setTagValues ( java . util . Collection < String > tagValues ) { } }
if ( tagValues == null ) { this . tagValues = null ; return ; } this . tagValues = new java . util . ArrayList < String > ( tagValues ) ;
public class SessionBeanO {

    /**
     * d367572.1 added entire method.
     *
     * Runs the given lifecycle interceptor chain for this bean instance,
     * wrapping any failure in an EJBException so it is handled as an
     * unchecked (system) exception, and emitting entry/exit trace records.
     */
    protected void callLifecycleInterceptors(InterceptorProxy[] proxies, int methodId) {
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        try {
            if (isTraceOn) // d527372
            {
                if (TEBeanLifeCycleInfo.isTraceEnabled())
                    TEBeanLifeCycleInfo.traceEJBCallEntry(LifecycleInterceptorWrapper.TRACE_NAMES[methodId]);

                if (tc.isDebugEnabled())
                    Tr.debug(tc, "callLifecycleInterceptors");
            }

            InvocationContextImpl<?> inv = getInvocationContext();
            BeanMetaData bmd = home.beanMetaData; // d450431
            // Drive the actual interceptor chain for this lifecycle event.
            inv.doLifeCycle(proxies, bmd._moduleMetaData); // d450431, F743-14982
        } catch (Throwable t) {
            // FFDCFilter.processException(t, CLASS_NAME + ".SessionBeanO", "251", this);

            // Lifecycle interceptors are allowed to throw system runtime exceptions,
            // but NOT application exceptions. Therefore, wrap the caught Throwable
            // in a javax.ejb.EJBException and throw it so that it gets handled as
            // an unchecked exception.
            if (isTraceOn && tc.isDebugEnabled()) {
                Tr.debug(tc, "SessionBean PostConstruct failure", t);
            }

            throw ExceptionUtil.EJBException("session bean lifecycle interceptor failure", t);
        } finally {
            // Exit trace is emitted whether the chain succeeded or threw.
            if (isTraceOn && // d527372
                TEBeanLifeCycleInfo.isTraceEnabled()) {
                TEBeanLifeCycleInfo.traceEJBCallExit(LifecycleInterceptorWrapper.TRACE_NAMES[methodId]);
            }
        }
    }
}
public class SchemaTool {

    /**
     * Scans the schemaDirectory for avro schemas, and creates or migrates HBase
     * Common managed schemas managed by this instance's entity manager.
     *
     * @param schemaDirectory
     *          The directory to recursively scan for avro schema files. This
     *          directory can be a directory on the classpath, including a
     *          directory that is embedded in a jar on the classpath. In both of
     *          those cases, the schemaDirectory should be prefixed with
     *          classpath:
     * @param createTableAndFamilies
     *          If true, will create the table for each schema if it doesn't
     *          exist, and will create families if they don't exist.
     */
    public void createOrMigrateSchemaDirectory(String schemaDirectory,
        boolean createTableAndFamilies) throws InterruptedException {
        List<String> schemaStrings;
        // Resolve the schema source: classpath resource (plain directory or
        // inside a jar) vs. a regular filesystem directory.
        if (schemaDirectory.startsWith(CLASSPATH_PREFIX)) {
            URL dirURL = getClass().getClassLoader().getResource(
                schemaDirectory.substring(CLASSPATH_PREFIX.length()));
            if (dirURL != null && dirURL.getProtocol().equals("file")) {
                try {
                    schemaStrings = getSchemaStringsFromDir(new File(dirURL.toURI()));
                } catch (URISyntaxException e) {
                    throw new DatasetException(e);
                }
            } else if (dirURL != null && dirURL.getProtocol().equals("jar")) {
                // Strip the leading "file:" (5 chars) and the "!..." suffix to
                // recover the jar's filesystem path from the URL.
                String jarPath = dirURL.getPath().substring(5, dirURL.getPath().indexOf("!"));
                schemaStrings = getSchemaStringsFromJar(jarPath,
                    schemaDirectory.substring(CLASSPATH_PREFIX.length()));
            } else {
                String msg = "Could not find classpath resource: " + schemaDirectory;
                LOG.error(msg);
                throw new DatasetException(msg);
            }
        } else {
            schemaStrings = getSchemaStringsFromDir(new File(schemaDirectory));
        }

        // Group entity schema strings by every table they map to.
        Map<String, List<String>> tableEntitySchemaMap = new HashMap<String, List<String>>();
        for (String schemaString : schemaStrings) {
            List<String> tables = getTablesFromSchemaString(schemaString);
            for (String table : tables) {
                if (tableEntitySchemaMap.containsKey(table)) {
                    tableEntitySchemaMap.get(table).add(schemaString);
                } else {
                    List<String> entityList = new ArrayList<String>();
                    entityList.add(schemaString);
                    tableEntitySchemaMap.put(table, entityList);
                }
            }
        }

        // Validate that every table has at least one entity schema.
        // NOTE(review): map entries are only ever created with one element above,
        // so the size()==0 branch looks unreachable from this method alone —
        // confirm before relying on this validation.
        for (Entry<String, List<String>> entry : tableEntitySchemaMap.entrySet()) {
            String table = entry.getKey();
            List<String> entitySchemas = entry.getValue();
            if (entitySchemas.size() == 0) {
                String msg = "Table requested, but no entity schemas for Table: " + table;
                LOG.error(msg);
                throw new ValidationException(msg);
            }
        }

        // Migrate the schemas in a batch, collect all the table descriptors
        // that require a schema migration
        Collection<HTableDescriptor> tableDescriptors = Lists.newArrayList();
        for (Entry<String, List<String>> entry : tableEntitySchemaMap.entrySet()) {
            String table = entry.getKey();
            for (String entitySchemaString : entry.getValue()) {
                boolean migrationRequired = prepareManagedSchema(table, entitySchemaString);
                // Optimization: If no migration is req, then no change in the table
                if (migrationRequired) {
                    tableDescriptors.add(prepareTableDescriptor(table, entitySchemaString));
                }
            }
        }
        if (createTableAndFamilies) {
            createTables(tableDescriptors);
        }
    }
}
public class BooleanIOSetting { /** * Sets the setting for a certain question . The setting * is a boolean , and it accepts only " true " and " false " . */ @ Override public void setSetting ( String setting ) throws CDKException { } }
if ( setting . equals ( "true" ) || setting . equals ( "false" ) ) { this . setting = setting ; } else if ( setting . equals ( "yes" ) || setting . equals ( "y" ) ) { this . setting = "true" ; } else if ( setting . equals ( "no" ) || setting . equals ( "n" ) ) { this . setting = "false" ; } else { throw new CDKException ( "Setting " + setting + " is not a boolean." ) ; }
public class SuspiciousLoopSearch { /** * overrides the visitor to initialize and tear down the opcode stack * @ param classContext * the context object of the currently parsed class */ @ Override public void visitClassContext ( ClassContext classContext ) { } }
try { ifBlocks = new ArrayList < > ( ) ; loadedRegs = new HashMap < > ( ) ; loopLocations = new BitSet ( ) ; stack = new OpcodeStack ( ) ; super . visitClassContext ( classContext ) ; } finally { ifBlocks = null ; loadedRegs = null ; loopLocations = null ; stack = null ; }
public class PlayerAbstract { /** * Audio */ @ Override public void play ( ) { } }
final String name = media . getPath ( ) ; if ( Medias . getResourcesLoader ( ) . isPresent ( ) ) { if ( cache == null ) { cache = extractFromJar ( media ) ; } play ( cache , name ) ; } else { play ( media . getFile ( ) . getAbsolutePath ( ) , name ) ; }
public class VirtualMachineScaleSetExtensionsInner { /** * The operation to create or update an extension . * @ param resourceGroupName The name of the resource group . * @ param vmScaleSetName The name of the VM scale set where the extension should be create or updated . * @ param vmssExtensionName The name of the VM scale set extension . * @ param extensionParameters Parameters supplied to the Create VM scale set Extension operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < VirtualMachineScaleSetExtensionInner > createOrUpdateAsync ( String resourceGroupName , String vmScaleSetName , String vmssExtensionName , VirtualMachineScaleSetExtensionInner extensionParameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , vmScaleSetName , vmssExtensionName , extensionParameters ) . map ( new Func1 < ServiceResponse < VirtualMachineScaleSetExtensionInner > , VirtualMachineScaleSetExtensionInner > ( ) { @ Override public VirtualMachineScaleSetExtensionInner call ( ServiceResponse < VirtualMachineScaleSetExtensionInner > response ) { return response . body ( ) ; } } ) ;
public class SARLSourceViewerPreferenceAccess { /** * Enable or disable the auto - formatting feature into the SARL editor . * @ param enable is { @ code true } if it is enabled ; { @ code false } if it is disable ; { @ code null } * to restore the default value . * @ since 0.8 * @ see # getWritablePreferenceStore ( Object ) */ public void setAutoFormattingEnabled ( Boolean enable ) { } }
final IPreferenceStore store = getWritablePreferenceStore ( null ) ; if ( enable == null ) { store . setToDefault ( AUTOFORMATTING_PROPERTY ) ; } else { store . setValue ( AUTOFORMATTING_PROPERTY , enable . booleanValue ( ) ) ; }
public class SwaggerBuilder { /** * Determines if a parameter is Required or not . * @ param parameter * @ return true if the parameter is required */ protected boolean isRequired ( Parameter parameter ) { } }
return parameter . isAnnotationPresent ( Body . class ) || parameter . isAnnotationPresent ( Required . class ) || parameter . isAnnotationPresent ( NotNull . class ) ;
public class OcAgentTraceServiceConfigRpcHandler { /** * subsequent updated library configs , unless the stream is interrupted . */ synchronized void sendInitialMessage ( Node node ) { } }
io . opencensus . proto . trace . v1 . TraceConfig currentTraceConfigProto = TraceProtoUtils . getCurrentTraceConfig ( traceConfig ) ; // First config must have Node set . CurrentLibraryConfig firstConfig = CurrentLibraryConfig . newBuilder ( ) . setNode ( node ) . setConfig ( currentTraceConfigProto ) . build ( ) ; sendCurrentConfig ( firstConfig ) ;
public class SqlInsertBuilder { /** * Generate insert used in content provider class . * @ param classBuilder * the class builder * @ param method * the method * @ param insertResultType * the insert result type */ private static void generateInsertForContentProvider ( TypeSpec . Builder classBuilder , final SQLiteModelMethod method , InsertType insertResultType ) { } }
final SQLiteDaoDefinition daoDefinition = method . getParent ( ) ; final SQLiteEntity entity = method . getEntity ( ) ; final Set < String > columns = new LinkedHashSet < > ( ) ; MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( method . contentProviderMethodName ) ; if ( ! method . getParent ( ) . hasSamePackageOfSchema ( ) ) { methodBuilder . addModifiers ( Modifier . PUBLIC ) ; } ParameterSpec parameterSpec ; parameterSpec = ParameterSpec . builder ( Uri . class , "uri" ) . build ( ) ; methodBuilder . addParameter ( parameterSpec ) ; parameterSpec = ParameterSpec . builder ( ContentValues . class , "contentValues" ) . build ( ) ; methodBuilder . addParameter ( parameterSpec ) ; methodBuilder . returns ( Long . TYPE ) ; SqlBuilderHelper . generateLogForContentProviderBeginning ( method , methodBuilder ) ; // just detect which columns are admitted JQLChecker . getInstance ( ) . replace ( method , method . jql , new JQLReplacerListenerImpl ( method ) { @ Override public String onColumnName ( String columnName ) { SQLProperty tempProperty = entity . get ( columnName ) ; AssertKripton . assertTrueOrUnknownPropertyInJQLException ( tempProperty != null , method , columnName ) ; columns . add ( tempProperty . columnName ) ; return tempProperty . columnName ; } @ Override public String onColumnFullyQualifiedName ( String tableName , String columnName ) { AssertKripton . fail ( "Inconsistent state" ) ; return null ; } } ) ; // generate columnCheckSet SqlBuilderHelper . generateColumnCheckSet ( classBuilder , method , columns ) ; // retrieve content values methodBuilder . addStatement ( "$T _contentValues=contentValuesForContentProvider(contentValues)" , KriptonContentValues . class ) ; // generate column check SqlBuilderHelper . forEachColumnInContentValue ( methodBuilder , method , "_contentValues.values().keySet()" , true , null ) ; methodBuilder . 
addCode ( "\n" ) ; String rxIdGetter = "result" ; // extract pathVariables // generate get uri variables in content values // every controls was done in constructor of SQLiteModelMethod for ( ContentUriPlaceHolder variable : method . contentProviderUriVariables ) { SQLProperty entityProperty = entity . get ( variable . value ) ; if ( entityProperty != null ) { methodBuilder . addCode ( "// Add parameter $L at path segment $L\n" , variable . value , variable . pathSegmentIndex ) ; TypeName entityPropertyType = entityProperty . getPropertyType ( ) . getTypeName ( ) ; if ( TypeUtility . isString ( entityPropertyType ) ) { methodBuilder . addStatement ( "contentValues.put($S, uri.getPathSegments().get($L))" , entityProperty . columnName , variable . pathSegmentIndex ) ; } else { methodBuilder . addStatement ( "contentValues.put($S, Long.valueOf(uri.getPathSegments().get($L)))" , entityProperty . columnName , variable . pathSegmentIndex ) ; } } if ( entityProperty . isPrimaryKey ( ) ) { rxIdGetter = PropertyUtility . getter ( entityProperty ) ; } } // generate log for inser operation SqlBuilderHelper . generateLogForContentValuesContentProvider ( method , methodBuilder ) ; ConflictAlgorithmType conflictAlgorithmType = InsertBeanHelper . getConflictAlgorithmType ( method ) ; String conflictString1 = "" ; String conflictString2 = "" ; if ( conflictAlgorithmType != ConflictAlgorithmType . NONE ) { conflictString1 = "WithOnConflict" ; conflictString2 = ", " + conflictAlgorithmType . getConflictAlgorithm ( ) ; methodBuilder . addCode ( "// conflict algorithm $L\n" , method . jql . conflictAlgorithmType ) ; } methodBuilder . addComment ( "insert operation" ) ; methodBuilder . addStatement ( "long result = database().insert$L($S, null, _contentValues.values()$L)" , conflictString1 , entity . getTableName ( ) , conflictString2 ) ; if ( method . getParent ( ) . getParent ( ) . generateRx ) { SQLProperty primaryKey = entity . getPrimaryKey ( ) ; if ( primaryKey . 
columnType == ColumnType . PRIMARY_KEY ) { // long autogenerated rxIdGetter = "result" ; } else { if ( primaryKey . isType ( String . class ) ) { rxIdGetter = String . format ( "contentValues.getAsString(\"%s\")" , primaryKey . columnName ) ; } else { rxIdGetter = String . format ( "contentValues.getAsLong(\"%s\")" , primaryKey . columnName ) ; } } GenericSQLHelper . generateSubjectNext ( entity , methodBuilder , SubjectType . INSERT , rxIdGetter ) ; } // support for livedata if ( daoDefinition . hasLiveData ( ) ) { methodBuilder . addComment ( "support for livedata" ) ; methodBuilder . addStatement ( BindDaoBuilder . METHOD_NAME_REGISTRY_EVENT + "(result>0?1:0)" ) ; } methodBuilder . addStatement ( "return result" ) ; // javadoc // we add at last javadoc , because need info is built at last . SqlBuilderHelper . generateJavaDocForContentProvider ( method , methodBuilder ) ; methodBuilder . addJavadoc ( "@param uri $S\n" , method . contentProviderUriTemplate . replace ( "*" , "[*]" ) ) ; methodBuilder . addJavadoc ( "@param contentValues content values\n" ) ; methodBuilder . addJavadoc ( "@return new row's id\n" ) ; classBuilder . addMethod ( methodBuilder . build ( ) ) ;
public class AnimationFactory { /** * Create push down animation for entering . * @ return Animation */ public static Animation pushDownIn ( ) { } }
AnimationSet animationSet = new AnimationSet ( true ) ; animationSet . setFillAfter ( true ) ; animationSet . addAnimation ( new TranslateAnimation ( 0 , 0 , - 100 , 0 ) ) ; animationSet . addAnimation ( new AlphaAnimation ( 0.0f , 1.0f ) ) ; return animationSet ;
public class StreamTransformation { /** * Returns the output type of this { @ code StreamTransformation } as a { @ link TypeInformation } . Once * this is used once the output type cannot be changed anymore using { @ link # setOutputType } . * @ return The output type of this { @ code StreamTransformation } */ public TypeInformation < T > getOutputType ( ) { } }
if ( outputType instanceof MissingTypeInfo ) { MissingTypeInfo typeInfo = ( MissingTypeInfo ) this . outputType ; throw new InvalidTypesException ( "The return type of function '" + typeInfo . getFunctionName ( ) + "' could not be determined automatically, due to type erasure. " + "You can give type information hints by using the returns(...) " + "method on the result of the transformation call, or by letting " + "your function implement the 'ResultTypeQueryable' " + "interface." , typeInfo . getTypeException ( ) ) ; } typeUsed = true ; return this . outputType ;
public class SqlDatabase {

    /**
     * (non-Javadoc)
     *
     * Opens the database at the given path, then notifies first the internal
     * listeners and then the externally registered ones.
     *
     * @see org.parosproxy.paros.db.DatabaseIF#open(java.lang.String)
     */
    @Override
    public final void open(String path) throws Exception {
        // ZAP: Added log statement.
        logger.debug("open " + path);
        setDatabaseServer(createDatabaseServer(path));
        // Internal listeners are notified before the public ones.
        notifyListenersDatabaseOpen(internalDatabaseListeners, getDatabaseServer());
        notifyListenersDatabaseOpen(getDatabaseServer());
    }
}
public class ProxyQueueConversationGroupImpl { /** * If a session is failed to be created then we need to bury it so * that it never bothers us again . The queue is simply removed from * the conversation group - no attempt is made to remove messages . * It is assumed that if something went wrong in the creation then no * messages would ever get to the queue . * @ param queue */ public synchronized void bury ( ProxyQueue queue ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "bury" ) ; // Get the proxy queue id short id = queue . getId ( ) ; // Remove it from the table mutableId . setValue ( id ) ; idToProxyQueueMap . remove ( mutableId ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "bury" ) ;
public class MetricRegistryImpl {

    /**
     * Returns a map of all the timers in the registry and their names which match the given filter.
     *
     * @param filter the metric filter to match
     * @return all the timers in the registry
     */
    @Override
    public SortedMap<String, Timer> getTimers(MetricFilter filter) {
        // Delegate to the generic metric query, narrowing to Timer instances.
        return getMetrics(Timer.class, filter);
    }
}
public class CommandBuilder { /** * Adds new option and checks if option overlaps with existing options * @ param option to be added * @ throws IllegalArgumentException in case option overlaps with existing options by name , long name or setting */ public void add ( CommandOption option ) { } }
Assert . notNull ( option , "Missing command line option" ) ; CommandOption found = find ( option ) ; Assert . isNull ( found , "Given option: " + option + " overlaps with: " + found ) ; options . add ( option ) ;
public class ArrayCoreMap {

    /**
     * {@inheritDoc}
     *
     * Returns a live view over the parallel {@code keys} array; iteration walks
     * the array in index order and {@code remove()} delegates to the enclosing
     * map's remove.
     */
    public Set<Class<?>> keySet() {

        return new AbstractSet<Class<?>>() {
            @Override
            public Iterator<Class<?>> iterator() {
                return new Iterator<Class<?>>() {
                    // index of the NEXT element to return
                    private int i;

                    public boolean hasNext() {
                        return i < size;
                    }

                    public Class<?> next() {
                        try {
                            return keys[i++];
                        } catch (ArrayIndexOutOfBoundsException aioobe) {
                            throw new NoSuchElementException("ArrayCoreMap keySet iterator exhausted");
                        }
                    }

                    @SuppressWarnings("unchecked")
                    public void remove() {
                        // NOTE(review): after next() has incremented i, keys[i] is the
                        // element AFTER the one last returned; the Iterator contract
                        // says remove() should target the last-returned element
                        // (keys[i-1]), and the index is not adjusted for the shift
                        // caused by removal. Confirm this is intentional before use.
                        ArrayCoreMap.this.remove((Class) keys[i]);
                    }
                };
            }

            @Override
            public int size() {
                return size;
            }
        };
    }
}
public class ScheduledService {

    /**
     * 停止执行定时任务，还可以重新start
     * (Stops the scheduled task; the service can be started again afterwards.)
     *
     * Idempotent: returns immediately when not started. Cancels the pending
     * future (interrupting if running) and shuts the executor down; any failure
     * during shutdown is logged, and the registry/state cleanup in the finally
     * block always runs so a later start() is possible.
     */
    public synchronized void stop() {
        if (!started) {
            // nothing to do — never started or already stopped
            return;
        }
        try {
            if (future != null) {
                // true: interrupt the task if it is currently executing
                future.cancel(true);
                future = null;
            }
            if (scheduledExecutorService != null) {
                scheduledExecutorService.shutdownNow();
                scheduledExecutorService = null;
            }
        } catch (Throwable t) {
            // best-effort shutdown: log and continue to the cleanup below
            LOGGER.warn(t.getMessage(), t);
        } finally {
            SCHEDULED_SERVICE_MAP.remove(this);
            started = false;
        }
    }
}
public class S3CryptoModuleAE {

    /**
     * Returns an updated object where the object content input stream contains the
     * decrypted contents.
     *
     * @param wrapper
     *            The object whose contents are to be decrypted.
     * @param cekMaterial
     *            The instruction that will be used to decrypt the object data.
     * @param range
     *            NOTE(review): not used in this method — presumably range
     *            adjustment happens elsewhere; confirm before removing.
     * @return
     *            The updated object where the object content input stream contains
     *            the decrypted contents.
     */
    private S3ObjectWrapper decrypt(S3ObjectWrapper wrapper, ContentCryptoMaterial cekMaterial, long[] range) {
        S3ObjectInputStream objectContent = wrapper.getObjectContent();
        // Wrap the raw content stream with a decrypting cipher stream, keeping the
        // original HTTP request association on the replacement stream.
        wrapper.setObjectContent(new S3ObjectInputStream(
            new CipherLiteInputStream(objectContent, cekMaterial.getCipherLite(),
                DEFAULT_BUFFER_SIZE),
            objectContent.getHttpRequest()));
        return wrapper;
    }
}
public class AutomationAccountsInner { /** * Retrieve a list of accounts within a given resource group . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; AutomationAccountInner & gt ; object */ public Observable < Page < AutomationAccountInner > > listByResourceGroupNextAsync ( final String nextPageLink ) { } }
return listByResourceGroupNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < AutomationAccountInner > > , Page < AutomationAccountInner > > ( ) { @ Override public Page < AutomationAccountInner > call ( ServiceResponse < Page < AutomationAccountInner > > response ) { return response . body ( ) ; } } ) ;
public class SimpleDistanceConstraint { /** * Add an [ lb , ub ] interval between the two { @ link TimePoint } s of this constraint . * @ param i The interval to add . * @ return < code > true < / code > if the interval was added , < code > false < / code > if it is malformed . */ public boolean addInterval ( Bounds i ) { } }
if ( i . max < this . minimum || i . min > this . maximum ) { return false ; } bs . add ( i ) ; return true ;