signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class MetadataFinder { /** * Requests the cue list for a specific track ID , given a dbserver connection to a player that has already * been set up . * @ param rekordboxId the track of interest * @ param slot identifies the media slot we are querying * @ param client the dbserver client that is communicating with the appropriate player * @ return the retrieved cue list , or { @ code null } if none was available * @ throws IOException if there is a communication problem */ CueList getCueList ( int rekordboxId , CdjStatus . TrackSourceSlot slot , Client client ) throws IOException { } }
Message response = client . simpleRequest ( Message . KnownType . CUE_LIST_REQ , null , client . buildRMST ( Message . MenuIdentifier . DATA , slot ) , new NumberField ( rekordboxId ) ) ; if ( response . knownType == Message . KnownType . CUE_LIST ) { return new CueList ( response ) ; } logger . error ( "Unexpected response type when requesting cue list: {}" , response ) ; return null ;
public class EQL { /** * Prints the . * @ param _ oid the oid * @ return the prints the */ public static Print print ( final String ... _oid ) { } }
return ( Print ) org . efaps . eql2 . EQL2 . print ( _oid ) ;
public class SiteRoot { /** * Gets site root level path of a site . * @ param resource Resource within the site * @ return Site root path for the site . The path is not checked for validness . */ public String getRootPath ( Resource resource ) { } }
int rootLevel = urlHandlerConfig . getSiteRootLevel ( resource ) ; if ( rootLevel > 0 ) { return Path . getAbsoluteParent ( resource . getPath ( ) , rootLevel , resource . getResourceResolver ( ) ) ; } return null ;
public class HttpConnection { /** * Get the Remote address . * @ return the remote host name */ public String getRemoteAddr ( ) { } }
if ( _remoteAddr == null ) { if ( _remoteInetAddress == null ) return "127.0.0.1" ; _remoteAddr = _remoteInetAddress . getHostAddress ( ) ; } return _remoteAddr ;
public class ServerAzureADAdministratorsInner { /** * Deletes an existing server Active Directory Administrator . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ServerAzureADAdministratorInner object */ public Observable < ServerAzureADAdministratorInner > beginDeleteAsync ( String resourceGroupName , String serverName ) { } }
return beginDeleteWithServiceResponseAsync ( resourceGroupName , serverName ) . map ( new Func1 < ServiceResponse < ServerAzureADAdministratorInner > , ServerAzureADAdministratorInner > ( ) { @ Override public ServerAzureADAdministratorInner call ( ServiceResponse < ServerAzureADAdministratorInner > response ) { return response . body ( ) ; } } ) ;
public class ManipulationUtils { /** * Adds the retreiveModelName method to the class . */ private static void addRetrieveModelName ( CtClass clazz ) throws CannotCompileException , NotFoundException { } }
CtClass [ ] params = generateClassField ( ) ; CtMethod method = new CtMethod ( cp . get ( String . class . getName ( ) ) , "retrieveModelName" , params , clazz ) ; StringBuilder builder = new StringBuilder ( ) ; builder . append ( createTrace ( "Called retrieveModelName" ) ) . append ( String . format ( "return \"%s\";" , clazz . getName ( ) ) ) ; method . setBody ( createMethodBody ( builder . toString ( ) ) ) ; clazz . addMethod ( method ) ;
public class CommercePriceListUtil { /** * Returns the commerce price list where uuid = & # 63 ; and groupId = & # 63 ; or throws a { @ link NoSuchPriceListException } if it could not be found . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching commerce price list * @ throws NoSuchPriceListException if a matching commerce price list could not be found */ public static CommercePriceList findByUUID_G ( String uuid , long groupId ) throws com . liferay . commerce . price . list . exception . NoSuchPriceListException { } }
return getPersistence ( ) . findByUUID_G ( uuid , groupId ) ;
public class MemcmpDecoder { /** * A fixed is decoded by just reading length bytes , and placing the bytes read * into the bytes array , starting at index start . * @ param bytes * The bytes array to populate . * @ param start * The index in bytes to place the read bytes . * @ param length * The number of bytes to read . */ @ Override public void readFixed ( byte [ ] bytes , int start , int length ) throws IOException { } }
int i = in . read ( bytes , start , length ) ; if ( i < length ) { throw new EOFException ( ) ; }
public class Log { /** * delete of files more than 1 day old */ private static void deleteOldAndEmptyFiles ( ) { } }
File dir = LOG_FILE_DIR ; if ( dir . exists ( ) ) { File [ ] files = dir . listFiles ( ) ; for ( File f : files ) { if ( f . length ( ) == 0 || f . lastModified ( ) + MAXFILEAGE < System . currentTimeMillis ( ) ) { f . delete ( ) ; } } }
public class AbstractPrintQuery { /** * Method to get the instances used for an select . * @ param _ selectStmt selectstatement the attribute is wanted for * @ return List of instances for the select or an empty list in case that * the onselect is not found * @ throws EFapsException on error */ public List < Instance > getInstances4Select ( final String _selectStmt ) throws EFapsException { } }
final OneSelect oneselect = this . selectStmt2OneSelect . get ( _selectStmt ) ; return oneselect == null ? new ArrayList < > ( ) : oneselect . getInstances ( ) ;
public class WmsServiceImpl {
    /**
     * Builds a WMS GetLegendGraphic request URL for the given layer configuration.
     * GeoServer-specific vendor options (legend_options, icon sizes, dpi) are appended
     * only when the configured vendor is GeoServer; other servers get plain width/height.
     *
     * @param wmsConfig    the WMS layer configuration (base URL, version, layers, styles, vendor)
     * @param legendConfig the legend rendering configuration (format, sizes, font, dpi)
     * @return the assembled GetLegendGraphic URL
     */
    @Override
    public String getLegendGraphicUrl(WmsLayerConfiguration wmsConfig, LegendConfig legendConfig) {
        StringBuilder url = getBaseUrlBuilder(wmsConfig);
        // Parameter: service — append with '&' if the base URL already has a query string.
        int pos = url.lastIndexOf("?");
        if (pos > 0) {
            url.append("&service=WMS");
        } else {
            url.append("?service=WMS");
        }
        // Parameter: version
        url.append("&version=");
        url.append(wmsConfig.getVersion().toString());
        // Parameter: layers
        url.append("&layer=");
        url.append(wmsConfig.getLayers()); // No URL.encode here!
        // Parameter: request type
        url.append("&request=GetLegendGraphic");
        // Parameter: styles
        url.append("&STYLE=");
        url.append(wmsConfig.getStyles());
        // Parameter: format — normalize to a MIME type, defaulting to image/png.
        url.append("&format=");
        String format = legendConfig.getImageFormat();
        if (format == null) {
            url.append("image/png");
        } else if (!format.startsWith("image/")) {
            url.append("image/");
            url.append(format.toLowerCase());
        } else {
            url.append(format.toLowerCase());
        }
        // Parameter: transparent
        url.append("&transparent=true");
        // Check for specific vendor options:
        if (WmsServiceVendor.GEOSERVER_WMS.equals(wmsConfig.getWmsServiceVendor())) {
            // Parameter: for geoserver, width = icon width
            url.append("&width=");
            url.append(legendConfig.getIconWidth());
            // Parameter: for geoserver, height = icon height
            url.append("&height=");
            url.append(legendConfig.getIconHeight());
            // GeoServer "legend_options" vendor parameter: font family/color/size entries
            // are only emitted when configured.
            url.append("&legend_options=");
            if (null != legendConfig.getFontStyle().getFamily()) {
                url.append("fontName:");
                url.append(legendConfig.getFontStyle().getFamily());
                url.append(";");
            }
            url.append("fontAntiAliasing:true;");
            if (null != legendConfig.getFontStyle().getColor()) {
                url.append("fontColor:");
                // GeoServer expects 0xRRGGBB, not #RRGGBB.
                url.append(legendConfig.getFontStyle().getColor().replace("#", "0x"));
                url.append(";");
            }
            if (legendConfig.getFontStyle().getSize() > 0) {
                url.append("fontSize:");
                url.append(legendConfig.getFontStyle().getSize());
                url.append(";");
            }
            // geoserver supports dpi directly, use calculated width/height for other servers
            if (legendConfig.getDpi() != null) {
                double dpi = legendConfig.getDpi(); // default dpi is 90.
                url.append("bgColor:0xFFFFFF;dpi:" + (int) dpi);
            }
        } else {
            if (legendConfig.getWidth() != null) {
                // Parameter: width
                url.append("&width=");
                url.append(legendConfig.getWidth());
            }
            if (legendConfig.getHeight() != null) {
                // Parameter: height
                url.append("&height=");
                url.append(legendConfig.getHeight());
            }
        }
        return finishUrl(WmsRequest.GETLEGENDGRAPHIC, url);
    }
}
public class ZanataInterface { /** * Get all of the Document ID ' s available from Zanata for the configured project . * @ return A List of Resource Objects that contain information such as Document ID ' s . */ public List < ResourceMeta > getZanataResources ( ) { } }
ClientResponse < List < ResourceMeta > > response = null ; try { final ISourceDocResource client = proxyFactory . getSourceDocResource ( details . getProject ( ) , details . getVersion ( ) ) ; response = client . get ( null ) ; final Status status = Response . Status . fromStatusCode ( response . getStatus ( ) ) ; if ( status == Response . Status . OK ) { final List < ResourceMeta > entities = response . getEntity ( ) ; return entities ; } else { log . error ( "REST call to get() did not complete successfully. HTTP response code was " + status . getStatusCode ( ) + ". Reason " + "was " + status . getReasonPhrase ( ) ) ; } } catch ( final Exception ex ) { log . error ( "Failed to retrieve the list of Zanata Source Documents" , ex ) ; } finally { /* * If you are using RESTEasy client framework , and returning a Response from your service method , you will * explicitly need to release the connection . */ if ( response != null ) response . releaseConnection ( ) ; /* Perform a small wait to ensure zanata isn ' t overloaded */ performZanataRESTCallWaiting ( ) ; } return null ;
public class UsageStatistics { /** * Returns true if it ' s time for us to check for new version . */ public boolean isDue ( ) { } }
// user opted out . no data collection . if ( ! Jenkins . getInstance ( ) . isUsageStatisticsCollected ( ) || DISABLED ) return false ; long now = System . currentTimeMillis ( ) ; if ( now - lastAttempt > DAY ) { lastAttempt = now ; return true ; } return false ;
public class ResourceUtilsCore { /** * Extract resource provider from a resource ID string . * @ param id the resource ID string * @ return the resource group name */ public static String resourceProviderFromResourceId ( String id ) { } }
return ( id != null ) ? ResourceId . fromString ( id ) . providerNamespace ( ) : null ;
public class MoneyFormatter { /** * Parses the text extracting monetary information . * This method parses the input providing low - level access to the parsing state . * The resulting context contains the parsed text , indicator of error , position * following the parse and the parsed currency and amount . * Together , these provide enough information for higher level APIs to use . * @ param text the text to parse , not null * @ param startIndex the start index to parse from * @ return the parsed monetary value , null only if the parse results in an error * @ throws IndexOutOfBoundsException if the start index is invalid * @ throws UnsupportedOperationException if this formatter cannot parse */ public MoneyParseContext parse ( CharSequence text , int startIndex ) { } }
checkNotNull ( text , "Text must not be null" ) ; if ( startIndex < 0 || startIndex > text . length ( ) ) { throw new StringIndexOutOfBoundsException ( "Invalid start index: " + startIndex ) ; } if ( isParser ( ) == false ) { throw new UnsupportedOperationException ( "MoneyFomatter has not been configured to be able to parse" ) ; } MoneyParseContext context = new MoneyParseContext ( locale , text , startIndex ) ; printerParser . parse ( context ) ; return context ;
public class AllureResultsUtils { /** * Write throwable as attachment . * @ param throwable to write * @ param title title of attachment * @ return Created { @ link ru . yandex . qatools . allure . model . Attachment } */ public static Attachment writeAttachmentWithErrorMessage ( Throwable throwable , String title ) { } }
String message = throwable . getMessage ( ) ; try { return writeAttachment ( message . getBytes ( CONFIG . getAttachmentsEncoding ( ) ) , title ) ; } catch ( Exception e ) { e . addSuppressed ( throwable ) ; LOGGER . error ( String . format ( "Can't write attachment \"%s\"" , title ) , e ) ; } return new Attachment ( ) ;
public class MeteredBalancingPolicy {
    /**
     * Begins by claiming all work units that are pegged to this node.
     * Then, continues to claim work from the available pool until we've claimed
     * equal to or slightly more than the total desired load.
     */
    @Override
    public void claimWork() throws InterruptedException {
        // Hold the cluster-wide work-unit lock for the entire pass so the view of
        // unclaimed work stays consistent while we claim.
        synchronized (cluster.allWorkUnits) {
            // Pegged work is always ours, regardless of current load.
            for (String workUnit : getUnclaimed()) {
                if (isPeggedToMe(workUnit)) {
                    claimWorkPeggedToMe(workUnit);
                }
            }
            final double evenD = evenDistribution();
            // Snapshot the pool AFTER pegged claims so we don't re-attempt those.
            LinkedList<String> unclaimed = new LinkedList<String>(getUnclaimed());
            // Claim until we are above the even share or the pool is empty.
            // Note: <= means we may end slightly OVER the even distribution.
            while (myLoad() <= evenD && !unclaimed.isEmpty()) {
                final String workUnit = unclaimed.poll();
                if (config.useSoftHandoff && cluster.containsHandoffRequest(workUnit)
                        && isFairGame(workUnit) && attemptToClaim(workUnit, true)) {
                    // Soft-handoff path: we claimed it on behalf of a handoff request,
                    // so complete the handoff protocol.
                    LOG.info(workUnit);
                    handoffListener.finishHandoff(workUnit);
                } else if (isFairGame(workUnit)) {
                    attemptToClaim(workUnit);
                }
            }
        }
    }
}
public class DateAdapter { /** * Unmarshal a Date . * @ param value String from which to unmarshal . * @ return the unmarshalled Date object */ public Date unmarshal ( String value ) { } }
if ( value != null ) { if ( value . length ( ) >= lengthOfDateFmtYYYY_MM_DD ) { // Extract just the date from the string YYYY - MM - DD value = value . substring ( 0 , lengthOfDateFmtYYYY_MM_DD ) ; boolean isMatch = value . matches ( datePattern ) ; if ( isMatch ) { return DatatypeConverter . parseDate ( value ) . getTime ( ) ; } else { return DatatypeConverter . parseDate ( INVALIDDATE ) . getTime ( ) ; } } else { return DatatypeConverter . parseDate ( INVALIDDATE ) . getTime ( ) ; } } else { return null ; }
public class CommerceNotificationTemplateUtil { /** * Returns the first commerce notification template in the ordered set where uuid = & # 63 ; . * @ param uuid the uuid * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the first matching commerce notification template * @ throws NoSuchNotificationTemplateException if a matching commerce notification template could not be found */ public static CommerceNotificationTemplate findByUuid_First ( String uuid , OrderByComparator < CommerceNotificationTemplate > orderByComparator ) throws com . liferay . commerce . notification . exception . NoSuchNotificationTemplateException { } }
return getPersistence ( ) . findByUuid_First ( uuid , orderByComparator ) ;
public class SQLExpressions { /** * REGR _ SYY makes the following computation after the elimination of null ( arg1 , arg2 ) pairs : * < p > { @ code REGR _ COUNT ( arg1 , arg2 ) * VAR _ POP ( arg1 ) } < / p > * @ param arg1 first arg * @ param arg2 second arg * @ return regr _ syy ( arg1 , arg2) */ public static WindowOver < Double > regrSyy ( Expression < ? extends Number > arg1 , Expression < ? extends Number > arg2 ) { } }
return new WindowOver < Double > ( Double . class , SQLOps . REGR_SYY , arg1 , arg2 ) ;
public class PingManager { /** * Query the specified entity to see if it supports the Ping protocol ( XEP - 0199 ) . * @ param jid The id of the entity the query is being sent to * @ return true if it supports ping , false otherwise . * @ throws XMPPErrorException An XMPP related error occurred during the request * @ throws NoResponseException if there was no response from the jid . * @ throws NotConnectedException * @ throws InterruptedException */ public boolean isPingSupported ( Jid jid ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } }
return ServiceDiscoveryManager . getInstanceFor ( connection ( ) ) . supportsFeature ( jid , Ping . NAMESPACE ) ;
public class XMLValidator { /** * Check some limits of our simplified XML output . */ public void checkSimpleXMLString ( String xml ) throws IOException { } }
Pattern pattern = Pattern . compile ( "(<error.*?/>)" , Pattern . DOTALL | Pattern . MULTILINE ) ; Matcher matcher = pattern . matcher ( xml ) ; int pos = 0 ; while ( matcher . find ( pos ) ) { String errorElement = matcher . group ( ) ; pos = matcher . end ( ) ; if ( errorElement . contains ( "\n" ) || errorElement . contains ( "\r" ) ) { throw new IOException ( "<error ...> may not contain line breaks" ) ; } char beforeError = xml . charAt ( matcher . start ( ) - 1 ) ; if ( beforeError != '\n' && beforeError != '\r' ) { throw new IOException ( "Each <error ...> must start on a new line" ) ; } }
public class ResourceDAO { /** * TODO : Rename */ public Resource updateArchived ( Resource resource , Boolean archived , User modifier ) { } }
return updateUrlNameAndArchived ( resource , resource . getUrlName ( ) , archived , modifier ) ;
public class RpcWrapper { /** * Select an IP address to use for communication , based on the * < code > key < / code > and < code > policy < / code > . * @ param key * The key to use when selecting an IP address . * @ return The IP address to use for communication . * @ throws IOException */ public String chooseIP ( byte [ ] key ) throws IOException { } }
if ( _ips == null || _ips . length == 0 ) { if ( _server != null ) { LOG . warn ( "ip list is not initialized, fallback to server" ) ; return _server ; } throw new IOException ( "ip list is not initialized" ) ; } return _ips [ Math . abs ( Arrays . hashCode ( key ) ) % _ips . length ] ;
public class ObjectInputStream {
    /**
     * Read a new object from the stream. It is assumed the object has not been
     * loaded yet (not a cyclic reference). Return the object read.
     * If the object implements <code>Externalizable</code> its
     * <code>readExternal</code> is called. Otherwise, all fields described by
     * the class hierarchy are loaded. Each class can define how its declared
     * instance fields are loaded by defining a private method
     * <code>readObject</code>
     *
     * @param unshared
     *            read the object unshared
     * @return the object read
     * @throws IOException
     *             If an IO exception happened when reading the object.
     * @throws OptionalDataException
     *             If optional data could not be found when reading the object
     *             graph
     * @throws ClassNotFoundException
     *             If a class for one of the objects could not be found
     */
    private Object readNewObject(boolean unshared)
            throws OptionalDataException, ClassNotFoundException, IOException {
        ObjectStreamClass classDesc = readClassDesc();
        if (classDesc == null) {
            throw missingClassDescriptor();
        }
        // Allocate the handle BEFORE reading the body so back-references inside
        // the object graph can resolve to this object.
        int newHandle = nextHandle();
        Class<?> objectClass = classDesc.forClass();
        Object result = null;
        Object registeredResult = null;
        if (objectClass != null) {
            // Now we know which class to instantiate and which constructor to
            // run. We are allowed to run the constructor.
            result = classDesc.newInstance(objectClass);
            registerObjectRead(result, newHandle, unshared);
            registeredResult = result;
        } else {
            // Class not resolvable locally: still consume the stream data below,
            // but there is no instance to populate.
            result = null;
        }
        try {
            // This is how we know what to do in defaultReadObject. And it is
            // also used by defaultReadObject to check if it was called from an
            // invalid place. It also allows readExternal to call
            // defaultReadObject and have it work.
            currentObject = result;
            currentClass = classDesc;
            // If Externalizable, just let the object read itself
            // Note that this value comes from the Stream, and in fact it could be
            // that the classes have been changed so that the info below now
            // conflicts with the newer class
            boolean wasExternalizable = (classDesc.getFlags() & SC_EXTERNALIZABLE) != 0;
            if (wasExternalizable) {
                boolean blockData = (classDesc.getFlags() & SC_BLOCK_DATA) != 0;
                if (!blockData) {
                    // Protocol v1: readExternal reads raw primitive data directly.
                    primitiveData = input;
                }
                if (mustResolve) {
                    Externalizable extern = (Externalizable) result;
                    extern.readExternal(this);
                }
                if (blockData) {
                    // Similar to readHierarchy. Anything not read by
                    // readExternal has to be consumed here
                    discardData();
                } else {
                    primitiveData = emptyStream;
                }
            } else {
                // If we got here, it is Serializable but not Externalizable.
                // Walk the hierarchy reading each class' slots
                readHierarchy(result, classDesc);
            }
        } finally {
            // Cleanup, needs to run always so that we can later detect invalid
            // calls to defaultReadObject
            currentObject = null;
            currentClass = null;
        }
        if (objectClass != null) {
            if (classDesc.hasMethodReadResolve()) {
                // Give the class a chance to substitute the deserialized instance
                // (e.g. canonicalization via readResolve()).
                Method methodReadResolve = classDesc.getMethodReadResolve();
                try {
                    result = methodReadResolve.invoke(result, (Object[]) null);
                } catch (IllegalAccessException ignored) {
                } catch (InvocationTargetException ite) {
                    // Unwrap and rethrow the user exception per serialization spec;
                    // anything else is assumed to be a RuntimeException.
                    Throwable target = ite.getTargetException();
                    if (target instanceof ObjectStreamException) {
                        throw (ObjectStreamException) target;
                    } else if (target instanceof Error) {
                        throw (Error) target;
                    } else {
                        throw (RuntimeException) target;
                    }
                }
            }
        }
        // We get here either if class-based replacement was not needed or if it
        // was needed but produced the same object or if it could not be
        // computed.
        // The object to return is the one we instantiated or a replacement for
        // it
        if (result != null && enableResolve) {
            result = resolveObject(result);
        }
        if (registeredResult != result) {
            // A replacement happened; re-register the handle to the final object.
            registerObjectRead(result, newHandle, unshared);
        }
        return result;
    }
}
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getFontHorizontalScaleFactor ( ) { } }
if ( fontHorizontalScaleFactorEClass == null ) { fontHorizontalScaleFactorEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 377 ) ; } return fontHorizontalScaleFactorEClass ;
public class Log { /** * Send an ERROR log message with { @ link SUBSYSTEM # MAIN } as default one . * @ param tag Used to identify the source of a log message . It usually identifies the class or * activity where the log call occurs . * @ param msg The message you would like logged . * @ return */ public static int e ( String tag , String msg ) { } }
return e ( SUBSYSTEM . MAIN , tag , msg ) ;
public class ForkJoinTask {
    /**
     * If not done, sets SIGNAL status and performs Object.wait(timeout). This task may or may not be
     * done on exit. Ignores interrupts.
     *
     * @param timeout using Object.wait conventions.
     */
    final void internalWait(long timeout) {
        int s;
        // status >= 0 means not yet completed. CAS in the SIGNAL bit so the
        // completing thread knows it must notify waiters.
        if ((s = status) >= 0 && // force completer to issue notify
            U.compareAndSwapInt(this, STATUS, s, s | SIGNAL)) {
            synchronized (this) {
                // Re-check under the monitor: completion may have raced in
                // between the CAS and acquiring the lock.
                if (status >= 0)
                    try {
                        wait(timeout);
                    } catch (InterruptedException ie) {
                        // Deliberately ignored per contract ("Ignores interrupts").
                    }
                else
                    // Already done: wake any other waiters instead of waiting.
                    notifyAll();
            }
        }
    }
}
public class AmazonRoute53Client { /** * Gets information about a specified traffic policy instance . * < note > * After you submit a < code > CreateTrafficPolicyInstance < / code > or an < code > UpdateTrafficPolicyInstance < / code > * request , there ' s a brief delay while Amazon Route 53 creates the resource record sets that are specified in the * traffic policy definition . For more information , see the < code > State < / code > response element . * < / note > < note > * In the Route 53 console , traffic policy instances are known as policy records . * < / note > * @ param getTrafficPolicyInstanceRequest * Gets information about a specified traffic policy instance . * @ return Result of the GetTrafficPolicyInstance operation returned by the service . * @ throws NoSuchTrafficPolicyInstanceException * No traffic policy instance exists with the specified ID . * @ throws InvalidInputException * The input is not valid . * @ sample AmazonRoute53 . GetTrafficPolicyInstance * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / route53-2013-04-01 / GetTrafficPolicyInstance " * target = " _ top " > AWS API Documentation < / a > */ @ Override public GetTrafficPolicyInstanceResult getTrafficPolicyInstance ( GetTrafficPolicyInstanceRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetTrafficPolicyInstance ( request ) ;
public class DSLMapLexer {
    /**
     * $ANTLR start "EOL"
     * ANTLR-generated lexer rule matching an end-of-line: "\r\n" (via syntactic
     * predicate), lone "\r", or lone "\n". Do not hand-edit the matching logic;
     * regenerate from DSLMap.g instead.
     */
    public final void mEOL() throws RecognitionException {
        try {
            int _type = EOL;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:281:6: ( ( '\\r\\n' )=> '\\r\\n' | '\\r' | '\\n' )
            // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:282:12: ( ( '\\r\\n' )=> '\\r\\n' | '\\r' | '\\n' )
            {
                // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:282:12: ( ( '\\r\\n' )=> '\\r\\n' | '\\r' | '\\n' )
                int alt2 = 3;
                int LA2_0 = input.LA(1);
                if ((LA2_0 == '\r')) {
                    // Lookahead 2 decides between "\r\n" (alt 1, guarded by the
                    // synpred) and the default alternative.
                    int LA2_1 = input.LA(2);
                    if ((LA2_1 == '\n') && (synpred1_DSLMap())) {
                        alt2 = 1;
                    }
                } else if ((LA2_0 == '\n')) {
                    alt2 = 3;
                } else {
                    // No viable alternative: fail silently when backtracking,
                    // otherwise raise the standard ANTLR error.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 2, 0, input);
                    throw nvae;
                }
                switch (alt2) {
                    case 1:
                        // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:282:20: ( '\\r\\n' )=> '\\r\\n'
                        {
                            match("\r\n");
                            if (state.failed) return;
                        }
                        break;
                    case 2:
                        // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:283:25: '\\r'
                        {
                            match('\r');
                            if (state.failed) return;
                        }
                        break;
                    case 3:
                        // src/main/resources/org/drools/compiler/lang/dsl/DSLMap.g:284:25: '\\n'
                        {
                            match('\n');
                            if (state.failed) return;
                        }
                        break;
                }
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // do for sure before leaving
        }
    }
}
public class BugInstance { /** * Add a class annotation . If this is the first class annotation added , it * becomes the primary class annotation . * @ param className * the name of the class * @ param sourceFileName * the source file of the class * @ return this object */ @ Nonnull public BugInstance addClass ( String className , String sourceFileName ) { } }
ClassAnnotation classAnnotation = new ClassAnnotation ( className , sourceFileName ) ; add ( classAnnotation ) ; return this ;
public class CategoryGraph {
    /**
     * Computes and sets the diameter, the average degree and the average shortest path length of the graph.
     * Do not call this in the constructor. May run a while.
     * It is called in the getters, if parameters are not yet initialized when retrieved.
     */
    private void setGraphParameters() {
        // Diameter is the maximum of all shortest path lengths
        // Average shortest path length is (as the name says) the average of the shortest path length between all node pairs
        double maxPathLength = 0.0;
        double shortestPathLengthSum = 0.0;
        double degreeSum = 0.0;
        double clusterCoefficientSum = 0.0;
        // iterate over all node pairs
        Set<Integer> nodes = undirectedGraph.vertexSet();
        // a hashset of the nodes which have been the start node of the computation process
        // for such nodes all path lengths have been already computed
        Set<Integer> wasSource = new HashSet<Integer>();
        int progress = 0;
        for (int node : nodes) {
            progress++;
            ApiUtilities.printProgressInfo(progress, nodes.size(), 100,
                    ApiUtilities.ProgressInfoMode.TEXT, "Getting graph parameters");
            int nodeDegree = undirectedGraph.degreeOf(node);
            degreeSum += nodeDegree;
            updateDegreeDistribution(nodeDegree);
            // cluster coefficient of a node C_v is the fraction of the connections that exist
            // between the neighbor nodes (k_v) of this node and all allowable connections
            // between the neighbors (k_v * (k_v - 1) / 2)
            // for degrees 0 or 1 there is no cluster coefficient, as there can be no connections between neighbors
            if (undirectedGraph.degreeOf(node) > 1) {
                double numberOfNeighborConnections = getNumberOfNeighborConnections(node);
                clusterCoefficientSum += (numberOfNeighborConnections / (nodeDegree * (nodeDegree - 1)));
            }
            // Returns the new shortestPathLengthSum and the new maxPathLength.
            // They are returned as a double array for performance reasons.
            // I do not want to create an object, as this function is called *very* often
            double[] returnValues = computeShortestPathLenghts(node, shortestPathLengthSum, maxPathLength, wasSource);
            shortestPathLengthSum = returnValues[0];
            maxPathLength = returnValues[1];
            // save the info that the node was already used as the source of path computation
            wasSource.add(node);
        }
        if (nodes.size() > 1) {
            // NOTE(review): the pair count nodes.size() * (nodes.size() - 1) / 2 is computed
            // in int arithmetic — could overflow for graphs with > ~65k nodes; confirm the
            // expected graph sizes.
            this.averageShortestPathLength = shortestPathLengthSum
                    / (nodes.size() * (nodes.size() - 1) / 2); // sum of path lengths / (number of node pairs)
        } else {
            this.averageShortestPathLength = 0; // there is only one node
        }
        this.diameter = maxPathLength;
        this.averageDegree = degreeSum / nodes.size();
        this.clusterCoefficient = clusterCoefficientSum / nodes.size();
    }
}
public class Smushing {
    /**
     * Return definition of smushing logic to be applied.
     *
     * @param oldLayout Old_Layout describes horizontal layout, older fonts may only provide this.
     * @param fullLayout Full_Layout describes ALL information about horizontal and vertical layout.
     * @return Smushing logic to be applied
     */
    static SmushingRulesToApply getRulesToApply(Integer oldLayout, Integer fullLayout) {
        List<SmushingRule> horizontalSmushingRules = new ArrayList<SmushingRule>();
        List<SmushingRule> verticalSmushingRules = new ArrayList<SmushingRule>();
        SmushingRule.Layout horizontalLayout = null;
        SmushingRule.Layout verticalLayout = null;
        // Prefer the full layout; fall back to the old (horizontal-only) layout.
        // NOTE(review): assumes at least one of the two arguments is non-null —
        // both-null would NPE on unboxing here; confirm callers guarantee this.
        int layout = fullLayout != null ? fullLayout : oldLayout;
        // Decompose the layout bitmask greedily. NOTE(review): this subtraction
        // scheme presumably relies on getAvailableCodeValues() iterating from the
        // largest code value down — verify the ordering contract of that method.
        for (Integer codeValue : SmushingRule.getAvailableCodeValues()) {
            if (layout >= codeValue) {
                layout = layout - codeValue;
                SmushingRule rule = SmushingRule.getByCodeValue(codeValue);
                if (rule.getType() == SmushingRule.Type.HORIZONTAL) {
                    horizontalLayout = rule.getLayout();
                    horizontalSmushingRules.add(rule);
                } else if (rule.getType() == SmushingRule.Type.VERTICAL) {
                    verticalLayout = rule.getLayout();
                    verticalSmushingRules.add(rule);
                }
            }
        }
        // No horizontal rule decoded: fall back on the legacy oldLayout semantics
        // (0 = fitting, -1 = full width).
        if (horizontalLayout == null) {
            if (oldLayout == 0) {
                horizontalLayout = SmushingRule.Layout.FITTING;
                horizontalSmushingRules.add(SmushingRule.HORIZONTAL_FITTING);
            } else if (oldLayout == -1) {
                horizontalLayout = SmushingRule.Layout.FULL_WIDTH;
            }
        } else if (horizontalLayout == SmushingRule.Layout.CONTROLLED_SMUSHING) {
            // Controlled smushing excludes the universal smushing rule.
            horizontalSmushingRules.remove(SmushingRule.HORIZONTAL_SMUSHING);
        }
        if (verticalLayout == null) {
            verticalLayout = SmushingRule.Layout.FULL_WIDTH;
        } else if (verticalLayout == SmushingRule.Layout.CONTROLLED_SMUSHING) {
            verticalSmushingRules.remove(SmushingRule.VERTICAL_SMUSHING);
        }
        return new SmushingRulesToApply(horizontalLayout, verticalLayout,
                horizontalSmushingRules, verticalSmushingRules);
    }
}
public class WebApplicationHandler { public void addFilterHolder ( FilterHolder holder ) { } }
_filterMap . put ( holder . getName ( ) , holder ) ; _filters . add ( holder ) ; addComponent ( holder ) ;
public class DefaultMapSerializer { /** * ( non - Javadoc ) * @ see org . togglz . core . util . MapSerializer # convertFromString ( java . lang . String ) */ @ Override public Map < String , String > deserialize ( String s ) { } }
try { String input = multiline ? s : s . replace ( '&' , '\n' ) ; Properties props = new Properties ( ) ; if ( s != null ) { props . load ( new StringReader ( input ) ) ; } LinkedHashMap < String , String > result = new LinkedHashMap < String , String > ( ) ; for ( Entry < Object , Object > entry : props . entrySet ( ) ) { result . put ( entry . getKey ( ) . toString ( ) , entry . getValue ( ) . toString ( ) ) ; } return result ; } catch ( IOException e ) { throw new IllegalStateException ( e ) ; }
public class Hash2 { /** * Checks whether expansion is needed and expand when { @ link # insertWithinLock ( Entry , int , int ) } is used . * No lock may be hold when calling this method , since the table must be locked completely using * the proper lock order . * < p > Need for expansion is only checked by comparing whether the associated segment is * full . Should be called after insert after giving up the lock . */ public void checkExpand ( int _hash ) { } }
// Map the hash to its segment index, then trigger an (eventual, possibly
// deferred) expansion only when that segment has exceeded its fill limit.
int si = _hash & LOCK_MASK ; long _size = segmentSize [ si ] . get ( ) ; if ( _size > segmentMaxFill ) { eventuallyExpand ( si ) ; }
public class RBFKernel {
    /**
     * Converts the &sigma; parameter of this kernel's form
     * k(x,y) = exp(-||x-y||^2 / (2 sigma^2)) into the equivalent &gamma; of
     * the common alternative form k(x,y) = exp(-&gamma; ||x-y||^2).
     *
     * @param sigma the sigma value; must be finite and strictly positive
     * @return the equivalent gamma value, 1 / (2 sigma^2)
     * @throws IllegalArgumentException if sigma is non-positive, NaN, or infinite
     */
    public static double sigmaToGamma(double sigma) {
        // !(sigma > 0) rejects non-positive values, NaN, and -Infinity in one
        // comparison; +Infinity needs the explicit check.
        if (!(sigma > 0) || Double.isInfinite(sigma)) {
            throw new IllegalArgumentException("sigma must be positive, not " + sigma);
        }
        return 1.0 / (2.0 * sigma * sigma);
    }
}
public class MicroMetaDao { /** * Executes the given SQL update statement . ( Original javadoc was mojibake - garbled . ) * @ param sql the SQL update / delete statement , executed as - is * @ return the number of rows affected */ public int updateObjByCondition ( String sql ) { } }
// NOTE(review): the SQL string is executed verbatim — callers must not build
// it from untrusted input (injection risk); confirm all call sites.
// JdbcTemplate jdbcTemplate = ( JdbcTemplate ) MicroDbHolder . getDbSource ( dbName ) ;
JdbcTemplate jdbcTemplate = getMicroJdbcTemplate ( ) ; logger . debug ( sql ) ; Integer retStatus = jdbcTemplate . update ( sql ) ; return retStatus ;
public class CommandFactory { /** * 添加一个命令 * @ param pCommand */ public void addCommand ( Command pCommand ) { } }
if ( ! mCommands . containsKey ( pCommand . getName ( ) ) ) { mCommands . put ( pCommand . getName ( ) , pCommand ) ; }
public class CurrentlyTracedCall { /** * Debug helper : dumps this call and its step tree to stdout . */ public void dumpOut ( ) { } }
// Print the call summary first, then recursively dump the step tree
// starting from the root at depth 1.
System . out . println ( this . toString ( ) ) ; TraceStep root = getRootStep ( ) ; dumpOut ( root , 1 ) ;
public class HomeController { /** * Since we are using the convention , the URI for this method is * / home / login * The method parameters are set with request parameters named * login and password . Thus if we have the request : * POST / home / login * login = john * password = nobodyknows * VRaptor will call : * homeController . login ( " john " , " nobodyknows " ) ; * This method only accept POST requests */ @ Post @ Public public void login ( String login , String password ) { } }
// search for the user in the database
final User currentUser = dao . find ( login , password ) ;
// if no user is found , adds an error message to the validator
// " invalid _ login _ or _ password " is the message key from messages . properties ,
// and that key is used with the fmt taglib in index . jsp , for example : < fmt : message key = " error . key " >
validator . check ( currentUser != null , new SimpleMessage ( "login" , "invalid_login_or_password" ) ) ;
// you can use " this " to redirect to another logic from this controller .
// NOTE: onErrorUsePageOf short-circuits — when validation failed, execution
// stops here and the login page is re-rendered; the lines below only run on success.
validator . onErrorUsePageOf ( this ) . login ( ) ;
// the login was valid , add user to session
userInfo . login ( currentUser ) ;
// we don ' t want to go to default page ( / WEB - INF / jsp / home / login . jsp )
// we want to redirect to the user ' s home
result . redirectTo ( UsersController . class ) . home ( ) ;
public class BusSupport { /** * This performs the same feature as { @ link # wrapEventHandler ( String , String , Object , String ) } , just parse the params from jsonObject . * @ param subscriber Original subscriber object * @ param jsonObject Json params * @ return An EventHandlerWrapper wrapping a subscriber and used to registered into event bus . */ public static EventHandlerWrapper wrapEventHandler ( @ NonNull Object subscriber , @ NonNull JSONObject jsonObject ) { } }
String type = jsonObject . optString ( "type" ) ; if ( TextUtils . isEmpty ( type ) ) { return null ; } String producer = jsonObject . optString ( "producer" ) ; // String subscriber = jsonObject . optString ( " subscriber " ) ; String action = jsonObject . optString ( "action" ) ; return new EventHandlerWrapper ( type , producer , subscriber , action ) ;
public class VAlarmProcedureValidator { /** * Validates a PROCEDURE - action VALARM : exactly one ATTACH ( the procedure to invoke ) and at most one DESCRIPTION . * { @ inheritDoc } */ public void validate ( VAlarm target ) throws ValidationException { } }
/*
 * Per the VALARM grammar: action/attach/trigger are REQUIRED and MUST NOT
 * occur more than once; duration/repeat are optional but paired; description
 * is optional, at most once; x-prop may repeat.
 *
 * NOTE(review): only ATTACH and DESCRIPTION are asserted here — presumably
 * the action/trigger and duration/repeat checks live in a shared or base
 * validator; confirm before relying on this class alone.
 */
PropertyValidator . getInstance ( ) . assertOne ( ATTACH , target . getProperties ( ) ) ; PropertyValidator . getInstance ( ) . assertOneOrLess ( DESCRIPTION , target . getProperties ( ) ) ;
public class ComposeEnvironmentsRequest { /** * A list of version labels , specifying one or more application source bundles that belong to the target * application . Each source bundle must include an environment manifest that specifies the name of the environment * and the name of the solution stack to use , and optionally can specify environment links to create . * @ param versionLabels * A list of version labels , specifying one or more application source bundles that belong to the target * application . Each source bundle must include an environment manifest that specifies the name of the * environment and the name of the solution stack to use , and optionally can specify environment links to * create . */ public void setVersionLabels ( java . util . Collection < String > versionLabels ) { } }
if ( versionLabels == null ) { this . versionLabels = null ; return ; } this . versionLabels = new com . amazonaws . internal . SdkInternalList < String > ( versionLabels ) ;
public class JSTalkBackFilter { /** * Returns parameter value from request if exists , otherwise - default value . * @ param req { @ link HttpServletRequest } * @ param paramName name of the parameter * @ param defaultValue parameter default value * @ return request parameter value */ private String getValueOrDefault ( HttpServletRequest req , String paramName , String defaultValue ) { } }
final String value = req . getParameter ( paramName ) ; return StringUtils . isEmpty ( value ) ? defaultValue : value ;
public class CmsDefaultValidationHandler { /** * The email should only be composed by digits and standard english letters , points , * underscores and exact one " At " symbol . < p > * @ see org . opencms . security . I _ CmsValidationHandler # checkEmail ( java . lang . String ) */ public void checkEmail ( String email ) throws CmsIllegalArgumentException { } }
if ( CmsStringUtil . isNotEmpty ( email ) ) { email = email . trim ( ) ; email = email . toLowerCase ( Locale . ROOT ) ; } if ( ! CmsStringUtil . validateRegex ( email , EMAIL_REGEX , false ) ) { throw new CmsIllegalArgumentException ( Messages . get ( ) . container ( Messages . ERR_EMAIL_VALIDATION_1 , email ) ) ; }
public class LoggingHandlerEnvironmentFacet { /** * Restores the original root Logger state , including Level and Handlers . */ public void restore ( ) { } }
if ( ! restored ) { // Remove the extra Handler from the RootLogger rootLogger . removeHandler ( mavenLogHandler ) ; // Restore the original state to the Root logger rootLogger . setLevel ( originalRootLoggerLevel ) ; for ( Handler current : originalHandlers ) { rootLogger . addHandler ( current ) ; } // All done . restored = true ; }
public class AlgorithmParameterGenerator { /** * Initializes this parameter generator with a set of algorithm - specific * parameter generation values . * @ param genParamSpec the set of algorithm - specific parameter generation values . * @ param random the source of randomness . * @ exception InvalidAlgorithmParameterException if the given parameter * generation values are inappropriate for this parameter generator . */ public final void init ( AlgorithmParameterSpec genParamSpec , SecureRandom random ) throws InvalidAlgorithmParameterException { } }
// Pure delegation to the provider's SPI implementation.
paramGenSpi . engineInit ( genParamSpec , random ) ;
public class DiagonalMatrix { /** * Returns row { @ code row } as a sparse vector . A diagonal matrix row has exactly one * non - zero entry , at column index equal to the row index . * { @ inheritDoc } */ public SparseDoubleVector getRowVector ( int row ) { } }
// Validate the index, then build a sparse vector with only the diagonal
// element set.
checkIndices ( row , 0 ) ; SparseDoubleVector vector = new SparseHashDoubleVector ( values . length ) ; vector . set ( row , values [ row ] ) ; return vector ;
public class PolylineEncoding { /** * Encodes a sequence of LatLngs into an encoded path string . */ public static String encode ( final List < LatLng > path ) { } }
long lastLat = 0 ; long lastLng = 0 ; final StringBuilder result = new StringBuilder ( ) ; for ( final LatLng point : path ) { long lat = Math . round ( point . lat * 1e5 ) ; long lng = Math . round ( point . lng * 1e5 ) ; long dLat = lat - lastLat ; long dLng = lng - lastLng ; encode ( dLat , result ) ; encode ( dLng , result ) ; lastLat = lat ; lastLng = lng ; } return result . toString ( ) ;
public class RDBMEntityLockStore { /** * Updates the lock ' s < code > expiration < / code > and < code > lockType < / code > in the underlying store . * The SQL is over - qualified to make sure the row has not been updated since the lock was last * checked . * @ param lock * @ param newExpiration java . util . Date * @ param newType Integer * @ param conn Connection */ private void primUpdate ( IEntityLock lock , Date newExpiration , Integer newType , Connection conn ) throws SQLException , LockingException { } }
// Optimistic update: the WHERE clause also matches the lock's OLD expiration
// and OLD type, so a row modified concurrently updates 0 rows and surfaces
// as a LockingException instead of silently clobbering the other writer.
Integer typeID = EntityTypesLocator . getEntityTypes ( ) . getEntityIDFromType ( lock . getEntityType ( ) ) ; String key = lock . getEntityKey ( ) ; int oldLockType = lock . getLockType ( ) ;
// null newType means "keep the current lock type".
int newLockType = ( newType == null ) ? oldLockType : newType . intValue ( ) ; java . sql . Timestamp oldTs = new java . sql . Timestamp ( lock . getExpirationTime ( ) . getTime ( ) ) ; java . sql . Timestamp newTs = new java . sql . Timestamp ( newExpiration . getTime ( ) ) ; String owner = lock . getLockOwner ( ) ; try { PreparedStatement ps = conn . prepareStatement ( getUpdateSql ( ) ) ; try { ps . setTimestamp ( 1 , newTs ) ; // new expiration ps . setInt ( 2 , newLockType ) ; // new lock type ps . setInt ( 3 , typeID . intValue ( ) ) ; // entity type ps . setString ( 4 , key ) ; // entity key ps . setString ( 5 , owner ) ; // lock owner ps . setTimestamp ( 6 , oldTs ) ; // old expiration ps . setInt ( 7 , oldLockType ) ; // old lock type ; if ( log . isDebugEnabled ( ) ) log . debug ( "RDBMEntityLockStore.primUpdate(): " + ps ) ; int rc = ps . executeUpdate ( ) ; if ( rc != 1 ) { String errString = "Problem updating " + lock ; log . error ( errString ) ; throw new LockingException ( errString ) ; } } finally { if ( ps != null ) ps . close ( ) ; } } catch ( java . sql . SQLException sqle ) { log . error ( sqle , sqle ) ; throw sqle ; }
public class DynatraceMeterRegistry { /** * Splits the given time series into batched POST payloads , each wrapped in the * fixed JSON envelope and kept under the maximum message size . * VisibleForTesting */ List < DynatraceBatchedPayload > createPostMessages ( String type , List < DynatraceTimeSeries > timeSeries ) { } }
// Budget for the series bodies is the max message size minus the fixed
// header/footer envelope, measured in UTF-8 bytes.
final String header = "{\"type\":\"" + type + '\"' + ",\"series\":[" ; final String footer = "]}" ; final int headerFooterBytes = header . getBytes ( UTF_8 ) . length + footer . getBytes ( UTF_8 ) . length ; final int maxMessageSize = MAX_MESSAGE_SIZE - headerFooterBytes ; List < DynatraceBatchedPayload > payloadBodies = createPostMessageBodies ( timeSeries , maxMessageSize ) ;
// Wrap each body in the envelope, preserving its metric count.
return payloadBodies . stream ( ) . map ( body -> { String message = header + body . payload + footer ; return new DynatraceBatchedPayload ( message , body . metricCount ) ; } ) . collect ( Collectors . toList ( ) ) ;
public class BlockPlacementPolicyConfigurable { /** * Picks up a remote machine within defined window * @ param rackIdx rack the request is coming from and that should be avoided * @ param firstRack rack that starts window * @ param rackWindow rack window size * @ param machineIdx index of first replica within its rack * @ param windowSize size of the machine window * @ param excludedNodes list of black listed nodes . * @ param blocksize size of a block * @ param maxReplicasPerRack maximum number of replicas per rack * @ param results List of results * @ param reverse adjustment when looking forward or backward . * @ return true when a machine was chosen , false when the rack window was exhausted * @ throws NotEnoughReplicasException */ protected boolean chooseRemoteRack ( int rackIdx , int firstRack , int rackWindow , int machineIdx , int windowSize , HashMap < Node , Node > excludedNodes , long blocksize , int maxReplicasPerRack , List < DatanodeDescriptor > results , boolean reverse ) throws NotEnoughReplicasException { } }
// Try racks inside [firstRack, firstRack+rackWindow), skipping the local
// rack, until one yields a machine. The machine window is rescaled to each
// candidate rack's size so placement stays proportional. The read lock
// pins the racks/racksMap topology while we scan.
readLock ( ) ; try { HashSet < Integer > excludedRacks = new HashSet < Integer > ( ) ; excludedRacks . add ( rackIdx ) ; int n = racks . size ( ) ; int currRackSize = racksMap . get ( racks . get ( rackIdx ) ) . rackNodes . size ( ) ; while ( excludedRacks . size ( ) < rackWindow ) { int newRack = randomIntInWindow ( firstRack , rackWindow , n , excludedRacks ) ; if ( newRack < 0 ) break ;
// Each tried rack is excluded so the random pick never repeats.
excludedRacks . add ( newRack ) ; int newRackSize = racksMap . get ( racks . get ( newRack ) ) . rackNodes . size ( ) ; int firstMachine = machineIdx * newRackSize / currRackSize ; int newWindowSize = windowSize ; if ( reverse ) { firstMachine = ( ( int ) Math . ceil ( ( double ) machineIdx * newRackSize / currRackSize ) ) % newRackSize ; newWindowSize = Math . max ( 1 , windowSize * newRackSize / currRackSize ) ; } if ( newWindowSize <= 0 ) { continue ; } if ( chooseMachine ( racks . get ( newRack ) , firstMachine , newWindowSize , excludedNodes , blocksize , maxReplicasPerRack , results ) ) { return true ; } } return false ; } finally { readUnlock ( ) ; }
public class Complex { /** * return a / b */ public Complex divides ( Complex b ) { } }
Complex a = this ; return a . times ( b . reciprocal ( ) ) ;
public class ForkJoinPool { /** * Submits a ForkJoinTask for execution . * @ param task the task to submit * @ return the task * @ throws NullPointerException if the task is null * @ throws RejectedExecutionException if the task cannot be * scheduled for execution */ public < T > ForkJoinTask < T > submit ( ForkJoinTask < T > task ) { } }
// Explicit null check per the documented contract; the same task instance
// is returned so callers can chain join()/get() on it.
if ( task == null ) throw new NullPointerException ( ) ; doSubmit ( task ) ; return task ;
public class SelectStageListener { /** * Reloads processIds that were created but never used before a mainstem switch : the new SELECT * module must pick up processes the previous SELECT created but did not use , otherwise they * would deadlock . ProcessIds already in use are handled by the e / t / l nodes . */ private void recovery ( Long pipelineId ) { } }
List < Long > currentProcessIds = stageMonitor . getCurrentProcessIds ( false ) ; for ( Long processId : currentProcessIds ) { String path = StagePathUtils . getProcess ( pipelineId , processId ) ; try { byte [ ] bytes = zookeeper . readData ( path ) ; ProcessNodeEventData nodeData = JsonUtils . unmarshalFromByte ( bytes , ProcessNodeEventData . class ) ; if ( nodeData . getStatus ( ) . isUnUsed ( ) ) { // re-queue the unused processId addReply ( processId ) ; } } catch ( ZkException e ) {
// Best effort: a single unreadable node should not abort recovery of the rest.
logger . error ( "SelectStageListener" , e ) ; } }
public class VarSet { /** * Returns the member variable with the given name . */ public IVarDef getMember ( String name ) { } }
int i = findMember ( name ) ; return i >= 0 ? members_ . get ( i ) : null ;
public class ConditionalCheck { /** * Ensures that a String argument is a number . This overload supports all subclasses of { @ code Number } . The number * is first converted to a BigInteger * @ param condition * condition must be { @ code true } ^ so that the check will be performed * @ param value * value which must be a number * @ param type * requested return value type , must be a subclass of { @ code Number } , i . e . one of { @ code BigDecimal , * BigInteger , Byte , Double , Float , Integer , Long , Short } * @ throws IllegalNumberArgumentException * if the given argument { @ code value } is no number */ @ ArgumentsChecked @ Throws ( { } }
IllegalNullArgumentException . class , IllegalNumberArgumentException . class } ) public static < T extends Number > void isNumber ( final boolean condition , @ Nonnull final String value , @ Nonnull final Class < T > type ) {
// Guarded check: the validation is skipped entirely when the pre-condition
// is false, so callers can make the check conditional without branching.
if ( condition ) { Check . isNumber ( value , type ) ; }
public class DataBlock { /** * < p > When Data Matrix Codes use multiple data blocks , they actually interleave the bytes of each of them . * That is , the first byte of data block 1 to n is written , then the second bytes , and so on . This * method will separate the data into original blocks . < / p > * @ param rawCodewords bytes as read directly from the Data Matrix Code * @ param version version of the Data Matrix Code * @ return DataBlocks containing original bytes , " de - interleaved " from representation in the * Data Matrix Code */ static DataBlock [ ] getDataBlocks ( byte [ ] rawCodewords , Version version ) { } }
// Figure out the number and size of data blocks used by this version
Version . ECBlocks ecBlocks = version . getECBlocks ( ) ; // First count the total number of data blocks int totalBlocks = 0 ; Version . ECB [ ] ecBlockArray = ecBlocks . getECBlocks ( ) ; for ( Version . ECB ecBlock : ecBlockArray ) { totalBlocks += ecBlock . getCount ( ) ; } // Now establish DataBlocks of the appropriate size and number of data codewords DataBlock [ ] result = new DataBlock [ totalBlocks ] ; int numResultBlocks = 0 ; for ( Version . ECB ecBlock : ecBlockArray ) { for ( int i = 0 ; i < ecBlock . getCount ( ) ; i ++ ) { int numDataCodewords = ecBlock . getDataCodewords ( ) ; int numBlockCodewords = ecBlocks . getECCodewords ( ) + numDataCodewords ; result [ numResultBlocks ++ ] = new DataBlock ( numDataCodewords , new byte [ numBlockCodewords ] ) ; } } // All blocks have the same amount of data , except that the last n // ( where n may be 0 ) have 1 less byte . Figure out where these start . // TODO ( bbrown ) : There is only one case where there is a difference for Data Matrix for size 144 int longerBlocksTotalCodewords = result [ 0 ] . codewords . length ; // int shorterBlocksTotalCodewords = longerBlocksTotalCodewords - 1; int longerBlocksNumDataCodewords = longerBlocksTotalCodewords - ecBlocks . getECCodewords ( ) ; int shorterBlocksNumDataCodewords = longerBlocksNumDataCodewords - 1 ; // The last elements of result may be 1 element shorter for 144 matrix // first fill out as many elements as all of them have minus 1 int rawCodewordsOffset = 0 ; for ( int i = 0 ; i < shorterBlocksNumDataCodewords ; i ++ ) { for ( int j = 0 ; j < numResultBlocks ; j ++ ) { result [ j ] . codewords [ i ] = rawCodewords [ rawCodewordsOffset ++ ] ; } } // Fill out the last data block in the longer ones . // Version 24 (the 144x144 symbol) is the special case : only its first 8
// blocks carry the extra data byte .
boolean specialVersion = version . getVersionNumber ( ) == 24 ; int numLongerBlocks = specialVersion ? 8 : numResultBlocks ; for ( int j = 0 ; j < numLongerBlocks ; j ++ ) { result [ j ] . codewords [ longerBlocksNumDataCodewords - 1 ] = rawCodewords [ rawCodewordsOffset ++ ] ; } // Now add in error correction blocks . For version 24 the block order is
// rotated by 8 and the shorter blocks read one index lower .
int max = result [ 0 ] . codewords . length ; for ( int i = longerBlocksNumDataCodewords ; i < max ; i ++ ) { for ( int j = 0 ; j < numResultBlocks ; j ++ ) { int jOffset = specialVersion ? ( j + 8 ) % numResultBlocks : j ; int iOffset = specialVersion && jOffset > 7 ? i - 1 : i ; result [ jOffset ] . codewords [ iOffset ] = rawCodewords [ rawCodewordsOffset ++ ] ; } } // Sanity check : every raw codeword must have been consumed exactly once .
if ( rawCodewordsOffset != rawCodewords . length ) { throw new IllegalArgumentException ( ) ; } return result ;
public class DataSet { /** * Creates a union of this DataSet with an other DataSet . The other DataSet must be of the same data type . * @ param other The other DataSet which is unioned with the current DataSet . * @ return The resulting DataSet . */ public UnionOperator < T > union ( DataSet < T > other ) { } }
// Same-type union; the call-site location is captured for clearer error
// reporting in the resulting operator.
return new UnionOperator < > ( this , other , Utils . getCallLocationName ( ) ) ;
public class InternalAuthentication {
    /**
     * Returns a list of users matching the search. Matching is
     * case-insensitive substring containment against the stored usernames.
     *
     * @param search the search string to execute
     * @return users whose username contains the search string
     * @throws AuthenticationException if there was an error searching
     */
    @Override
    public List<User> searchUsers(String search) throws AuthenticationException {
        // Hoisted out of the loop: the lowered search term never changes.
        final String needle = search.toLowerCase();
        // Snapshot the key set so getUser() calls cannot interfere with iteration.
        String[] usernames = file_store.keySet().toArray(new String[0]);
        List<User> found = new ArrayList<User>();
        for (String username : usernames) {
            if (username.toLowerCase().contains(needle)) {
                found.add(getUser(username));
            }
        }
        return found;
    }
}
public class MapMetaReader { /** * Reads a map file : records the file being read , resets the per - file global metadata , * then delegates the actual parsing to the superclass . * @ param filename filename */ @ Override public void read ( final File filename ) { } }
filePath = filename ; // clear the history on global metadata table globalMeta . clear ( ) ; super . read ( filename ) ;
public class LandmarkStorage { /** * This method forces the landmark preparation to skip the landmark search and uses the specified landmark list instead . * Useful for manual tuning of larger areas to safe import time or improve quality . */ public LandmarkStorage setLandmarkSuggestions ( List < LandmarkSuggestion > landmarkSuggestions ) { } }
if ( landmarkSuggestions == null ) throw new IllegalArgumentException ( "landmark suggestions cannot be null" ) ; this . landmarkSuggestions = landmarkSuggestions ; return this ;
public class AbstractAmazonSNSAsync { /** * Simplified method form for invoking the ListSubscriptionsByTopic operation with an AsyncHandler . * @ see # listSubscriptionsByTopicAsync ( ListSubscriptionsByTopicRequest , com . amazonaws . handlers . AsyncHandler ) */ @ Override public java . util . concurrent . Future < ListSubscriptionsByTopicResult > listSubscriptionsByTopicAsync ( String topicArn , com . amazonaws . handlers . AsyncHandler < ListSubscriptionsByTopicRequest , ListSubscriptionsByTopicResult > asyncHandler ) { } }
// Convenience overload: wrap the topic ARN in a request object and delegate.
return listSubscriptionsByTopicAsync ( new ListSubscriptionsByTopicRequest ( ) . withTopicArn ( topicArn ) , asyncHandler ) ;
public class EnvironmentsInner { /** * Create or replace an existing Environment . * @ param resourceGroupName The name of the resource group . * @ param labAccountName The name of the lab Account . * @ param labName The name of the lab . * @ param environmentSettingName The name of the environment Setting . * @ param environmentName The name of the environment . * @ param environment Represents an environment instance * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the EnvironmentInner object if successful . */ public EnvironmentInner createOrUpdate ( String resourceGroupName , String labAccountName , String labName , String environmentSettingName , String environmentName , EnvironmentInner environment ) { } }
// Blocking facade over the async variant: wait for the single response and
// unwrap its body.
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , labAccountName , labName , environmentSettingName , environmentName , environment ) . toBlocking ( ) . single ( ) . body ( ) ;
public class DataExtractionProcessor { /** * Main method . Processes the whole dump using this processor . To change * which dump file to use and whether to run in offline mode , modify the * settings in { @ link ExampleHelpers } . * @ param args * @ throws IOException */ public static void main ( String [ ] args ) throws IOException { } }
// Standard dump-processing flow: configure logging, print usage text,
// stream every entity from the Wikidata dump through this processor, then
// close to flush any buffered output.
ExampleHelpers . configureLogging ( ) ; DataExtractionProcessor . printDocumentation ( ) ; DataExtractionProcessor processor = new DataExtractionProcessor ( ) ; ExampleHelpers . processEntitiesFromWikidataDump ( processor ) ; processor . close ( ) ;
public class ExpansionPointGroupMember { /** * Attempts to add the given member to this expansion point . The member will not be added if * an equivalent entry ( according to its equals ( ) method ) already exists . If added , the member ' s * { @ link GroupMember # onAdded ( ) } method will be called . * @ param member The member to be added . Must not be null . * @ throws IllegalArgumentException if { @ code member } is null . */ protected void add ( GroupMember member ) { } }
// Set.add returns false for duplicates, so onAdded() fires only when the
// member was actually inserted.
Assert . notNull ( member , "member" ) ; if ( members . add ( member ) ) { member . onAdded ( ) ; }
public class ProfileContainer { /** * Override afterPaint to write the statistics after the component is painted . * @ param renderContext the renderContext to send output to . */ @ Override protected void afterPaint ( final RenderContext renderContext ) { } }
// Paint normally first, then append the profiling report. Output is only
// produced for web XML rendering contexts; other context types are ignored.
super . afterPaint ( renderContext ) ; // UIC serialization stats UicStats stats = new UicStats ( UIContextHolder . getCurrent ( ) ) ; stats . analyseWC ( this ) ; if ( renderContext instanceof WebXmlRenderContext ) { PrintWriter writer = ( ( WebXmlRenderContext ) renderContext ) . getWriter ( ) ; writer . println ( "<h2>Serialization Profile of UIC</h2>" ) ; UicStatsAsHtml . write ( writer , stats ) ; // ObjectProfiler writer . println ( "<h2>ObjectProfiler - " + getClass ( ) . getName ( ) + "</h2>" ) ; writer . println ( "<pre>" ) ; try { writer . println ( ObjectGraphDump . dump ( this ) . toFlatSummary ( ) ) ; } catch ( Exception e ) {
// Best effort: a failed object-graph dump must not break page rendering.
LOG . error ( "Failed to dump component" , e ) ; } writer . println ( "</pre>" ) ; }
public class br_broker { /** * < pre > * Use this operation to get Unified Repeater Instance . * < / pre > */ public static br_broker [ ] get ( nitro_service client ) throws Exception { } }
// Uses an empty prototype resource: validate the "get" operation, then
// fetch all Unified Repeater instances from the NITRO service.
br_broker resource = new br_broker ( ) ; resource . validate ( "get" ) ; return ( br_broker [ ] ) resource . get_resources ( client ) ;
public class DynamicReturnGeneratedKeys { /** * Validates the declared return type of a DAO method annotated with @ ReturnGeneratedKeys : * only { @ code void } or subclasses of { @ code Number } are accepted . ( If the declared type is * generic , the framework resolves it to the actual runtime type before calling this . ) * @ param returnType resolved return type of the DAO method * @ param metaData statement metadata , used in the error message * @ throws InvalidDataAccessApiUsageException when the return type is unsupported */ public void checkMethodReturnType ( Class < ? > returnType , StatementMetaData metaData ) { } }
// NOTE(review): primitive returns (int.class, long.class, ...) are NOT
// assignable to Number and would be rejected here even though the error
// message advertises int/long/double/float — presumably the caller passes
// boxed types; confirm how returnType is resolved upstream.
if ( returnType != void . class && ! Number . class . isAssignableFrom ( returnType ) ) { throw new InvalidDataAccessApiUsageException ( "error return type, only support int/long/double/float/void type for method with @ReturnGeneratedKeys:" + metaData . getMethod ( ) ) ; }
public class XMLParser { /** * Reads an attribute list declaration such as the following : < ! ATTLIST foo * bar CDATA # IMPLIED quux ( a | b | c ) " c " baz NOTATION ( a | b | c ) # FIXED " c " > * Each attribute has a name , type and default . * Types are one of the built - in types ( CDATA , ID , IDREF , IDREFS , ENTITY , * ENTITIES , NMTOKEN , or NMTOKENS ) , an enumerated type " ( list | of | options ) " * or NOTATION followed by an enumerated type . * The default is either # REQUIRED , # IMPLIED , # FIXED , a quoted value , or * # FIXED with a quoted value . * @ throws IOException Signals that an I / O exception has occurred . * @ throws KriptonRuntimeException the kripton runtime exception */ private void readAttributeListDeclaration ( ) throws IOException , KriptonRuntimeException { } }
// Consumes "<!ATTLIST elementName (attName type default)* >". Enumerated
// and NOTATION type lists are read and discarded; only quoted default
// values are recorded via defineAttributeDefault().
read ( START_ATTLIST ) ; skip ( ) ; String elementName = readName ( ) ; while ( true ) { skip ( ) ; int c = peekCharacter ( ) ; if ( c == '>' ) { position ++ ; return ; } // attribute name String attributeName = readName ( ) ; // attribute type skip ( ) ; if ( position + 1 >= limit && ! fillBuffer ( 2 ) ) { throw new KriptonRuntimeException ( "Malformed attribute list" , true , this . getLineNumber ( ) , this . getColumnNumber ( ) , getPositionDescription ( ) , null ) ; } if ( buffer [ position ] == NOTATION [ 0 ] && buffer [ position + 1 ] == NOTATION [ 1 ] ) { read ( NOTATION ) ; skip ( ) ; } c = peekCharacter ( ) ;
// Enumerated type: "( name | name | ... )" — names are consumed and dropped.
if ( c == '(' ) { position ++ ; while ( true ) { skip ( ) ; readName ( ) ; skip ( ) ; c = peekCharacter ( ) ; if ( c == ')' ) { position ++ ; break ; } else if ( c == '|' ) { position ++ ; } else { throw new KriptonRuntimeException ( "Malformed attribute type" , true , this . getLineNumber ( ) , this . getColumnNumber ( ) , getPositionDescription ( ) , null ) ; } } } else { readName ( ) ; } // default value : # REQUIRED , # IMPLIED or # FIXED ( the latter may be
// followed by a quoted literal ) .
skip ( ) ; c = peekCharacter ( ) ; if ( c == '#' ) { position ++ ; c = peekCharacter ( ) ; if ( c == 'R' ) { read ( REQUIRED ) ; } else if ( c == 'I' ) { read ( IMPLIED ) ; } else if ( c == 'F' ) { read ( FIXED ) ; } else { throw new KriptonRuntimeException ( "Malformed attribute type" , true , this . getLineNumber ( ) , this . getColumnNumber ( ) , getPositionDescription ( ) , null ) ; } skip ( ) ; c = peekCharacter ( ) ; } if ( c == '"' || c == '\'' ) { position ++ ; // TODO : does this do escaping correctly ? String value = readValue ( ( char ) c , true , true , ValueContext . ATTRIBUTE ) ; position ++ ; defineAttributeDefault ( elementName , attributeName , value ) ; } }
public class AttributesImplSerializer { /** * This method gets the index of an attribute given its qName . * @ param qname the qualified name of the attribute , e . g . " prefix1 : locName1" * @ return the integer index of the attribute . * @ see org . xml . sax . Attributes # getIndex ( String ) */ public final int getIndex ( String qname ) { } }
int index ; if ( super . getLength ( ) < MAX ) { // if we haven ' t got too many attributes let the // super class look it up index = super . getIndex ( qname ) ; return index ; } // we have too many attributes and the super class is slow // so find it quickly using our Hashtable . Integer i = ( Integer ) m_indexFromQName . get ( qname ) ; if ( i == null ) index = - 1 ; else index = i . intValue ( ) ; return index ;
public class Link { /** * Turns this link into the sentinel head of { @ code parent } : it points at the tail and * is marked with the HEAD state . Only used for the dummy head . * @ param parent owning list * @ param tail the tail sentinel this head should point to */ final void _setAsHead ( LinkedList parent , Link tail ) { } }
_nextLink = tail ; _parent = parent ; _state = LinkState . HEAD ;
public class JMFiles { /** * Gets line stream . A { @ code null } charset selects the default { @ code Files . lines ( path ) } overload . * Any failure is routed through the exception manager and an empty stream is returned instead of propagating . * NOTE ( review ) : the stream returned by Files . lines holds the file open — the caller is responsible for closing it . * @ param path the path * @ param charset the charset , may be null * @ return the line stream ( empty on error ) */ public static Stream < String > getLineStream ( Path path , Charset charset ) { } }
try { return charset == null ? Files . lines ( path ) : Files . lines ( path , charset ) ; } catch ( Exception e ) { return JMExceptionManager . handleExceptionAndReturn ( log , e , "getLineStream" , Stream :: empty , path , charset ) ; }
public class JQMList { /** * Remove the divider with the given text . This method will search all the dividers and remove the first divider * found with the given text . * @ return true if a divider with the given text was found and removed , otherwise false . */ public boolean removeDivider ( String text ) { } }
// Scan widgets in order; the first divider whose inner text matches is
// removed from both the widget list and the parallel items list.
// NOTE(review): assumes `list` and `items` stay index-aligned — confirm
// that every add path updates both collections together.
for ( int k = 0 ; k < list . getWidgetCount ( ) ; k ++ ) { Widget w = list . getWidget ( k ) ; if ( isDivider ( w ) && w . getElement ( ) . getInnerText ( ) . equals ( text ) ) { list . remove ( k ) ; items . remove ( k ) ; return true ; } } return false ;
public class CommercePriceListUserSegmentEntryRelPersistenceImpl { /** * Clears the cache for all commerce price list user segment entry rels . * The { @ link EntityCache } and { @ link FinderCache } are both cleared by this method . */ @ Override public void clearCache ( ) { } }
// Flush the entity cache for this model class plus every finder-cache
// region: by-primary-key, paginated list queries, and unpaginated list queries.
entityCache . clearCache ( CommercePriceListUserSegmentEntryRelImpl . class ) ; finderCache . clearCache ( FINDER_CLASS_NAME_ENTITY ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITH_PAGINATION ) ; finderCache . clearCache ( FINDER_CLASS_NAME_LIST_WITHOUT_PAGINATION ) ;
public class HibernateDocumentDao { /** * Returns the last { @ code maxResults } executions of the given specification , optionally * filtered by system under test , in chronological order . * { @ inheritDoc } */ public List < Execution > getSpecificationExecutions ( Specification specification , SystemUnderTest sut , int maxResults ) { } }
final Criteria crit = sessionService . getSession ( ) . createCriteria ( Execution . class ) ; crit . add ( Restrictions . eq ( "specification.id" , specification . getId ( ) ) ) ;
// Optional filter: a null SUT means executions for all systems under test.
if ( sut != null ) { crit . createAlias ( "systemUnderTest" , "sut" ) ; crit . add ( Restrictions . eq ( "sut.name" , sut . getName ( ) ) ) ; } /* crit . add ( Restrictions . or ( Restrictions . or ( Restrictions . not ( Restrictions . eq ( " errors " , 0 ) ) , Restrictions . not ( Restrictions . eq ( " success " , 0 ) ) ) , Restrictions . or ( Restrictions . not ( Restrictions . eq ( " ignored " , 0 ) ) , Restrictions . not ( Restrictions . eq ( " failures " , 0 ) ) ) ) ) ; */
// Query newest-first so maxResults keeps the most recent rows, initialize
// lazy collections before the session closes, then reverse into
// chronological order for the caller.
crit . addOrder ( Order . desc ( "executionDate" ) ) ; crit . setMaxResults ( maxResults ) ; @ SuppressWarnings ( "unchecked" ) List < Execution > executions = crit . list ( ) ; HibernateLazyInitializer . initCollection ( executions ) ; Collections . reverse ( executions ) ; return executions ;
public class DisruptorQueue { /** * { @ inheritDoc } * @ throws QueueException . EphemeralIsFull * if the ephemeral storage is full */ @ Override public IQueueMessage < ID , DATA > take ( ) throws QueueException . EphemeralIsFull { } }
if ( ! isEphemeralDisabled ( ) ) { int ephemeralMaxSize = getEphemeralMaxSize ( ) ; if ( ephemeralMaxSize > 0 && ephemeralStorage . size ( ) >= ephemeralMaxSize ) { throw new QueueException . EphemeralIsFull ( ephemeralMaxSize ) ; } } IQueueMessage < ID , DATA > msg = takeFromRingBuffer ( ) ; if ( msg != null && ! isEphemeralDisabled ( ) ) { ephemeralStorage . putIfAbsent ( msg . getId ( ) , msg ) ; } return msg ;
public class AbstractAWSSigner { /** * Hashes the binary data using the SHA - 256 algorithm . * @ param data * The binary data to hash . * @ return The hashed bytes from the specified data . * @ throws SdkClientException * If the hash cannot be computed . */ public byte [ ] hash ( byte [ ] data ) throws SdkClientException { } }
try { MessageDigest md = getMessageDigestInstance ( ) ; md . update ( data ) ; return md . digest ( ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to compute hash while signing request: " + e . getMessage ( ) , e ) ; }
public class JPAEntityManager {
    /**
     * Creates a query instance for the given pre-defined named query.
     *
     * @see javax.persistence.EntityManager#createNamedQuery(java.lang.String)
     */
    @Override
    public Query createNamedQuery(String name) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
            Tr.debug(tc, "em.createNamedQuery(" + name + ");\n" + toString());
        // Delegate to the provider EntityManager for this invocation;
        // the 'false' flag's exact semantics are defined by getEMInvocationInfo
        // — presumably "transaction not required" — TODO confirm.
        return getEMInvocationInfo(false).createNamedQuery(name);
    }
}
public class MessageBirdClient { /** * Retrieves the information of an existing contact . You only need to supply * the unique contact ID that was returned upon creation or receiving . */ public Contact viewContact ( final String id ) throws NotFoundException , GeneralException , UnauthorizedException { } }
if ( id == null ) { throw new IllegalArgumentException ( "Contact ID must be specified." ) ; } return messageBirdService . requestByID ( CONTACTPATH , id , Contact . class ) ;
public class DatabaseAdminClientSnippets {
    /**
     * Sample: applies a DDL statement to an existing database and blocks until
     * the schema-change operation completes.
     * [VARIABLE my_database_id]
     */
    public void updateDatabaseDdl(String instanceId, String databaseId) {
        // [START updateDatabaseDdl]
        try {
            // get() blocks on the long-running operation until the DDL finishes.
            dbAdminClient
                .updateDatabaseDdl(
                    instanceId,
                    databaseId,
                    Arrays.asList("ALTER TABLE Albums ADD COLUMN MarketingBudget INT64"),
                    null)
                .get();
        } catch (ExecutionException e) {
            // Unwrap the async failure; the cause is assumed to be a
            // SpannerException — TODO confirm no other cause types are possible.
            throw (SpannerException) e.getCause();
        } catch (InterruptedException e) {
            // Restores the interrupt status and rethrows as an unchecked Spanner exception.
            throw SpannerExceptionFactory.propagateInterrupt(e);
        }
        // [END updateDatabaseDdl]
    }
}
public class cachepolicylabel_stats { /** * Use this API to fetch statistics of cachepolicylabel _ stats resource of given name . */ public static cachepolicylabel_stats get ( nitro_service service , String labelname ) throws Exception { } }
cachepolicylabel_stats obj = new cachepolicylabel_stats ( ) ; obj . set_labelname ( labelname ) ; cachepolicylabel_stats response = ( cachepolicylabel_stats ) obj . stat_resource ( service ) ; return response ;
public class GoogleAdapter { /** * Return the current user profile . */ public UserProfile fetchUserProfile ( final Google google ) { } }
final UserInfo userInfo = google . oauth2Operations ( ) . getUserinfo ( ) ; return new UserProfileBuilder ( ) . setUsername ( userInfo . getId ( ) ) . setId ( userInfo . getId ( ) ) . setEmail ( userInfo . getEmail ( ) ) . setName ( userInfo . getName ( ) ) . setFirstName ( userInfo . getGivenName ( ) ) . setLastName ( userInfo . getFamilyName ( ) ) . build ( ) ;
public class Ifc4PackageImpl {
    /**
     * Returns the EClass for 'IfcDocumentInformationRelationship', resolved
     * lazily from the registered Ifc4 package on first access and cached.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcDocumentInformationRelationship() {
        if (ifcDocumentInformationRelationshipEClass == null) {
            // Index 190 is this classifier's generated position in the package
            // metamodel — generated code, do not edit by hand.
            ifcDocumentInformationRelationshipEClass = (EClass) EPackage.Registry.INSTANCE
                .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(190);
        }
        return ifcDocumentInformationRelationshipEClass;
    }
}
public class SimpleQueueSpecificationGenerator { /** * Given a { @ link FlowSpecification } . * @ param input { @ link FlowSpecification } * @ return A { @ link com . google . common . collect . Table } */ @ Override public Table < Node , String , Set < QueueSpecification > > create ( FlowSpecification input ) { } }
Table < Node , String , Set < QueueSpecification > > table = HashBasedTable . create ( ) ; String flow = input . getName ( ) ; Map < String , FlowletDefinition > flowlets = input . getFlowlets ( ) ; // Iterate through connections of a flow . for ( FlowletConnection connection : input . getConnections ( ) ) { final String source = connection . getSourceName ( ) ; final String target = connection . getTargetName ( ) ; final Node sourceNode = new Node ( connection . getSourceType ( ) , source ) ; Set < QueueSpecification > queueSpec = generateQueueSpecification ( flow , connection , flowlets . get ( target ) . getInputs ( ) , flowlets . get ( source ) . getOutputs ( ) ) ; Set < QueueSpecification > queueSpecifications = table . get ( sourceNode , target ) ; if ( queueSpecifications == null ) { queueSpecifications = Sets . newHashSet ( ) ; table . put ( sourceNode , target , queueSpecifications ) ; } queueSpecifications . addAll ( queueSpec ) ; } return table ;
public class TimeFilter { /** * Returns the type of the time filter . * @ return */ public TimeFrameFilterType getType ( ) { } }
if ( startDate == null && endDate == null ) { return TimeFrameFilterType . INOPERATIVE ; } else if ( startDate != null && endDate == null ) { return TimeFrameFilterType . MIN_DATE ; } else if ( startDate == null && endDate != null ) { return TimeFrameFilterType . MAX_DATE ; } else { return TimeFrameFilterType . TIMEFRAME ; }
public class AbstractBatcher {
    /**
     * {@inheritDoc}
     *
     * Blocks until the item has been enqueued.
     */
    @Override
    public final boolean addOrWait(T item) throws InterruptedException {
        // Reject null items up front.
        requireNonNull(item);
        // Fail fast once the batcher has been closed.
        checkState(!isClosed, "The batcher has been closed");
        // put() blocks while the backing queue is full — hence "orWait".
        backingQueue.put(item);
        return true;
    }
}
public class JsJmsStreamMessageImpl {
    /**
     * Write a byte value into the payload Stream.
     * Javadoc description supplied by JsJmsMessage interface.
     */
    public void writeByte(byte value) throws UnsupportedEncodingException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "writeByte", Byte.valueOf(value));
        // Append the boxed byte to the message's stream body list.
        getBodyList().add(Byte.valueOf(value));
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "writeByte");
    }
}
public class ZipkinQueryApiV2 {
    /**
     * Serializes traces as a JSON array-of-arrays in a single pre-sized buffer:
     * the exact output length is computed first, so the buffer never grows.
     * This is inlined here as there isn't enough re-use to warrant it being in the zipkin2 library.
     */
    static byte[] writeTraces(SpanBytesEncoder codec, List<List<zipkin2.Span>> traces) {
        // Get the encoded size of the nested list so that we don't need to grow the buffer
        int length = traces.size();
        int sizeInBytes = 2; // outer '[' and ']'
        if (length > 1) sizeInBytes += length - 1; // comma to join elements
        for (int i = 0; i < length; i++) {
            List<zipkin2.Span> spans = traces.get(i);
            int jLength = spans.size();
            // 2 bytes per trace for its own brackets — presumably emitted by
            // encodeList along with the inner commas; the per-span commas are
            // accounted for on the next line.
            sizeInBytes += 2;
            if (jLength > 1) sizeInBytes += jLength - 1; // comma to join elements
            for (int j = 0; j < jLength; j++) {
                sizeInBytes += codec.sizeInBytes(spans.get(j));
            }
        }
        byte[] out = new byte[sizeInBytes];
        int pos = 0;
        out[pos++] = '['; // start list of traces
        for (int i = 0; i < length; i++) {
            // encodeList returns the number of bytes it wrote at 'pos'.
            pos += codec.encodeList(traces.get(i), out, pos);
            if (i + 1 < length) out[pos++] = ',';
        }
        out[pos] = ']'; // stop list of traces
        return out;
    }
}
public class JmsDestinationImpl {
    /**
     * Static method that allows a replyTo destination to be obtained from a JsJmsMessage,
     * a ReverseRoutingPath and an optional JMS Core Connection object.
     *
     * Resolution order:
     *   a) JMS-specific compressed bytes carried in the message, else
     *   b) destination type queried over the live SICoreConnection, else
     *   c) a guess based on whether a reply discriminator is present.
     *
     * @param _msg CoreSPI message for which the JMS replyTo dest should be generated
     * @param rrp Reverse routing path of the message. Should not be queried directly from 'msg'
     *            for efficiency reasons.
     * @param _siConn JMS Core connection object that can be used if necessary to help determine
     *                the type of the destination (optional)
     * @return JmsDestinationImpl
     * @throws JMSException
     */
    static JmsDestinationImpl getJMSReplyToInternal(JsJmsMessage _msg, List<SIDestinationAddress> rrp, SICoreConnection _siConn) throws JMSException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getJMSReplyToInternal", new Object[] { _msg, rrp, _siConn });
        JmsDestinationImpl tempReplyTo = null;
        // Case a) - check for JMS specific data in compressed byte form.
        byte[] replyURIBytes = _msg.getJmsReplyTo();
        if (replyURIBytes != null) {
            tempReplyTo = (JmsDestinationImpl) JmsInternalsFactory.getMessageDestEncodingUtils()
                .getDestinationFromMsgRepresentation(replyURIBytes);
        }
        if (tempReplyTo == null) {
            // Cases b) & c) both depend on there being a reverse routing path, otherwise
            // there is no replyTo.
            // lookup the name of the dest in the reverse routing path
            SIDestinationAddress sida = null;
            if (rrp.size() > 0) {
                // The last element of the RRP becomes the reply to destination
                int lastDestInRRP = rrp.size() - 1;
                sida = rrp.get(lastDestInRRP);
                // Case b) - if we have a live connection, we can use that to query the dest type
                if (_siConn != null) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(tc, "Determine reply dest type using SICoreConnection");
                    try {
                        // get the destination configuration & type
                        DestinationConfiguration destConfig = _siConn.getDestinationConfiguration(sida);
                        DestinationType destType = destConfig.getDestinationType();
                        if (destType == DestinationType.TOPICSPACE) {
                            tempReplyTo = new JmsTopicImpl();
                        } else {
                            tempReplyTo = new JmsQueueImpl();
                        }
                    } catch (SIException sice) {
                        // No FFDC code needed
                        // d246604 Trace exceptions, but don't throw on. Fall back to
                        // case c) below.
                        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                            SibTr.debug(tc, "failed to look up dest type because of " + sice);
                            SibTr.debug(tc, "detail ", sice);
                        }
                    }
                }
                // Case c) - Guess based on the discriminator
                if (tempReplyTo == null) {
                    // 239238 - make a stab at determining whether it's a queue or topic
                    // reply destination based on the reply disciminator.
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(tc, "Guess reply dest type using reply discriminator");
                    String replyDiscrim = _msg.getReplyDiscriminator();
                    if ((replyDiscrim == null) || ("".equals(replyDiscrim))) {
                        tempReplyTo = new JmsQueueImpl();
                    } else {
                        tempReplyTo = new JmsTopicImpl();
                    }
                }
            }
        }
        // Now fill in the fields that were hidden in the reply header.
        if (tempReplyTo != null) {
            populateReplyToFromHeader(tempReplyTo, _msg, rrp);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getJMSReplyToInternal", tempReplyTo);
        return tempReplyTo;
    }
}
public class HashOrderMixing { /** * A constant { @ link HashOrderMixingStrategy } . This is useful if one needs to have * deterministic key distribution but wishes to control it manually . * Do not use the same constant for more than one container . * Consider using { @ linkplain ObjectScatterSet scatter maps or sets } instead * of constant hash order mixer . */ public static HashOrderMixingStrategy constant ( final long seed ) { } }
return new HashOrderMixingStrategy ( ) { @ Override public int newKeyMixer ( int newContainerBufferSize ) { return ( int ) BitMixer . mix64 ( newContainerBufferSize ^ seed ) ; } @ Override public HashOrderMixingStrategy clone ( ) { return this ; } } ;
public class DataSiftPush {
    /**
     * Create a push subscription to be consumed via {@link #pull(PushSubscription, int, String)} using a live stream.
     *
     * @param historics the historic query which will be consumed via pull
     * @param name a name for the subscription
     * @param initialStatus the initial status of the subscription
     * @param start an option timestamp of when to start the subscription
     * @param end an optional timestamp of when to stop
     * @return this
     */
    public FutureData<PushSubscription> createPull(PullJsonType jsonMeta, PreparedHistoricsQuery historics, String name, Status initialStatus, long start, long end) {
        // Delegates to the full overload; null fills the parameter this
        // convenience overload omits — presumably a stream/hash argument, TODO confirm.
        return createPull(jsonMeta, historics, null, name, initialStatus, start, end);
    }
}
public class JavassistUtils { /** * Checks if an annotation with the given name is either in the invisible or in the visible annotation attributes . */ private static boolean checkAnnotation ( AnnotationsAttribute invisible , AnnotationsAttribute visible , String annotationName ) { } }
boolean exist1 = false ; boolean exist2 = false ; if ( invisible != null ) { exist1 = invisible . getAnnotation ( annotationName ) != null ; } if ( visible != null ) { exist2 = visible . getAnnotation ( annotationName ) != null ; } return exist1 || exist2 ;
public class HSlicePredicate {
    /**
     * Set the columnName on which we will end.
     * Switches to {@link PredicateType#Range}
     *
     * @return this predicate, for call chaining
     */
    public HSlicePredicate<N> setEndOn(N finish) {
        this.finish = finish;
        // Defining an end column implies a range query rather than an explicit column list.
        predicateType = PredicateType.Range;
        return this;
    }
}
public class VideoMultipleWrapper {
    /**
     * Set the maximum number to be selected.
     *
     * @param count the maximum number.
     * @return this wrapper, for call chaining
     */
    public VideoMultipleWrapper selectCount(@IntRange(from = 1, to = Integer.MAX_VALUE) int count) {
        this.mLimitCount = count;
        return this;
    }
}
public class HttpServletRaSessionListener { /** * ( non - Javadoc ) * @ see javax . servlet . http . HttpSessionListener # sessionCreated ( javax . servlet . http . HttpSessionEvent ) */ public void sessionCreated ( HttpSessionEvent httpSessionEvent ) { } }
if ( active ) { if ( log . isDebugEnabled ( ) ) log . debug ( "sessionCreated sessionId = " + httpSessionEvent . getSession ( ) . getId ( ) ) ; }
public class FrontUtils { /** * Given a front , converts it to a Solution set of PointSolutions * @ param front * @ return A front as a List < FrontSolution > */ public static List < PointSolution > convertFrontToSolutionList ( Front front ) { } }
if ( front == null ) { throw new NullFrontException ( ) ; } int numberOfObjectives ; int solutionSetSize = front . getNumberOfPoints ( ) ; if ( front . getNumberOfPoints ( ) == 0 ) { numberOfObjectives = 0 ; } else { numberOfObjectives = front . getPoint ( 0 ) . getDimension ( ) ; } List < PointSolution > solutionSet = new ArrayList < > ( solutionSetSize ) ; for ( int i = 0 ; i < front . getNumberOfPoints ( ) ; i ++ ) { PointSolution solution = new PointSolution ( numberOfObjectives ) ; for ( int j = 0 ; j < numberOfObjectives ; j ++ ) { solution . setObjective ( j , front . getPoint ( i ) . getValue ( j ) ) ; } solutionSet . add ( solution ) ; } return solutionSet ;
public class Crouton {
    /**
     * Creates a {@link Crouton} with provided text-resource and style for a given activity.
     *
     * @param activity the {@link Activity} that represents the context in which the Crouton should exist
     * @param customView the custom {@link View} to display
     * @param viewGroup the {@link ViewGroup} that this {@link Crouton} should be added to
     * @return the created {@link Crouton}
     */
    public static Crouton make(Activity activity, View customView, ViewGroup viewGroup) {
        return new Crouton(activity, customView, viewGroup);
    }
}
public class Maybe {
    /**
     * {@inheritDoc}
     *
     * Applies {@code f} to the contained value when present; propagates
     * nothing() when absent.
     */
    @SuppressWarnings("RedundantTypeArguments")
    @Override
    public final <B> Maybe<B> flatMap(Function<? super A, ? extends Monad<B, Maybe<?>>> f) {
        // match(): the first branch handles the empty case (always nothing);
        // the second runs f and coerces the Monad result back to Maybe.
        // The explicit type arguments on the method reference keep javac's
        // inference from failing, hence the @SuppressWarnings above.
        return match(constantly(nothing()), f.andThen(Monad<B, Maybe<?>>::coerce));
    }
}