signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class ConferenceReader { /** * Add the requested query string arguments to the Request .
* @ param request Request to add query string arguments to */
private void addQueryParams ( final Request request ) { } } | if ( absoluteDateCreated != null ) { request . addQueryParam ( "DateCreated" , absoluteDateCreated . toString ( Request . QUERY_STRING_DATE_FORMAT ) ) ; } else if ( rangeDateCreated != null ) { request . addQueryDateRange ( "DateCreated" , rangeDateCreated ) ; } if ( absoluteDateUpdated != null ) { request . addQueryParam ( "DateUpdated" , absoluteDateUpdated . toString ( Request . QUERY_STRING_DATE_FORMAT ) ) ; } else if ( rangeDateUpdated != null ) { request . addQueryDateRange ( "DateUpdated" , rangeDateUpdated ) ; } if ( friendlyName != null ) { request . addQueryParam ( "FriendlyName" , friendlyName ) ; } if ( status != null ) { request . addQueryParam ( "Status" , status . toString ( ) ) ; } if ( getPageSize ( ) != null ) { request . addQueryParam ( "PageSize" , Integer . toString ( getPageSize ( ) ) ) ; } |
public class CassandraClientBase { /** * Persist join table by cql .
* @ param joinTableData
* the join table data
* @ param conn
* the conn */
protected void persistJoinTableByCql ( JoinTableData joinTableData , Cassandra . Client conn ) { } } | String joinTableName = joinTableData . getJoinTableName ( ) ; String invJoinColumnName = joinTableData . getInverseJoinColumnName ( ) ; Map < Object , Set < Object > > joinTableRecords = joinTableData . getJoinTableRecords ( ) ; EntityMetadata entityMetadata = KunderaMetadataManager . getEntityMetadata ( kunderaMetadata , joinTableData . getEntityClass ( ) ) ; // need to bring in an insert query for this
// add columns & execute query
CQLTranslator translator = new CQLTranslator ( ) ; String batch_Query = CQLTranslator . BATCH_QUERY ; String insert_Query = translator . INSERT_QUERY ; StringBuilder builder = new StringBuilder ( ) ; builder . append ( CQLTranslator . DEFAULT_KEY_NAME ) ; builder . append ( CQLTranslator . COMMA_STR ) ; builder . append ( translator . ensureCase ( new StringBuilder ( ) , joinTableData . getJoinColumnName ( ) , false ) ) ; builder . append ( CQLTranslator . COMMA_STR ) ; builder . append ( translator . ensureCase ( new StringBuilder ( ) , joinTableData . getInverseJoinColumnName ( ) , false ) ) ; insert_Query = StringUtils . replace ( insert_Query , CQLTranslator . COLUMN_FAMILY , translator . ensureCase ( new StringBuilder ( ) , joinTableName , false ) . toString ( ) ) ; insert_Query = StringUtils . replace ( insert_Query , CQLTranslator . COLUMNS , builder . toString ( ) ) ; StringBuilder columnValueBuilder = new StringBuilder ( ) ; StringBuilder statements = new StringBuilder ( ) ; // insert query for each row key
for ( Object key : joinTableRecords . keySet ( ) ) { PropertyAccessor accessor = PropertyAccessorFactory . getPropertyAccessor ( ( Field ) entityMetadata . getIdAttribute ( ) . getJavaMember ( ) ) ; Set < Object > values = joinTableRecords . get ( key ) ; // join column value
for ( Object value : values ) { if ( value != null ) { String insertQuery = insert_Query ; columnValueBuilder . append ( CQLTranslator . QUOTE_STR ) ; columnValueBuilder . append ( PropertyAccessorHelper . getString ( key ) + "\001" + PropertyAccessorHelper . getString ( value ) ) ; columnValueBuilder . append ( CQLTranslator . QUOTE_STR ) ; columnValueBuilder . append ( CQLTranslator . COMMA_STR ) ; translator . appendValue ( columnValueBuilder , key . getClass ( ) , key , true , false ) ; columnValueBuilder . append ( CQLTranslator . COMMA_STR ) ; translator . appendValue ( columnValueBuilder , value . getClass ( ) , value , true , false ) ; insertQuery = StringUtils . replace ( insertQuery , CQLTranslator . COLUMN_VALUES , columnValueBuilder . toString ( ) ) ; statements . append ( insertQuery ) ; statements . append ( " " ) ; } } } if ( ! StringUtils . isBlank ( statements . toString ( ) ) ) { batch_Query = StringUtils . replace ( batch_Query , CQLTranslator . STATEMENT , statements . toString ( ) ) ; StringBuilder batchBuilder = new StringBuilder ( ) ; batchBuilder . append ( batch_Query ) ; batchBuilder . append ( CQLTranslator . APPLY_BATCH ) ; execute ( batchBuilder . toString ( ) , conn ) ; } |
public class DocumentsClient { /** * Deletes the specified document .
* < p > Operation & lt ; response : [ google . protobuf . Empty ] [ google . protobuf . Empty ] , metadata :
* [ KnowledgeOperationMetadata ] [ google . cloud . dialogflow . v2beta1 . KnowledgeOperationMetadata ] & gt ;
* < p > Sample code :
* < pre > < code >
* try ( DocumentsClient documentsClient = DocumentsClient . create ( ) ) {
* DocumentName name = DocumentName . of ( " [ PROJECT ] " , " [ KNOWLEDGE _ BASE ] " , " [ DOCUMENT ] " ) ;
* DeleteDocumentRequest request = DeleteDocumentRequest . newBuilder ( )
* . setName ( name . toString ( ) )
* . build ( ) ;
* documentsClient . deleteDocumentAsync ( request ) . get ( ) ;
* < / code > < / pre >
* @ param request The request object containing all of the parameters for the API call .
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi ( "The surface for long-running operations is not stable yet and may change in the future." ) public final OperationFuture < Empty , KnowledgeOperationMetadata > deleteDocumentAsync ( DeleteDocumentRequest request ) { } } | return deleteDocumentOperationCallable ( ) . futureCall ( request ) ; |
public class ComposableBody { /** * Parse a static body instance into a composable instance . This is an
* expensive operation and should not be used lightly .
* The current implementation does not obtain the payload XML by means of
* a proper XML parser . It uses some string pattern searching to find the
* first @ { code body } element and the last element ' s closing tag . It is
* assumed that the static body ' s XML is well formed , etc . . This
* implementation may change in the future .
* @ param body static body instance to convert
* @ return composable body instance
* @ throws BOSHException */
static ComposableBody fromStaticBody ( final StaticBody body ) throws BOSHException { } } | String raw = body . toXML ( ) ; Matcher matcher = BOSH_START . matcher ( raw ) ; if ( ! matcher . find ( ) ) { throw ( new BOSHException ( "Could not locate 'body' element in XML. The raw XML did" + " not match the pattern: " + BOSH_START ) ) ; } String payload ; if ( ">" . equals ( matcher . group ( 1 ) ) ) { int first = matcher . end ( ) ; int last = raw . lastIndexOf ( "</" ) ; if ( last < first ) { last = first ; } payload = raw . substring ( first , last ) ; } else { payload = "" ; } return new ComposableBody ( body . getAttributes ( ) , payload ) ; |
public class DescribeVpcPeeringConnectionsRequest { /** * One or more VPC peering connection IDs .
* Default : Describes all your VPC peering connections .
* @ return One or more VPC peering connection IDs . < / p >
* Default : Describes all your VPC peering connections . */
public java . util . List < String > getVpcPeeringConnectionIds ( ) { } } | if ( vpcPeeringConnectionIds == null ) { vpcPeeringConnectionIds = new com . amazonaws . internal . SdkInternalList < String > ( ) ; } return vpcPeeringConnectionIds ; |
public class ApiOvhOrder { /** * Create order
* REST : POST / order / license / sqlserver / { serviceName } / upgrade / { duration }
* @ param version [ required ] This license version
* @ param serviceName [ required ] The name of your SQL Server license
* @ param duration [ required ] Duration */
public OvhOrder license_sqlserver_serviceName_upgrade_duration_POST ( String serviceName , String duration , OvhSqlServerVersionEnum version ) throws IOException { } } | String qPath = "/order/license/sqlserver/{serviceName}/upgrade/{duration}" ; StringBuilder sb = path ( qPath , serviceName , duration ) ; HashMap < String , Object > o = new HashMap < String , Object > ( ) ; addBody ( o , "version" , version ) ; String resp = exec ( qPath , "POST" , sb . toString ( ) , o ) ; return convertTo ( resp , OvhOrder . class ) ; |
public class InstanceQuery { /** * Create the SQL statement .
* @ return StringBuilder containing the statement
* @ throws EFapsException on error */
protected String createSQLStatement ( ) throws EFapsException { } } | final SQLSelect select = new SQLSelect ( ) . column ( 0 , "ID" ) . from ( getBaseType ( ) . getMainTable ( ) . getSqlTable ( ) , 0 ) ; // if the main table has a column for the type it is selected also
if ( getBaseType ( ) . getMainTable ( ) . getSqlColType ( ) != null ) { select . column ( 0 , getBaseType ( ) . getMainTable ( ) . getSqlColType ( ) ) ; } // add child tables
if ( getSqlTable2Index ( ) . size ( ) > 0 ) { for ( final Entry < SQLTable , Integer > entry : getSqlTable2Index ( ) . entrySet ( ) ) { if ( entry . getValue ( ) > 0 ) { select . leftJoin ( entry . getKey ( ) . getSqlTable ( ) , entry . getValue ( ) , "ID" , 0 , "ID" ) ; } } } select . addSection ( getWhere ( ) ) ; select . addSection ( getOrderBy ( ) ) ; select . addSection ( getLimit ( ) ) ; return select . getSQL ( ) ; |
public class IfcReferencesValueDocumentImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ SuppressWarnings ( "unchecked" ) public EList < IfcAppliedValue > getReferencingValues ( ) { } } | return ( EList < IfcAppliedValue > ) eGet ( Ifc2x3tc1Package . Literals . IFC_REFERENCES_VALUE_DOCUMENT__REFERENCING_VALUES , true ) ; |
public class Enforcer { /** * hasRoleForUser determines whether a user has a role .
* @ param name the user .
* @ param role the role .
* @ return whether the user has the role . */
public boolean hasRoleForUser ( String name , String role ) { } } | List < String > roles = getRolesForUser ( name ) ; boolean hasRole = false ; for ( String r : roles ) { if ( r . equals ( role ) ) { hasRole = true ; break ; } } return hasRole ; |
public class Tuple16 { /** * Skip 10 degrees from this tuple . */
public final Tuple6 < T11 , T12 , T13 , T14 , T15 , T16 > skip10 ( ) { } } | return new Tuple6 < > ( v11 , v12 , v13 , v14 , v15 , v16 ) ; |
public class MethodWriterImpl { /** * Get the signature for the given method .
* @ param method the method being documented .
* @ return a content object for the signature */
public Content getSignature ( MethodDoc method ) { } } | Content pre = new HtmlTree ( HtmlTag . PRE ) ; writer . addAnnotationInfo ( method , pre ) ; addModifiers ( method , pre ) ; addTypeParameters ( method , pre ) ; addReturnType ( method , pre ) ; if ( configuration . linksource ) { Content methodName = new StringContent ( method . name ( ) ) ; writer . addSrcLink ( method , methodName , pre ) ; } else { addName ( method . name ( ) , pre ) ; } int indent = pre . charCount ( ) ; addParameters ( method , pre , indent ) ; addExceptions ( method , pre , indent ) ; return pre ; |
public class NameCache { /** * Add a given name to the cache or track use count .
* exist . If the name already exists , then the internal value is returned .
* @ param name name to be looked up
* @ return internal value for the name if found ; otherwise null */
K put ( final K name ) { } } | K internal = cache . get ( name ) ; if ( internal != null ) { lookups ++ ; return internal ; } // Track the usage count only during initialization
if ( ! initialized ) { UseCount useCount = transientMap . get ( name ) ; if ( useCount != null ) { useCount . increment ( ) ; if ( useCount . get ( ) >= useThreshold ) { promote ( name ) ; } return useCount . value ; } useCount = new UseCount ( name ) ; transientMap . put ( name , useCount ) ; } return null ; |
public class CSVMultiTokExporter { /** * Takes a match and outputs a csv - line
* @ param graph
* @ param alignmc
* @ param matchNumber
* @ param out
* @ throws java . io . IOException */
@ Override public void outputText ( SDocumentGraph graph , boolean alignmc , int matchNumber , Writer out ) throws IOException , IllegalArgumentException { } } | // first match
if ( matchNumber == 0 ) { // output header
List < String > headerLine = new ArrayList < > ( ) ; for ( Map . Entry < Integer , TreeSet < String > > match : annotationsForMatchedNodes . entrySet ( ) ) { int node_id = match . getKey ( ) ; headerLine . add ( String . valueOf ( node_id ) + "_id" ) ; headerLine . add ( String . valueOf ( node_id ) + "_span" ) ; for ( String annoName : match . getValue ( ) ) { headerLine . add ( String . valueOf ( node_id ) + "_anno_" + annoName ) ; } } for ( String key : metakeys ) { headerLine . add ( "meta_" + key ) ; } out . append ( StringUtils . join ( headerLine , "\t" ) ) ; out . append ( "\n" ) ; } // output nodes in the order of the matches
SortedMap < Integer , String > contentLine = new TreeMap < > ( ) ; for ( SNode node : this . getMatchedNodes ( graph ) ) { List < String > nodeLine = new ArrayList < > ( ) ; // export id
RelannisNodeFeature feats = RelannisNodeFeature . extract ( node ) ; nodeLine . add ( String . valueOf ( feats . getInternalID ( ) ) ) ; // export spanned text
String span = graph . getText ( node ) ; if ( span != null ) nodeLine . add ( graph . getText ( node ) ) ; else nodeLine . add ( "" ) ; // export annotations
int node_id = node . getFeature ( AnnisConstants . ANNIS_NS , AnnisConstants . FEAT_MATCHEDNODE ) . getValue_SNUMERIC ( ) . intValue ( ) ; for ( String annoName : annotationsForMatchedNodes . get ( node_id ) ) { SAnnotation anno = node . getAnnotation ( annoName ) ; if ( anno != null ) { nodeLine . add ( anno . getValue_STEXT ( ) ) ; } else nodeLine . add ( "'NULL'" ) ; } // add everything to line
contentLine . put ( node_id , StringUtils . join ( nodeLine , "\t" ) ) ; } out . append ( StringUtils . join ( contentLine . values ( ) , "\t" ) ) ; // export Metadata
// TODO cache the metadata
if ( ! metakeys . isEmpty ( ) ) { // TODO is this the best way to get the corpus name ?
String corpus_name = CommonHelper . getCorpusPath ( java . net . URI . create ( graph . getDocument ( ) . getId ( ) ) ) . get ( 0 ) ; List < Annotation > asList = Helper . getMetaData ( corpus_name , graph . getDocument ( ) . getName ( ) ) ; for ( Annotation anno : asList ) { if ( metakeys . contains ( anno . getName ( ) ) ) out . append ( "\t" + anno . getValue ( ) ) ; } } out . append ( "\n" ) ; |
public class Lexicon { /** * Sets the weighting scheme for a specific field * */
public void setMethod ( WeightingMethod method , String fieldName ) { } } | WeightingMethod existingmethod = this . customWeights . get ( fieldName ) ; if ( existingmethod == null ) { this . customWeights . put ( fieldName , method ) ; return ; } // already one specified : check that it is the same as the one we have
if ( ! method . equals ( existingmethod ) ) throw new RuntimeException ( "Already set weight of field " + fieldName + " to " + existingmethod . toString ( ) ) ; |
public class FilePathMappingUtils { /** * Builds the File path mapping and add it to the file mappings of the bundle
* @ param bundle the bundle
* @ param path the resource path
* @ param rsHandler the resource reader handler
* @ return the file path mapping */
public static FilePathMapping buildFilePathMapping ( JoinableResourceBundle bundle , String path , ResourceReaderHandler rsHandler ) { } } | FilePathMapping fPathMapping = null ; String filePath = rsHandler . getFilePath ( path ) ; if ( filePath != null ) { File f = new File ( filePath ) ; if ( f . exists ( ) ) { fPathMapping = new FilePathMapping ( bundle , filePath , f . lastModified ( ) ) ; if ( bundle != null ) { bundle . getLinkedFilePathMappings ( ) . add ( fPathMapping ) ; } } else { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "The file path '" + filePath + "' associated to the URL '" + path + "' doesn't exixts." ) ; } } } return fPathMapping ; |
public class BasicFrequencyCounter { /** * 删除掉早于一定时间的记录
* @ param tillWhen清除到哪个时间点为止 */
@ Override public void purge ( long tillWhen ) { } } | Long t ; while ( ( t = ( ( NavigableMap < Long , AtomicLong > ) counters . getMap ( ) ) . firstKey ( ) ) != null && t < tillWhen ) { counters . remove ( t ) ; } |
public class ErlangDistributionTypeImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public void setK ( double newK ) { } } | double oldK = k ; k = newK ; boolean oldKESet = kESet ; kESet = true ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , BpsimPackage . ERLANG_DISTRIBUTION_TYPE__K , oldK , k , ! oldKESet ) ) ; |
public class RootBeer { /** * When you ' re root you can change the permissions on common system directories , this method checks if any of these patha Const . pathsThatShouldNotBeWrtiable are writable .
* @ return true if one of the dir is writable */
public boolean checkForRWPaths ( ) { } } | boolean result = false ; String [ ] lines = mountReader ( ) ; if ( lines == null ) { // Could not read , assume false ;
return false ; } for ( String line : lines ) { // Split lines into parts
String [ ] args = line . split ( " " ) ; if ( args . length < 4 ) { // If we don ' t have enough options per line , skip this and log an error
QLog . e ( "Error formatting mount line: " + line ) ; continue ; } String mountPoint = args [ 1 ] ; String mountOptions = args [ 3 ] ; for ( String pathToCheck : Const . pathsThatShouldNotBeWrtiable ) { if ( mountPoint . equalsIgnoreCase ( pathToCheck ) ) { // Split options out and compare against " rw " to avoid false positives
for ( String option : mountOptions . split ( "," ) ) { if ( option . equalsIgnoreCase ( "rw" ) ) { QLog . v ( pathToCheck + " path is mounted with rw permissions! " + line ) ; result = true ; break ; } } } } } return result ; |
public class CommerceVirtualOrderItemUtil { /** * Returns the last commerce virtual order item in the ordered set where uuid = & # 63 ; and companyId = & # 63 ; .
* @ param uuid the uuid
* @ param companyId the company ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce virtual order item , or < code > null < / code > if a matching commerce virtual order item could not be found */
public static CommerceVirtualOrderItem fetchByUuid_C_Last ( String uuid , long companyId , OrderByComparator < CommerceVirtualOrderItem > orderByComparator ) { } } | return getPersistence ( ) . fetchByUuid_C_Last ( uuid , companyId , orderByComparator ) ; |
public class BinaryArrayBulkInsertWeakHeap { /** * Create a heap from an array of elements . The elements of the array are
* not destroyed . The method has linear time complexity .
* @ param < K >
* the type of keys maintained by the heap
* @ param array
* an array of elements
* @ param comparator
* the comparator to use
* @ return a binary heap
* @ throws IllegalArgumentException
* in case the array is null */
@ LinearTime public static < K > BinaryArrayBulkInsertWeakHeap < K > heapify ( K [ ] array , Comparator < ? super K > comparator ) { } } | if ( array == null ) { throw new IllegalArgumentException ( "Array cannot be null" ) ; } if ( array . length == 0 ) { return new BinaryArrayBulkInsertWeakHeap < K > ( comparator ) ; } BinaryArrayBulkInsertWeakHeap < K > h = new BinaryArrayBulkInsertWeakHeap < K > ( comparator , array . length ) ; System . arraycopy ( array , 0 , h . array , 0 , array . length ) ; h . size = array . length ; for ( int j = h . size - 1 ; j > 0 ; j -- ) { h . joinWithComparator ( h . dancestor ( j ) , j ) ; } return h ; |
public class PureFunctionIdentifier { /** * Fill all of the auxiliary data - structures used by this pass based on the results in { @ code
* referenceMap } .
* < p > This is the first step of analysis . These structures will be used by a traversal that
* analyzes the bodies of located functions for side - effects . That traversal is separate because
* it needs access to scopes and also depends on global knowledge of functions . */
private void populateDatastructuresForAnalysisTraversal ( ReferenceMap referenceMap ) { } } | // Merge the prop and name references into a single multimap since only the name matters .
ArrayListMultimap < String , Node > referencesByName = ArrayListMultimap . create ( ) ; for ( Map . Entry < String , ? extends List < Node > > entry : referenceMap . getNameReferences ( ) ) { referencesByName . putAll ( entry . getKey ( ) , entry . getValue ( ) ) ; } for ( Map . Entry < String , ? extends List < Node > > entry : referenceMap . getPropReferences ( ) ) { referencesByName . putAll ( PROP_NAME_PREFIX + entry . getKey ( ) , entry . getValue ( ) ) ; } // Empty function names cause a crash during analysis that is better to detect here .
// Additionally , functions require a name to be invoked in a statically analyzable way ; there ' s
// no value in tracking the set of anonymous functions .
checkState ( ! referencesByName . containsKey ( "" ) ) ; checkState ( ! referencesByName . containsKey ( PROP_NAME_PREFIX ) ) ; // Create and store a summary for all known names .
for ( String name : referencesByName . keySet ( ) ) { summariesByName . put ( name , AmbiguatedFunctionSummary . createInGraph ( reverseCallGraph , name ) ) ; } Multimaps . asMap ( referencesByName ) . forEach ( this :: populateFunctionDefinitions ) ; |
public class MesosTaskManagerParameters { /** * Build a list of URIs for providing custom artifacts to Mesos tasks .
* @ param uris a comma delimited optional string listing artifact URIs */
public static List < String > buildUris ( Option < String > uris ) { } } | if ( uris . isEmpty ( ) ) { return Collections . emptyList ( ) ; } else { List < String > urisList = new ArrayList < > ( ) ; for ( String uri : uris . get ( ) . split ( "," ) ) { urisList . add ( uri . trim ( ) ) ; } return urisList ; } |
public class IO { /** * Returns a Set containing only single words . */
public static ImmutableSet < String > lowercaseWordSet ( final Class < ? > origin , final String resource , final boolean eliminatePrepAndConj ) throws IOException { } } | return ImmutableSet . copyOf ( new HashSet < String > ( ) { { readResource ( origin , resource , new NullReturnLineProcessor ( ) { @ Override public boolean processLine ( @ Nonnull final String line ) { final String l = simplify ( line ) ; if ( ! l . isEmpty ( ) && ! l . startsWith ( "#" ) ) { for ( final String part : SPACES . split ( l ) ) { if ( eliminatePrepAndConj ) { final String wordType = Dictionary . checkup ( part ) ; if ( wordType != null && ( wordType . startsWith ( "IN" ) || wordType . startsWith ( "CC" ) ) ) { continue ; } } // add to the containing HashSet we are currently in the init block of
add ( toEngLowerCase ( clean ( part ) ) ) ; } } return true ; } } ) ; } } ) ; |
public class SoundManager { /** * adds an IzouSoundLine , will now be tracked by the SoundManager
* @ param addOnModel the addOnModel where the IzouSoundLine belongs to
* @ param izouSoundLine the IzouSoundLine to add */
public void addIzouSoundLine ( AddOnModel addOnModel , IzouSoundLineBaseClass izouSoundLine ) { } } | debug ( "adding soundLine " + izouSoundLine + " from " + addOnModel ) ; if ( permanentAddOn != null && permanentAddOn . equals ( addOnModel ) ) { addPermanent ( izouSoundLine ) ; } else { addNonPermanent ( addOnModel , izouSoundLine ) ; } izouSoundLine . registerCloseCallback ( voit -> closeCallback ( addOnModel , izouSoundLine ) ) ; izouSoundLine . registerMuteCallback ( voit -> muteCallback ( addOnModel , izouSoundLine ) ) ; |
public class DevicesInner { /** * Downloads the updates on a data box edge / gateway device .
* @ param deviceName The device name .
* @ param resourceGroupName The resource group name .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */
public void beginDownloadUpdates ( String deviceName , String resourceGroupName ) { } } | beginDownloadUpdatesWithServiceResponseAsync ( deviceName , resourceGroupName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class Router { /** * get Content - Type from URLConnection */
private String getRequestHeaderContentType ( ) { } } | String contentType = getRequestHeaderValue ( "Content-Type" ) ; if ( contentType != null ) { // remove parameter ( Content - Type : = type " / " subtype * [ " ; " parameter ] )
int index = contentType . indexOf ( ';' ) ; if ( index > 0 ) contentType = contentType . substring ( 0 , index ) ; contentType = contentType . trim ( ) ; } return contentType ; |
public class AmazonCloudDirectoryClient { /** * Updates a < a > TypedLinkFacet < / a > . For more information , see < a href =
* " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / directory _ objects _ links . html # directory _ objects _ links _ typedlink "
* > Typed Links < / a > .
* @ param updateTypedLinkFacetRequest
* @ return Result of the UpdateTypedLinkFacet operation returned by the service .
* @ throws InternalServiceException
* Indicates a problem that must be resolved by Amazon Web Services . This might be a transient error in
* which case you can retry your request until it succeeds . Otherwise , go to the < a
* href = " http : / / status . aws . amazon . com / " > AWS Service Health Dashboard < / a > site to see if there are any
* operational issues with the service .
* @ throws InvalidArnException
* Indicates that the provided ARN value is not valid .
* @ throws RetryableConflictException
* Occurs when a conflict with a previous successful write is detected . For example , if a write operation
* occurs on an object and then an attempt is made to read the object using “ SERIALIZABLE ” consistency , this
* exception may result . This generally occurs when the previous write did not have time to propagate to the
* host serving the current request . A retry ( with appropriate backoff logic ) is the recommended response to
* this exception .
* @ throws ValidationException
* Indicates that your request is malformed in some manner . See the exception message .
* @ throws LimitExceededException
* Indicates that limits are exceeded . See < a
* href = " https : / / docs . aws . amazon . com / clouddirectory / latest / developerguide / limits . html " > Limits < / a > for more
* information .
* @ throws AccessDeniedException
* Access denied . Check your permissions .
* @ throws FacetValidationException
* The < a > Facet < / a > that you provided was not well formed or could not be validated with the schema .
* @ throws InvalidFacetUpdateException
* An attempt to modify a < a > Facet < / a > resulted in an invalid schema exception .
* @ throws ResourceNotFoundException
* The specified resource could not be found .
* @ throws FacetNotFoundException
* The specified < a > Facet < / a > could not be found .
* @ throws InvalidRuleException
* Occurs when any of the rule parameter keys or values are invalid .
* @ sample AmazonCloudDirectory . UpdateTypedLinkFacet
* @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / clouddirectory - 2017-01-11 / UpdateTypedLinkFacet "
* target = " _ top " > AWS API Documentation < / a > */
@ Override public UpdateTypedLinkFacetResult updateTypedLinkFacet ( UpdateTypedLinkFacetRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeUpdateTypedLinkFacet ( request ) ; |
public class JaxbUtils { /** * Marshals the given data using a given context . A < code > null < / code > data argument returns < code > null < / code > .
* @ param ctx
* Context to use - Cannot be < code > null < / code > .
* @ param data
* Data to serialize or < code > null < / code > .
* @ return XML data or < code > null < / code > .
* @ param < T >
* Type of the data . */
public static < T > String marshal ( final JAXBContext ctx , final T data ) { } } | return marshal ( ctx , data , null ) ; |
public class FSDataset { /** * Make a copy of the block if this block is linked to an existing
* snapshot . This ensures that modifying this block does not modify
* data in any existing snapshots .
* @ param block Block
* @ param numLinks Detach if the number of links exceed this value
* @ throws IOException
* @ return - true if the specified block was detached */
public boolean detachBlock ( int namespaceId , Block block , int numLinks ) throws IOException { } } | DatanodeBlockInfo info = null ; lock . readLock ( ) . lock ( ) ; try { info = volumeMap . get ( namespaceId , block ) ; } finally { lock . readLock ( ) . unlock ( ) ; } return info . detachBlock ( namespaceId , block , numLinks ) ; |
public class MMCIFFileTools { /** * Converts a Group into a List of { @ link AtomSite } objects .
* Atoms in other altloc groups ( different from the main group ) are also included , removing possible duplicates
* via using the atom identifier to assess uniqueness .
* @ param g the group
* @ param model the model number for the output AtomSites
* @ param chainName the chain identifier ( author id ) for the output AtomSites
* @ param chainId the internal chain identifier ( asym id ) for the output AtomSites
* @ return */
public static List < AtomSite > convertGroupToAtomSites ( Group g , int model , String chainName , String chainId ) { } } | // The alt locs can have duplicates , since at parsing time we make sure that all alt loc groups have
// all atoms ( see StructureTools # cleanUpAltLocs )
// Thus we have to remove duplicates here by using the atom id
// See issue https : / / github . com / biojava / biojava / issues / 778 and TestAltLocs . testMmcifWritingAllAltlocs / testMmcifWritingPartialAltlocs
Map < Integer , AtomSite > uniqueAtomSites = new LinkedHashMap < > ( ) ; int groupsize = g . size ( ) ; for ( int atompos = 0 ; atompos < groupsize ; atompos ++ ) { Atom a = g . getAtom ( atompos ) ; if ( a == null ) continue ; uniqueAtomSites . put ( a . getPDBserial ( ) , convertAtomToAtomSite ( a , model , chainName , chainId ) ) ; } if ( g . hasAltLoc ( ) ) { for ( Group alt : g . getAltLocs ( ) ) { for ( AtomSite atomSite : convertGroupToAtomSites ( alt , model , chainName , chainId ) ) { uniqueAtomSites . put ( Integer . parseInt ( atomSite . getId ( ) ) , atomSite ) ; } } } return new ArrayList < > ( uniqueAtomSites . values ( ) ) ; |
public class A_CmsReport { /** * Removes the report site root prefix from the absolute path in the resource name ,
* that is adjusts the resource name for the report site root . < p >
* If the site root for this report has not been set ,
* or the resource name does not start with the report site root ,
* the name it is left untouched . < p >
* @ param resourcename the resource name ( full path )
* @ return the resource name adjusted for the report site root
* @ see CmsRequestContext # removeSiteRoot ( String ) */
public String removeSiteRoot ( String resourcename ) { } } | if ( m_siteRoot == null ) { // site root has not been set
return resourcename ; } String siteRoot = CmsRequestContext . getAdjustedSiteRoot ( m_siteRoot , resourcename ) ; if ( ( siteRoot . equals ( m_siteRoot ) ) && resourcename . startsWith ( siteRoot ) ) { resourcename = resourcename . substring ( siteRoot . length ( ) ) ; } return resourcename ; |
public class RETemplateGroup { /** * 根据句子得到匹配的模板
* @ param str
* @ return */
public List < RETemplate > getTemplate ( String str ) { } } | List < RETemplate > templates = new ArrayList < RETemplate > ( ) ; Iterator < RETemplate > it = group . iterator ( ) ; while ( it . hasNext ( ) ) { RETemplate qt = it . next ( ) ; float w = qt . matches ( str ) ; if ( w > 0 ) templates . add ( qt ) ; } return templates ; |
public class IOUtil { /** * 将一个BufferedReader中的词条加载到词典
* @ param br 源
* @ param storage 储存位置
* @ throws IOException 异常表示加载失败 */
public static void loadDictionary ( BufferedReader br , TreeMap < String , CoreDictionary . Attribute > storage , boolean isCSV , Nature defaultNature ) throws IOException { } } | String splitter = "\\s" ; if ( isCSV ) { splitter = "," ; } String line ; boolean firstLine = true ; while ( ( line = br . readLine ( ) ) != null ) { if ( firstLine ) { line = IOUtil . removeUTF8BOM ( line ) ; firstLine = false ; } String param [ ] = line . split ( splitter ) ; int natureCount = ( param . length - 1 ) / 2 ; CoreDictionary . Attribute attribute ; if ( natureCount == 0 ) { attribute = new CoreDictionary . Attribute ( defaultNature ) ; } else { attribute = new CoreDictionary . Attribute ( natureCount ) ; for ( int i = 0 ; i < natureCount ; ++ i ) { attribute . nature [ i ] = LexiconUtility . convertStringToNature ( param [ 1 + 2 * i ] ) ; attribute . frequency [ i ] = Integer . parseInt ( param [ 2 + 2 * i ] ) ; attribute . totalFrequency += attribute . frequency [ i ] ; } } storage . put ( param [ 0 ] , attribute ) ; } br . close ( ) ; |
public class NodeCountryImpl { /** * { @ inheritDoc } */
@ Override public void setNodeCountry ( String nodeCountry ) throws IllegalArgumentException { } } | if ( nodeCountry == null ) { throw new IllegalArgumentException ( "nodeCountry must not be null" ) ; } if ( ! nodeCountry . trim ( ) . matches ( pattern ) ) { throw new IllegalArgumentException ( nodeCountry + " is not a valid country code" ) ; } this . nodeCountry = prepareForAssignment ( this . nodeCountry , nodeCountry . trim ( ) . toUpperCase ( ) ) ; |
public class ServerLock { /** * Create a marker file when the server is running to determine if the JVM terminated normally .
* This file is deleted automatically when the JVM exits normally , on a normal server stop .
* If the marker file already exists when the server is starting , it assumes the JVM abended or
* the JVM process was forcefully terminated . In this case , we mark the bootstrap properties to
* do a full clean of the workarea , to remove any possible corruption that might have occurred
* as a result of the JVM abend .
* The other existing files such as . sLock and . sCommand weren ' t not reused for this purpose
* because they each had ties in to other code and scripts that have expections on them that
* the server running marker file could not work with ( such as slock is always expected to
* exist )
* This utlilty relies on the server lock already being obtained ( for synchronization ) and that
* a server workspace clean is done after it is executed .
* This method also expects the server directory and workarea directories exists . The server lock
* file creation will ensure those things will exist . */
public static void createServerRunningMarkerFile ( BootstrapConfig bootConfig ) { } } | File serverWorkArea = bootConfig . getWorkareaFile ( null ) ; File serverRunningMarkerFile = null ; try { serverRunningMarkerFile = new File ( serverWorkArea , BootstrapConstants . SERVER_RUNNING_FILE ) ; serverRunningMarkerFile . deleteOnExit ( ) ; boolean newFile = serverRunningMarkerFile . createNewFile ( ) ; if ( ! newFile ) bootConfig . forceCleanStart ( ) ; } catch ( IOException e ) { throw new LaunchException ( "Can not create or write to server running marker file, check file permissions" , MessageFormat . format ( BootstrapConstants . messages . getString ( "error.serverDirPermission" ) , serverRunningMarkerFile . getAbsolutePath ( ) ) , e ) ; } |
public class Location { /** * Compare two locations and return if the locations are near or not
* @ param location Location to compare with
* @ param distance The distance between two locations
* @ return true is the real distance is lower or equals than the distance parameter */
public boolean isNearTo ( Double3D location , double distance ) { } } | if ( this . is3DLocation ( ) ) { return this . getLocation3D ( ) . distance ( location ) < distance ; } else { return false ; } |
public class LibertyRuntimeMetaDataProvider { /** * ( non - Javadoc )
* @ see com . ibm . tx . config . RuntimeMetaDataProvider # getTransactionTimeout ( ) */
@ Override public int getTransactionTimeout ( ) { } } | for ( TransactionSettingsProvider s : _serverWideConfigProvider . getTransactionSettingsProviders ( ) . services ( ) ) { if ( s . isActive ( ) ) { final GlobalTransactionSettings gts = s . getGlobalTransactionSettings ( ) ; if ( gts != null ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Found a GlobalTransactionSettings" ) ; return gts . getTransactionTimeout ( ) ; } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "No GlobalTransactionSettings on this thread" ) ; } } else { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "Found the TransactionSettingsProvider but it was inactive" ) ; } } return - 1 ; |
public class ChunkStepControllerImpl { /** * Prime the next chunk ' s ChunkStatus based on the previous one
* ( if there was one ) , particularly taking into account retry - with - rollback
* and the one - at - a - time processing it entails .
* @ return the upcoming chunk ' s ChunkStatus */
private ChunkStatus getNextChunkStatusBasedOnPrevious ( ) { } } | // If this is the first chunk
if ( currentChunkStatus == null ) { return new ChunkStatus ( ) ; } ChunkStatus nextChunkStatus = null ; // At this point the ' current ' status is the previous chunk ' s status .
if ( currentChunkStatus . wasMarkedForRollbackWithRetry ( ) ) { // Re - position reader & writer
getTransactionManager ( ) . begin ( ) ; positionReaderAtCheckpoint ( ) ; positionWriterAtCheckpoint ( ) ; getTransactionManager ( ) . commit ( ) ; nextChunkStatus = new ChunkStatus ( ChunkStatusType . RETRY_AFTER_ROLLBACK ) ; // What happens if we get a retry - with - rollback on a single item that we were processing
// after a prior retry with rollback ? We don ' t want to revert to normal processing
// after completing only the single item of the " single item chunk " . We want to complete
// the full portion of the original chunk . So be careful to propagate this number if
// it already exists .
int numToProcessOneByOne = currentChunkStatus . getItemsToProcessOneByOneAfterRollback ( ) ; if ( numToProcessOneByOne > 0 ) { // Retry after rollback AFTER a previous retry after rollback
nextChunkStatus . setItemsToProcessOneByOneAfterRollback ( numToProcessOneByOne ) ; } else { // " Normal " ( i . e . the first ) retry after rollback .
nextChunkStatus . setItemsToProcessOneByOneAfterRollback ( currentChunkStatus . getItemsTouchedInCurrentChunk ( ) ) ; } } else if ( currentChunkStatus . isRetryingAfterRollback ( ) ) { // In this case the ' current ' ( actually the last ) chunk was a single - item retry after rollback chunk ,
// so we have to see if it ' s time to revert to normal processing .
int numToProcessOneByOne = currentChunkStatus . getItemsToProcessOneByOneAfterRollback ( ) ; if ( numToProcessOneByOne == 1 ) { // we ' re done , revert to normal
nextChunkStatus = new ChunkStatus ( ) ; } else { nextChunkStatus = new ChunkStatus ( ChunkStatusType . RETRY_AFTER_ROLLBACK ) ; nextChunkStatus . setItemsToProcessOneByOneAfterRollback ( numToProcessOneByOne - 1 ) ; } } else { nextChunkStatus = new ChunkStatus ( ) ; } return nextChunkStatus ; |
public class Object2ObjectHashMap { /** * Put a key value pair into the map .
* @ param key lookup key
* @ param value new value , must not be null
* @ return current value associated with key , or null if none found
* @ throws IllegalArgumentException if value is null */
public V put ( final Object key , final Object value ) { } } | final Object val = mapNullValue ( value ) ; requireNonNull ( val , "value cannot be null" ) ; final Object [ ] entries = this . entries ; final int mask = entries . length - 1 ; int index = Hashing . evenHash ( key . hashCode ( ) , mask ) ; Object oldValue = null ; while ( entries [ index + 1 ] != null ) { if ( entries [ index ] == key || entries [ index ] . equals ( key ) ) { oldValue = entries [ index + 1 ] ; break ; } index = next ( index , mask ) ; } if ( oldValue == null ) { ++ size ; entries [ index ] = key ; } entries [ index + 1 ] = val ; increaseCapacity ( ) ; return unmapNullValue ( oldValue ) ; |
public class VCardManager { /** * Returns true if the given entity understands the vCard - XML format and allows the exchange of such .
* @ param jid
* @ return true if the given entity understands the vCard - XML format and exchange .
* @ throws XMPPErrorException
* @ throws NoResponseException
* @ throws NotConnectedException
* @ throws InterruptedException */
public boolean isSupported ( Jid jid ) throws NoResponseException , XMPPErrorException , NotConnectedException , InterruptedException { } } | return ServiceDiscoveryManager . getInstanceFor ( connection ( ) ) . supportsFeature ( jid , NAMESPACE ) ; |
public class PathChildrenCache { /** * Start the cache . The cache is not started automatically . You must call this method .
* @ param mode Method for priming the cache
* @ throws Exception errors */
public void start ( StartMode mode ) throws Exception { } } | Preconditions . checkState ( state . compareAndSet ( State . LATENT , State . STARTED ) , "already started" ) ; mode = Preconditions . checkNotNull ( mode , "mode cannot be null" ) ; client . getConnectionStateListenable ( ) . addListener ( connectionStateListener ) ; switch ( mode ) { case NORMAL : { offerOperation ( new RefreshOperation ( this , RefreshMode . STANDARD ) ) ; break ; } case BUILD_INITIAL_CACHE : { rebuild ( ) ; break ; } case POST_INITIALIZED_EVENT : { initialSet . set ( Maps . < String , ChildData > newConcurrentMap ( ) ) ; offerOperation ( new RefreshOperation ( this , RefreshMode . POST_INITIALIZED ) ) ; break ; } } |
public class CollectionUtil { /** * Merges two arrays into a new array . Elements from pArray1 and pArray2 will
* be copied into a new array , that has pLength1 + pLength2 elements .
* @ param pArray1 First array
* @ param pOffset1 the offset into the first array
* @ param pLength1 the number of elements to copy from the first array
* @ param pArray2 Second array , must be compatible with ( assignable from )
* the first array
* @ param pOffset2 the offset into the second array
* @ param pLength2 the number of elements to copy from the second array
* @ return A new array , containing the values of pArray1 and pArray2 . The
* array ( wrapped as an object ) , will have the length of pArray1 +
* pArray2 , and can be safely cast to the type of the pArray1
* parameter .
* @ see java . lang . System # arraycopy ( Object , int , Object , int , int ) */
@ SuppressWarnings ( { } } | "SuspiciousSystemArraycopy" } ) public static Object mergeArrays ( Object pArray1 , int pOffset1 , int pLength1 , Object pArray2 , int pOffset2 , int pLength2 ) { Class class1 = pArray1 . getClass ( ) ; Class type = class1 . getComponentType ( ) ; // Create new array of the new length
Object array = Array . newInstance ( type , pLength1 + pLength2 ) ; System . arraycopy ( pArray1 , pOffset1 , array , 0 , pLength1 ) ; System . arraycopy ( pArray2 , pOffset2 , array , pLength1 , pLength2 ) ; return array ; |
public class CommerceTaxFixedRateUtil { /** * Returns the first commerce tax fixed rate in the ordered set where commerceTaxMethodId = & # 63 ; .
* @ param commerceTaxMethodId the commerce tax method ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching commerce tax fixed rate
* @ throws NoSuchTaxFixedRateException if a matching commerce tax fixed rate could not be found */
public static CommerceTaxFixedRate findByCommerceTaxMethodId_First ( long commerceTaxMethodId , OrderByComparator < CommerceTaxFixedRate > orderByComparator ) throws com . liferay . commerce . tax . engine . fixed . exception . NoSuchTaxFixedRateException { } } | return getPersistence ( ) . findByCommerceTaxMethodId_First ( commerceTaxMethodId , orderByComparator ) ; |
public class Topic { /** * getter for mostLikelyTopic - gets The most likely topic id for this topic
* @ generated
* @ return value of the feature */
public int getMostLikelyTopic ( ) { } } | if ( Topic_Type . featOkTst && ( ( Topic_Type ) jcasType ) . casFeat_mostLikelyTopic == null ) jcasType . jcas . throwFeatMissing ( "mostLikelyTopic" , "ch.epfl.bbp.uima.types.Topic" ) ; return jcasType . ll_cas . ll_getIntValue ( addr , ( ( Topic_Type ) jcasType ) . casFeatCode_mostLikelyTopic ) ; |
public class TaskContainer { /** * { @ inheritDoc } */
@ Override public void removed ( Task task ) { } } | // Remove the task from the file and its parent task
m_uniqueIDMap . remove ( task . getUniqueID ( ) ) ; m_idMap . remove ( task . getID ( ) ) ; Task parentTask = task . getParentTask ( ) ; if ( parentTask != null ) { parentTask . removeChildTask ( task ) ; } else { m_projectFile . getChildTasks ( ) . remove ( task ) ; } // Remove all resource assignments
Iterator < ResourceAssignment > iter = m_projectFile . getResourceAssignments ( ) . iterator ( ) ; while ( iter . hasNext ( ) == true ) { ResourceAssignment assignment = iter . next ( ) ; if ( assignment . getTask ( ) == task ) { Resource resource = assignment . getResource ( ) ; if ( resource != null ) { resource . removeResourceAssignment ( assignment ) ; } iter . remove ( ) ; } } // Recursively remove any child tasks
while ( true ) { List < Task > childTaskList = task . getChildTasks ( ) ; if ( childTaskList . isEmpty ( ) == true ) { break ; } remove ( childTaskList . get ( 0 ) ) ; } |
public class ExampleTemplateMatching { /** * Helper function will is finds matches and displays the results as colored rectangles */
private static void drawRectangles ( Graphics2D g2 , GrayF32 image , GrayF32 template , GrayF32 mask , int expectedMatches ) { } } | List < Match > found = findMatches ( image , template , mask , expectedMatches ) ; int r = 2 ; int w = template . width + 2 * r ; int h = template . height + 2 * r ; for ( Match m : found ) { System . out . println ( "Match " + m . x + " " + m . y + " score " + m . score ) ; // this demonstrates how to filter out false positives
// the meaning of score will depend on the template technique
// if ( m . score < - 1000 ) / / This line is commented out for demonstration purposes
// continue ;
// the return point is the template ' s top left corner
int x0 = m . x - r ; int y0 = m . y - r ; int x1 = x0 + w ; int y1 = y0 + h ; g2 . drawLine ( x0 , y0 , x1 , y0 ) ; g2 . drawLine ( x1 , y0 , x1 , y1 ) ; g2 . drawLine ( x1 , y1 , x0 , y1 ) ; g2 . drawLine ( x0 , y1 , x0 , y0 ) ; } |
public class DefaultStreamFilenameGenerator { /** * { @ inheritDoc } */
public String generateFilename ( IScope scope , String name , GenerationType type ) { } } | return generateFilename ( scope , name , null , type ) ; |
public class AbstractEJBRuntime { /** * Start a new EJB Module
* Warning : This method will be called by multiple threads that are
* starting separate applications . WebSphere runtime will still prevent
* multiple modules within the same EAR from starting simultaneously . < p > */
public void startModule ( EJBModuleMetaDataImpl mmd ) throws RuntimeError { } } | final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . entry ( tc , "startModule" , mmd . getJ2EEName ( ) ) ; String jarName = null ; // 294477
String appJarName = null ; RuntimeError error = null ; Object origCL = ThreadContextAccessor . UNCHANGED ; NameSpaceBinder < ? > binder = null ; boolean postInvokeNeeded = false ; try { // Fire the ModuleMetaData event to the listeners .
// TODO - why is this here rather than createMetaData ?
if ( isTraceOn && tc . isDebugEnabled ( ) ) // 203449
Tr . debug ( tc , "Fire Module Metadata created event to listeners for module: " + mmd . getJ2EEName ( ) ) ; mmd . ivMetaDataDestroyRequired = true ; // d505055
fireMetaDataCreated ( mmd ) ; ModuleInitData mid = mmd . ivInitData ; // F743-36113
mmd . ivInitData = null ; jarName = mmd . getName ( ) ; // 294477
EJBModuleConfigData moduleConfig = createEJBModuleConfigData ( mid ) ; preInvokeStartModule ( mmd , moduleConfig ) ; // 621157 - Past this point , ensure we notify the container and
// collaborators that the module has started , even if an exception
// occurs . Otherwise , collaborators get confused when we notify
// them that a module is stopping without first notifying them that
// the module has started .
postInvokeNeeded = true ; // For EJB 2.1 and earlier modules , the Scheduler is only
// ' started ' if EJBDeploy flagged the module as containing
// Timer objects . For EJB 3.0 and later modules , there is no
// EJBDeploy , so the Scheduler is started if the customer has
// configured to use a Timer database other than the default ,
// or if the default database exists ( i . e . it might have timers ) .
// Otherwise , for EJB 3.0 and later , the Scheduler will be
// created and started on first use by a Timer bean . d438133
if ( mid . ivHasTimers == null ) { initializeTimerService ( true ) ; } else if ( mid . ivHasTimers ) { initializeTimerService ( false ) ; } // Create the module namespace binder . F69147.2
binder = createNameSpaceBinder ( mmd ) ; binder . beginBind ( ) ; boolean hasEJB = false ; for ( BeanInitData bid : mid . ivBeans ) { // Create the BeanMetaData if it hasn ' t already been created .
BeanMetaData bmd = mmd . ivBeanMetaDatas . get ( bid . ivName ) ; if ( bmd == null ) { bmd = createBeanMetaData ( bid , mmd ) ; } hasEJB |= bmd . type != InternalConstants . TYPE_MANAGED_BEAN ; // F743-4950 - If this EJB is a Singleton Session bean , then
// add it to the application metadata to finish its processing
// when the application finishes its startup processing .
if ( bmd . isSingletonSessionBean ( ) ) { mmd . getEJBApplicationMetaData ( ) . addSingleton ( bmd , bid . ivStartup , bid . ivDependsOn ) ; } } if ( ! hasEJB && ! mmd . ivManagedBeansOnly ) // F743-36113
{ // Error - EJB modules must have at least one bean configured . Stop application from starting .
Tr . error ( tc , "NO_BEANS_IN_MODULE_CNTR9269W" , jarName ) ; throw new EJBConfigurationException ( "The " + jarName + " Enterprise JavaBeans (EJB) module does not have any enterprise beans configured." ) ; } validateMergedXML ( mid ) ; // d680497.1
ivEJBMDOrchestrator . processEJBJarBindings ( mid , mmd ) ; // F743-36290.1
Collection < BeanMetaData > bmds = mmd . ivBeanMetaDatas . values ( ) ; // d664917.2 - Process all BeanMetaData . Note that metadata
// processing must be done using the runtime class loader .
for ( BeanMetaData bmd : bmds ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) { bmd . dump ( ) ; } validateMergedXML ( bmd ) ; // d680497
// Notify PM that the bean has been installed . / / RTC112791
beanInstall ( bmd ) ; if ( ! bmd . fullyInitialized ) // F91481
{ initializeBMD ( bmd ) ; } } // Switch over from the runtime classloader to the application classloader so that
// the classes loaded during install will be accessible during the execution of the application .
origCL = svThreadContextAccessor . pushContextClassLoader ( mid . getContextClassLoader ( ) ) ; // F85059
// Start all EJBs unless they ' re deferred .
for ( BeanMetaData bmd : bmds ) { if ( ! bmd . ivDeferEJBInitialization ) { fireMetaDataCreatedAndStartBean ( bmd ) ; // d648522 , d739043
} } // All EJBs have either been started , or their metadata has been
// sufficiently processed to allow deferred initialization , so make
// the EJB visible .
for ( BeanMetaData bmd : bmds ) { // Add the EJB to HomeOfHomes ( remote , serialized refs , etc . ) .
addHome ( bmd ) ; // Bind non - MDB into JNDI .
if ( bmd . type != InternalConstants . TYPE_MESSAGE_DRIVEN ) { try { bindInterfaces ( binder , bmd ) ; } catch ( Exception e ) { FFDCFilter . processException ( e , CLASS_NAME + ".install" , "950" , this ) ; if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "startModule: exception" , e ) ; error = new RuntimeError ( e ) ; // 119723 set warning to last exception
} } } // d608829.1 - If a bean failed to start , then there ' s no need to
// finish starting the module .
if ( error == null ) { // If any Remote bindings were made ( component or business ) then an
// EJBFactory also needs to be bound to support ejb - link / auto - link
// from the client . d440604
binder . bindEJBFactory ( ) ; // F69147.2
startMDBs ( mid , mmd ) ; // d604213 - Modules can be started after the application is running ,
// so we must create automatic timers at module start and not at
// application start .
if ( mmd . ivAutomaticTimerBeans != null ) { // The EJB TimerService does not run in the z / OS CRA .
if ( ! EJSPlatformHelper . isZOSCRA ( ) ) { int numPersistentCreated = 0 ; int numNonPersistentCreated = 0 ; if ( mmd . ivHasNonPersistentAutomaticTimers ) numNonPersistentCreated = createNonPersistentAutomaticTimers ( mmd . ivJ2EEName . getApplication ( ) , mmd . getName ( ) , mmd . ivAutomaticTimerBeans ) ; if ( mmd . ivHasPersistentAutomaticTimers ) numPersistentCreated = createPersistentAutomaticTimers ( mmd . ivJ2EEName . getApplication ( ) , mmd . getName ( ) , mmd . ivAutomaticTimerBeans ) ; Tr . info ( tc , "AUTOMATIC_TIMER_CREATION_CNTR0219I" , new Object [ ] { numPersistentCreated , numNonPersistentCreated , mmd . getName ( ) } ) ; } mmd . ivAutomaticTimerBeans = null ; } if ( ! mmd . ivManagedBeansOnly ) { registerMBeans ( mid , mmd ) ; // 198685
} } ivEJBMDOrchestrator . processGeneralizations ( moduleConfig , mmd ) ; // F743-21131
postInvokeNeeded = false ; // d621157
postInvokeStartModule ( mmd , appJarName ) ; // d621157
mid . unload ( ) ; } // d607801 : removed catch block
catch ( Throwable t ) { FFDCFilter . processException ( t , CLASS_NAME + ".install" , "982" , this ) ; error = new RuntimeError ( t ) ; // 119723
} finally { // The following code was reordered so that the context classloader @ MD20022A
// is reset after the call to executeBatchedOperation . On 390 , @ MD20022A
// executeBatchedOperation drives a remote operation to the control @ MD20022A
// region . It returns IORs for the bound homes . For each IOR , @ MD20022A
// the base JDK ORB calls lookupLocalObject ( which 390 overrides ) @ MD20022A
// to determine if the object is local and if so to obtain a stub @ MD20022A
// for it . SOV defect 68226 validates that the stub returned by @ MD20022A
// lookupLocalObject is compatible with the active classloaders . @ MD20022A
// If the context classloader has been reset , this validation fails @ MD20022A
// and the JDK ORB tries to load a new compatible stub . This won ' t @ MD20022A
// be able to find the stub class from the EJB application either @ MD20022A
// and it will end up creating and caching an instance of @ MD20022A
// org . omg . stub . javax . ejb . _ EJBHome _ Stub . In order to avoid this , @ MD20022A
// the context classloader is reset after executeBatchedOperation . @ MD20022A
if ( postInvokeNeeded ) { // d621157
try { postInvokeStartModule ( mmd , appJarName ) ; } catch ( Throwable t ) { FFDCFilter . processException ( t , CLASS_NAME + ".startModule" , "761" , this ) ; } } if ( binder != null ) { try { binder . end ( ) ; // F69147.2
} catch ( Throwable t ) { if ( error == null ) { error = new RuntimeError ( t ) ; } } } svThreadContextAccessor . popContextClassLoader ( origCL ) ; // F85059
if ( error != null ) { if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "startModule: " + error ) ; try { mmd . getEJBApplicationMetaData ( ) . stoppingModule ( mmd ) ; // F743-26072
uninstall ( mmd , true ) ; // d127220 / / d130898
} catch ( Throwable t ) { FFDCFilter . processException ( t , CLASS_NAME + ".startModule" , "980" , this ) ; } if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "startModule: " + error ) ; throw error ; // 118362
} if ( isTraceOn && tc . isEntryEnabled ( ) ) Tr . exit ( tc , "startModule" ) ; } |
public class Context { /** * Unregister a connection
* @ param cm The connection manager
* @ param cl The connection listener
* @ param c The connection
* @ return True if the connection was unregistered , otherwise false */
boolean unregisterConnection ( ConnectionManager cm , ConnectionListener cl , Object c ) { } } | if ( clToC != null && clToC . get ( cl ) != null ) { List < Object > l = clToC . get ( cl ) ; return l . remove ( c ) ; } return false ; |
public class GeometryUtilities { /** * Scales a { @ link Polygon } to have an unitary area .
* @ param polygon the geometry to scale .
* @ return a copy of the scaled geometry .
* @ throws Exception */
public static Geometry scaleToUnitaryArea ( Geometry polygon ) throws Exception { } } | double area = polygon . getArea ( ) ; double scale = sqrt ( 1.0 / area ) ; AffineTransform scaleAT = new AffineTransform ( ) ; scaleAT . scale ( scale , scale ) ; AffineTransform2D scaleTransform = new AffineTransform2D ( scaleAT ) ; polygon = JTS . transform ( polygon , scaleTransform ) ; return polygon ; |
public class GeometryUtil { /** * Checks whether the line ( x1 , y1 ) - ( x2 , y2 ) and the cubic curve ( cx1 , cy1 ) - ( cx2 , cy2 ) -
* ( cx3 , cy3 ) - ( cx4 , cy4 ) intersect . The points of intersection are saved to { @ code points } .
* Therefore { @ code points } must be of length at least 6.
* @ return the numbers of roots that lie in the defined interval . */
public static int intersectLineAndCubic ( float x1 , float y1 , float x2 , float y2 , float cx1 , float cy1 , float cx2 , float cy2 , float cx3 , float cy3 , float cx4 , float cy4 , float [ ] params ) { } } | float [ ] eqn = new float [ 4 ] ; float [ ] t = new float [ 3 ] ; float [ ] s = new float [ 3 ] ; float dy = y2 - y1 ; float dx = x2 - x1 ; int quantity = 0 ; int count = 0 ; eqn [ 0 ] = ( cy1 - y1 ) * dx + ( x1 - cx1 ) * dy ; eqn [ 1 ] = - 3 * ( cy1 - cy2 ) * dx + 3 * ( cx1 - cx2 ) * dy ; eqn [ 2 ] = ( 3 * cy1 - 6 * cy2 + 3 * cy3 ) * dx - ( 3 * cx1 - 6 * cx2 + 3 * cx3 ) * dy ; eqn [ 3 ] = ( - 3 * cy1 + 3 * cy2 - 3 * cy3 + cy4 ) * dx + ( 3 * cx1 - 3 * cx2 + 3 * cx3 - cx4 ) * dy ; if ( ( count = Crossing . solveCubic ( eqn , t ) ) == 0 ) { return 0 ; } for ( int i = 0 ; i < count ; i ++ ) { if ( dx != 0 ) { s [ i ] = ( cubic ( t [ i ] , cx1 , cx2 , cx3 , cx4 ) - x1 ) / dx ; } else if ( dy != 0 ) { s [ i ] = ( cubic ( t [ i ] , cy1 , cy2 , cy3 , cy4 ) - y1 ) / dy ; } else { s [ i ] = 0f ; } if ( t [ i ] >= 0 && t [ i ] <= 1 && s [ i ] >= 0 && s [ i ] <= 1 ) { params [ 2 * quantity ] = t [ i ] ; params [ 2 * quantity + 1 ] = s [ i ] ; ++ quantity ; } } return quantity ; |
public class RestClientUtil { /** * * * * * * slice searchAll end * * * * * */
@ Override public TermRestResponse termSuggest ( String path , String entity ) throws ElasticSearchException { } } | TermRestResponse searchResult = this . client . executeRequest ( path , entity , new ElasticSearchTermResponseHandler ( ) ) ; return searchResult ; |
public class Bzip2HuffmanAllocator { /** * A final allocation pass with no code length limit .
* @ param array The code length array */
private static void allocateNodeLengths ( final int [ ] array ) { } } | int firstNode = array . length - 2 ; int nextNode = array . length - 1 ; for ( int currentDepth = 1 , availableNodes = 2 ; availableNodes > 0 ; currentDepth ++ ) { final int lastNode = firstNode ; firstNode = first ( array , lastNode - 1 , 0 ) ; for ( int i = availableNodes - ( lastNode - firstNode ) ; i > 0 ; i -- ) { array [ nextNode -- ] = currentDepth ; } availableNodes = ( lastNode - firstNode ) << 1 ; } |
public class PermissionController { /** * List of global roles */
@ RequestMapping ( value = "globals/roles" , method = RequestMethod . GET ) public Resources < GlobalRole > getGlobalRoles ( ) { } } | return Resources . of ( rolesService . getGlobalRoles ( ) , uri ( on ( PermissionController . class ) . getGlobalRoles ( ) ) ) ; |
public class ServiceResolver { /** * This method returns the list of service implementations that implement
* the supplied interface .
* @ param intf The interface
* @ return The list
* @ param < T > The service interface */
public static < T > List < T > getServices ( Class < T > intf ) { } } | List < T > ret = new ArrayList < T > ( ) ; for ( T service : ServiceLoader . load ( intf ) ) { if ( ! ( service instanceof ServiceStatus ) || ( ( ServiceStatus ) service ) . isAvailable ( ) ) { if ( service instanceof ServiceLifecycle ) { ( ( ServiceLifecycle ) service ) . init ( ) ; } ret . add ( service ) ; } } return ret ; |
public class JacksonDBCollection { /** * Finds the first document in the query and updates it .
* @ param query query to match
* @ param fields fields to be returned
* @ param sort sort to apply before picking first document
* @ param remove if true , document found will be removed
* @ param update update to apply
* @ param returnNew if true , the updated document is returned , otherwise the old document is returned ( or it would be lost forever )
* @ param upsert do upsert ( insert if document not present )
* @ return the object */
public T findAndModify ( DBObject query , DBObject fields , DBObject sort , boolean remove , DBObject update , boolean returnNew , boolean upsert ) { } } | return convertFromDbObject ( dbCollection . findAndModify ( serializeFields ( query ) , fields , sort , remove , update , returnNew , upsert ) ) ; |
public class SparkStorageUtils { /** * Restore a { @ code JavaPairRDD < Long , List < List < Writable > > > } previously saved with { @ link # saveMapFile ( String , JavaRDD ) } } < br >
* Note that if the keys are not required , simply use { @ code restoreMapFileSequences ( . . . ) . values ( ) }
* @ param path Path of the MapFile
* @ param sc Spark context
* @ return The restored RDD , with their unique indices as the key */
public static JavaPairRDD < Long , List < List < Writable > > > restoreMapFileSequences ( String path , JavaSparkContext sc ) { } } | Configuration c = new Configuration ( ) ; c . set ( FileInputFormat . INPUT_DIR , FilenameUtils . normalize ( path , true ) ) ; JavaPairRDD < LongWritable , SequenceRecordWritable > pairRDD = sc . newAPIHadoopRDD ( c , SequenceFileInputFormat . class , LongWritable . class , SequenceRecordWritable . class ) ; return pairRDD . mapToPair ( new SequenceRecordLoadPairFunction ( ) ) ; |
public class InstrumentedFileSystemUtils { /** * Replace the scheme of the input { @ link FileStatus } if it matches the string to replace . */
public static FileStatus replaceScheme ( FileStatus st , String replace , String replacement ) { } } | if ( replace != null && replace . equals ( replacement ) ) { return st ; } try { return new FileStatus ( st . getLen ( ) , st . isDir ( ) , st . getReplication ( ) , st . getBlockSize ( ) , st . getModificationTime ( ) , st . getAccessTime ( ) , st . getPermission ( ) , st . getOwner ( ) , st . getGroup ( ) , st . isSymlink ( ) ? st . getSymlink ( ) : null , replaceScheme ( st . getPath ( ) , replace , replacement ) ) ; } catch ( IOException ioe ) { throw new RuntimeException ( ioe ) ; } |
public class XMLUpdateShredder { /** * Main algorithm to determine if nodes are equal , have to be inserted , or
* have to be removed .
* @ param paramEvent
* The currently parsed StAX event .
* @ throws IOException
* In case the open operation fails ( delegated from
* checkDescendants ( . . . ) ) .
* @ throws XMLStreamException
* In case any StAX parser problem occurs .
* @ throws TTIOException */
private void algorithm ( final XMLEvent paramEvent ) throws IOException , XMLStreamException , TTIOException { } } | assert paramEvent != null ; do { /* * Check if a node in the shreddered file on the same level equals
* the current element node . */
if ( paramEvent . isStartElement ( ) ) { mFound = checkElement ( paramEvent . asStartElement ( ) ) ; } else if ( paramEvent . isCharacters ( ) ) { mFound = checkText ( paramEvent . asCharacters ( ) ) ; } if ( mWtx . getNode ( ) . getDataKey ( ) != mNodeKey ) { mIsRightSibling = true ; } mKeyMatches = mWtx . getNode ( ) . getDataKey ( ) ; // if ( mFound & & mIsRightSibling ) {
// * Root element of next subtree in shreddered file matches
// * so check all descendants . If they match the node must be
// * inserted .
// switch ( paramEvent . getEventType ( ) ) {
// case XMLStreamConstants . START _ ELEMENT :
// mMoved = EMoved . FIRSTNODE ;
// / / mFound = checkDescendants ( paramEvent . asStartElement ( ) ) ;
// mFound = checkDescendants ( paramEvent . asStartElement ( ) ) ;
// break ;
// case XMLStreamConstants . CHARACTERS :
// mFound = checkText ( paramEvent . asCharacters ( ) ) ;
// break ;
// default :
// / / throw new
// AssertionError ( " Node type not known or not implemented ! " ) ;
// mWtx . moveTo ( mKeyMatches ) ;
} while ( ! mFound && mWtx . moveTo ( ( ( ITreeStructData ) mWtx . getNode ( ) ) . getRightSiblingKey ( ) ) ) ; mWtx . moveTo ( mNodeKey ) ; |
public class GetInProgressPackages { /** * Runs the example .
* @ param adManagerServices the services factory .
* @ param session the session .
* @ throws ApiException if the API request failed with one or more service errors .
* @ throws RemoteException if the API request failed due to other errors . */
public static void runExample ( AdManagerServices adManagerServices , AdManagerSession session ) throws RemoteException { } } | PackageServiceInterface packageService = adManagerServices . get ( session , PackageServiceInterface . class ) ; // Create a statement to select packages .
StatementBuilder statementBuilder = new StatementBuilder ( ) . where ( "status = :status" ) . orderBy ( "id ASC" ) . limit ( StatementBuilder . SUGGESTED_PAGE_LIMIT ) . withBindVariableValue ( "status" , PackageStatus . IN_PROGRESS . toString ( ) ) ; // Retrieve a small amount of packages at a time , paging through
// until all packages have been retrieved .
int totalResultSetSize = 0 ; do { PackagePage page = packageService . getPackagesByStatement ( statementBuilder . toStatement ( ) ) ; if ( page . getResults ( ) != null ) { // Print out some information for each package .
totalResultSetSize = page . getTotalResultSetSize ( ) ; int i = page . getStartIndex ( ) ; for ( Package pkg : page . getResults ( ) ) { System . out . printf ( "%d) Package with ID %d, name '%s', and proposal ID %d was found.%n" , i ++ , pkg . getId ( ) , pkg . getName ( ) , pkg . getProposalId ( ) ) ; } } statementBuilder . increaseOffsetBy ( StatementBuilder . SUGGESTED_PAGE_LIMIT ) ; } while ( statementBuilder . getOffset ( ) < totalResultSetSize ) ; System . out . printf ( "Number of results found: %d%n" , totalResultSetSize ) ; |
public class DaemonStarter { /** * I KNOW WHAT I AM DOING */
private static void handleSignals ( ) { } } | if ( DaemonStarter . isRunMode ( ) ) { try { // handle SIGHUP to prevent process to get killed when exiting the tty
Signal . handle ( new Signal ( "HUP" ) , arg0 -> { // Nothing to do here
System . out . println ( "SIG INT" ) ; } ) ; } catch ( IllegalArgumentException e ) { System . err . println ( "Signal HUP not supported" ) ; } try { // handle SIGTERM to notify the program to stop
Signal . handle ( new Signal ( "TERM" ) , arg0 -> { System . out . println ( "SIG TERM" ) ; DaemonStarter . stopService ( ) ; } ) ; } catch ( IllegalArgumentException e ) { System . err . println ( "Signal TERM not supported" ) ; } try { // handle SIGINT to notify the program to stop
Signal . handle ( new Signal ( "INT" ) , arg0 -> { System . out . println ( "SIG INT" ) ; DaemonStarter . stopService ( ) ; } ) ; } catch ( IllegalArgumentException e ) { System . err . println ( "Signal INT not supported" ) ; } try { // handle SIGUSR2 to notify the life - cycle listener
Signal . handle ( new Signal ( "USR2" ) , arg0 -> { System . out . println ( "SIG USR2" ) ; DaemonStarter . getLifecycleListener ( ) . signalUSR2 ( ) ; } ) ; } catch ( IllegalArgumentException e ) { System . err . println ( "Signal USR2 not supported" ) ; } } |
public class AbcNode { /** * Returns the textual value of this node , transforms all
* escaped chars ( e . g . < TT > \ ' i < / TT > = > < TT > í < / TT > ) */
public String getTexTextValue ( ) { } } | String text = value ; if ( ( text != null ) && ( text . trim ( ) . length ( ) > 0 ) ) { Enumeration e = bundle . getKeys ( ) ; while ( e . hasMoreElements ( ) ) { String key = ( String ) e . nextElement ( ) ; if ( text . indexOf ( key ) != - 1 ) text = stringReplace ( text , key , bundle . getString ( key ) ) ; } } return text ; |
public class EventLockCumulativeTimeout { /** * Wait for a notification or a timeout - may wake spuriously . */
private void waitForEventOrTimeout ( ) throws InterruptedException { } } | long remainingWait = timeout - timeWaited ; long millisToWait = remainingWait / 1000000 ; int nanosToWait = ( int ) ( remainingWait % 1000000 ) ; wait ( millisToWait , nanosToWait ) ; |
public class ComStmtPrepare {

    /**
     * Read COM_PREPARE_RESULT.
     * <p>
     * Parses the server answer to a prepared-statement request: an error packet
     * is turned into an exception; an OK packet yields the statement id followed
     * by the parameter and column metadata packets, whose EOF terminators are
     * only present when EOF packets are not deprecated by the protocol.
     *
     * @param reader inputStream
     * @param eofDeprecated are EOF_packet deprecated
     * @return ServerPrepareResult prepare result
     * @throws IOException if connection has error
     * @throws SQLException if server answer with error.
     */
    public ServerPrepareResult read(PacketInputStream reader, boolean eofDeprecated)
            throws IOException, SQLException {
        Buffer buffer = reader.getPacket(true);
        // First byte of the packet discriminates OK / ERROR.
        byte firstByte = buffer.getByteAt(buffer.position);

        if (firstByte == ERROR) {
            throw buildErrorException(buffer);
        }

        if (firstByte == OK) {
            /* Prepared Statement OK */
            buffer.readByte(); /* skip field count */
            final int statementId = buffer.readInt();
            // Counts are unsigned 16-bit values.
            final int numColumns = buffer.readShort() & 0xffff;
            final int numParams = buffer.readShort() & 0xffff;
            ColumnInformation[] params = new ColumnInformation[numParams];
            ColumnInformation[] columns = new ColumnInformation[numColumns];

            if (numParams > 0) {
                // Parameter metadata packets come first.
                for (int i = 0; i < numParams; i++) {
                    params[i] = new ColumnInformation(reader.getPacket(false));
                }
                if (numColumns > 0) {
                    // EOF separates parameter metadata from column metadata.
                    if (!eofDeprecated) {
                        protocol.skipEofPacket();
                    }
                    for (int i = 0; i < numColumns; i++) {
                        columns[i] = new ColumnInformation(reader.getPacket(false));
                    }
                }
                // Trailing EOF after the last metadata block.
                if (!eofDeprecated) {
                    protocol.readEofPacket();
                }
            } else {
                if (numColumns > 0) {
                    for (int i = 0; i < numColumns; i++) {
                        columns[i] = new ColumnInformation(reader.getPacket(false));
                    }
                    if (!eofDeprecated) {
                        protocol.readEofPacket();
                    }
                } else {
                    // read warning only if no param / columns, because will be overwritten by EOF warning data
                    buffer.readByte(); // reserved
                    protocol.setHasWarnings(buffer.readShort() > 0);
                }
            }

            ServerPrepareResult serverPrepareResult =
                new ServerPrepareResult(sql, statementId, columns, params, protocol);
            // Cache the prepared statement when caching is enabled and the SQL
            // text is short enough; a cached entry wins over the fresh one.
            if (protocol.getOptions().cachePrepStmts
                    && protocol.getOptions().useServerPrepStmts
                    && sql != null
                    && sql.length() < protocol.getOptions().prepStmtCacheSqlLimit) {
                String key = protocol.getDatabase() + "-" + sql;
                ServerPrepareResult cachedServerPrepareResult =
                    protocol.addPrepareInCache(key, serverPrepareResult);
                return cachedServerPrepareResult != null ? cachedServerPrepareResult : serverPrepareResult;
            }
            return serverPrepareResult;
        } else {
            throw new SQLException("Unexpected packet returned by server, first byte " + firstByte);
        }
    }
}
public class CmsExplorerTypeSettings { /** * Sets the basic attributes of the type settings . < p >
* @ param name the name of the type setting
* @ param key the key name of the explorer type setting
* @ param icon the icon path and file name of the explorer type setting
* @ param bigIcon the file name of the big icon
* @ param smallIconStyle the small icon CSS style class
* @ param bigIconStyle the big icon CSS style class
* @ param reference the reference of the explorer type setting
* @ param elementView the element view
* @ param isView ' true ' if this type represents an element view
* @ param namePattern the name pattern
* @ param viewOrder the view order */
public void setTypeAttributes ( String name , String key , String icon , String bigIcon , String smallIconStyle , String bigIconStyle , String reference , String elementView , String isView , String namePattern , String viewOrder ) { } } | setName ( name ) ; setKey ( key ) ; setIcon ( icon ) ; setBigIcon ( bigIcon ) ; setSmallIconStyle ( smallIconStyle ) ; setBigIconStyle ( bigIconStyle ) ; setReference ( reference ) ; setElementView ( elementView ) ; try { m_viewOrder = Integer . valueOf ( viewOrder ) ; } catch ( NumberFormatException e ) { LOG . debug ( "Type " + name + " has no or invalid view order:" + viewOrder ) ; } m_isView = Boolean . valueOf ( isView ) . booleanValue ( ) ; m_namePattern = namePattern ; |
public class MockInjectionStrategy { /** * Actually inject mockCandidates on field .
* Actual algorithm is defined in the implementations of { @ link # processInjection ( Field , Object , Set ) } .
* However if injection occurred successfully , the process should return < code > true < / code > ,
* and < code > false < / code > otherwise .
* The code takes care of calling the next strategy if available and if of course if required
* @ param onField Field needing injection .
* @ param fieldOwnedBy The owning instance of the field .
* @ param mockCandidates A set of mock candidate , that might be injected .
* @ return < code > true < / code > if successful , < code > false < / code > otherwise . */
public boolean process ( Field onField , Object fieldOwnedBy , Set < Object > mockCandidates ) { } } | if ( processInjection ( onField , fieldOwnedBy , mockCandidates ) ) { return true ; } return relayProcessToNextStrategy ( onField , fieldOwnedBy , mockCandidates ) ; |
public class AbstractNewSarlElementWizardPage { /** * Create the default standard lifecycle function templates .
* @ param elementTypeName the name of the element type .
* @ param actionAdder the adder of actions .
* @ param usesAdder the adder of uses statement .
* @ return { @ code true } if the units are added ; { @ code false } otherwise .
* @ since 0.5 */
protected boolean createStandardSARLLifecycleFunctionTemplates ( String elementTypeName , Function1 < ? super String , ? extends ISarlActionBuilder > actionAdder , Procedure1 < ? super String > usesAdder ) { } } | if ( ! isCreateStandardLifecycleFunctions ( ) ) { return false ; } usesAdder . apply ( LOGGING_CAPACITY_NAME ) ; ISarlActionBuilder action = actionAdder . apply ( INSTALL_SKILL_NAME ) ; IBlockExpressionBuilder block = action . getExpression ( ) ; block . setInnerDocumentation ( MessageFormat . format ( Messages . AbstractNewSarlElementWizardPage_19 , elementTypeName ) ) ; IExpressionBuilder expr = block . addExpression ( ) ; createInfoCall ( expr , "Installing the " + elementTypeName ) ; // $ NON - NLS - 1 $
action = actionAdder . apply ( UNINSTALL_SKILL_NAME ) ; block = action . getExpression ( ) ; block . setInnerDocumentation ( MessageFormat . format ( Messages . AbstractNewSarlElementWizardPage_20 , elementTypeName ) ) ; expr = block . addExpression ( ) ; createInfoCall ( expr , "Uninstalling the " + elementTypeName ) ; // $ NON - NLS - 1 $
return true ; |
public class LogEntry {

    /**
     * Creates a {@code LogEntry} object given the log name, the monitored
     * resource and the entry payload.
     *
     * @param logName the name of the log the entry belongs to
     * @param resource the monitored resource the entry refers to
     * @param payload the entry payload
     * @return a new immutable {@code LogEntry} built from the three values
     */
    public static LogEntry of(String logName, MonitoredResource resource, Payload<?> payload) {
        // Convenience wrapper around the builder.
        return newBuilder(payload).setLogName(logName).setResource(resource).build();
    }
}
public class IntegerRange { /** * Checks whether this contains the given number , i . e . whether the iterator will yield the number . This is different
* from interval containment : < code > 0 . . 2 . by ( 2 ) < / code > will < em > not < / em > contain 1.
* @ param number
* the number to be checked for containment .
* @ return whether this sequence contains the given number or not . */
@ Pure public boolean contains ( int number ) { } } | if ( step < 0 ) return number <= start && number >= end && ( number - start ) % step == 0 ; else return number >= start && number <= end && ( number - start ) % step == 0 ; |
public class ShaSaslClient { /** * Generate the HMAC with the given SHA algorithm */
private byte [ ] hmac ( byte [ ] key , byte [ ] data ) { } } | try { final Mac mac = Mac . getInstance ( hmacAlgorithm ) ; mac . init ( new SecretKeySpec ( key , mac . getAlgorithm ( ) ) ) ; return mac . doFinal ( data ) ; } catch ( InvalidKeyException e ) { if ( key . length == 0 ) { throw new UnsupportedOperationException ( "This JVM does not support empty HMAC keys (empty passwords). " + "Please set a bucket password or upgrade your JVM." ) ; } else { throw new RuntimeException ( "Failed to generate HMAC hash for password" , e ) ; } } catch ( Throwable t ) { throw new RuntimeException ( t ) ; } |
public class SiteJarResourceLoader { /** * Returns the input stream that can be used to load the named
* resource .
* @ param path the path for the resource to be loaded .
* @ return an input stream that can be used to read the resource .
* @ exception ResourceNotFoundException if the resource was not found . */
@ Override public InputStream getResourceStream ( String path ) throws ResourceNotFoundException { } } | SiteKey skey = new SiteKey ( path ) ; // load it on up
try { InputStream stream = _loader . getResourceAsStream ( skey . siteId , skey . path ) ; if ( stream == null ) { String errmsg = "Unable to load resource via " + "site-specific jar file [path=" + path + "]." ; throw new ResourceNotFoundException ( errmsg ) ; } return stream ; } catch ( IOException ioe ) { throw new ResourceNotFoundException ( ioe . getMessage ( ) ) ; } |
public class AWSDatabaseMigrationServiceWaiters {

    /**
     * Builds a ReplicationInstanceAvailable waiter by using custom parameters
     * waiterParameters and other parameters defined in the waiters
     * specification, and then polls until it determines whether the resource
     * entered the desired state or not, where polling criteria is bound by
     * either default polling strategy or custom polling strategy.
     *
     * @return a waiter that polls DescribeReplicationInstances until the
     *         instance is available, or a terminal failure state is matched
     */
    public Waiter<DescribeReplicationInstancesRequest> replicationInstanceAvailable() {
        return new WaiterBuilder<DescribeReplicationInstancesRequest, DescribeReplicationInstancesResult>()
            .withSdkFunction(new DescribeReplicationInstancesFunction(client))
            // First matcher is the success state; the rest are terminal failures.
            .withAcceptors(
                new ReplicationInstanceAvailable.IsAvailableMatcher(),
                new ReplicationInstanceAvailable.IsDeletingMatcher(),
                new ReplicationInstanceAvailable.IsIncompatiblecredentialsMatcher(),
                new ReplicationInstanceAvailable.IsIncompatiblenetworkMatcher(),
                new ReplicationInstanceAvailable.IsInaccessibleencryptioncredentialsMatcher())
            // Up to 60 attempts with a fixed 60-second delay between polls.
            .withDefaultPollingStrategy(
                new PollingStrategy(new MaxAttemptsRetryStrategy(60), new FixedDelayStrategy(60)))
            .withExecutorService(executorService)
            .build();
    }
}
public class AbstractObjectMapper {

    /**
     * {@inheritDoc}
     * <p>
     * Deserializes {@code in}. When root-value unwrapping is enabled, the input
     * must be a one-entry JSON object whose single key equals {@code rootName};
     * the wrapped value is then deserialized. Otherwise the input is
     * deserialized directly.
     */
    @Override
    public T read(String in, JsonDeserializationContext ctx) throws JsonDeserializationException {
        JsonReader reader = ctx.newJsonReader(in);
        try {
            if (ctx.isUnwrapRootValue()) {
                // Expect exactly { "<rootName>": <value> }.
                if (JsonToken.BEGIN_OBJECT != reader.peek()) {
                    throw ctx.traceError(
                        "Unwrap root value is enabled but the input is not a JSON Object", reader);
                }
                reader.beginObject();
                if (JsonToken.END_OBJECT == reader.peek()) {
                    throw ctx.traceError(
                        "Unwrap root value is enabled but the JSON Object is empty", reader);
                }
                String name = reader.nextName();
                if (!name.equals(rootName)) {
                    throw ctx.traceError(
                        "Unwrap root value is enabled but the name '" + name
                        + "' don't match the expected rootName " + "'" + rootName + "'", reader);
                }
                T result = getDeserializer().deserialize(reader, ctx);
                reader.endObject();
                return result;
            } else {
                return getDeserializer().deserialize(reader, ctx);
            }
        } catch (JsonDeserializationException e) {
            // already logged, we just throw it
            throw e;
        } catch (RuntimeException e) {
            // Attach the current reader position to unexpected failures.
            throw ctx.traceError(e, reader);
        }
    }
}
public class Balance { /** * Returns the amount of the < code > currency < / code > in this balance that is available to trade .
* @ return the amount that is available to trade . */
public BigDecimal getAvailable ( ) { } } | if ( available == null ) { return total . subtract ( frozen ) . subtract ( loaned ) . add ( borrowed ) . subtract ( withdrawing ) . subtract ( depositing ) ; } else { return available ; } |
public class DeviceInfo {

    /**
     * Initializes the device identifier. Don't run on the main thread.
     * <p>
     * Order matters: a provisional GUID is generated first so callers always
     * have some id while the Google ad id lookup runs; a fresh device id is
     * only generated when no usable id is already cached.
     */
    @SuppressWarnings({"WeakerAccess"})
    protected void initDeviceID() {
        getDeviceCachedInfo(); // put this here to avoid running on main thread
        // generate a provisional while we do the rest async
        generateProvisionalGUID();
        // grab and cache the googleAdID in any event if available
        // if we already have a deviceID we won't user ad id as the guid
        cacheGoogleAdID();
        // if we already have a device ID use it and just notify
        // otherwise generate one, either from ad id if available or the provisional
        String deviceID = getDeviceID();
        // "length <= 2" treats near-empty ids as missing — presumably filtering
        // placeholder values; TODO confirm against getDeviceID()'s contract.
        if (deviceID == null || deviceID.trim().length() <= 2) {
            generateDeviceID();
        }
    }
}
public class KerasLSTM { /** * Get layer output type .
* @ param inputType Array of InputTypes
* @ return output type as InputType
* @ throws InvalidKerasConfigurationException Invalid Keras config */
@ Override public InputType getOutputType ( InputType ... inputType ) throws InvalidKerasConfigurationException { } } | if ( inputType . length > 1 && inputType . length != 3 ) throw new InvalidKerasConfigurationException ( "Keras LSTM layer accepts only one single input" + "or three (input to LSTM and two states tensors, but " + "received " + inputType . length + "." ) ; InputPreProcessor preProcessor = getInputPreprocessor ( inputType ) ; if ( preProcessor != null ) { if ( returnSequences ) { return preProcessor . getOutputType ( inputType [ 0 ] ) ; } else { return this . getLSTMLayer ( ) . getOutputType ( - 1 , preProcessor . getOutputType ( inputType [ 0 ] ) ) ; } } else return this . getLSTMLayer ( ) . getOutputType ( - 1 , inputType [ 0 ] ) ; |
public class RequestBuilder { /** * Adds the given parameter to the map and returns this same object .
* @ param jsonParam JSON name of the parameter to add
* @ param obj Object to add to the map
* @ return This same object */
@ SuppressWarnings ( "unchecked" ) public < T extends RequestBuilder > T with ( final String jsonParam , final Object obj ) { } } | this . parameters . put ( jsonParam , obj ) ; return ( T ) this ; |
public class AbstractPlane4F { /** * Replies the intersection factor of the given segment
* when it is intersecting the plane .
* If the segment and the plane are not intersecting , this
* function replies { @ link Double # NaN } .
* If the segment and the plane are intersecting , two cases : < ol >
* < li > the segment is coplanar to the plane , then this function replies
* { @ link Double # POSITIVE _ INFINITY } . < / li >
* < li > the segment and the plane have a single point of intersection ,
* then this function replies the factor of the line ' s equation that
* permits to retreive the intersection point from the segment definition .
* < / ol >
* @ param a first component of the plane equation .
* @ param b second component of the plane equation .
* @ param c third component of the plane equation .
* @ param d fourth component of the plane equation .
* @ param sx1 x coordinate of the first point of the segment .
* @ param sy1 y coordinate of the first point of the segment .
* @ param sz1 z coordinate of the first axis of the oriented box .
* @ param sx2 x coordinate of the second point of the segment .
* @ param sy2 y coordinate of the second point of the segment .
* @ param sz2 z coordinate of the second axis of the oriented box .
* @ return the factor that permits to compute the intersection point ,
* { @ link Double # NaN } when no intersection , { @ link Double # POSITIVE _ INFINITY }
* when an infinite number of intersection points . */
@ Pure public static double getIntersectionFactorPlaneSegment ( double a , double b , double c , double d , double sx1 , double sy1 , double sz1 , double sx2 , double sy2 , double sz2 ) { } } | double denom = a * ( sx2 - sx1 ) + b * ( sy2 - sy1 ) + c * ( sz2 - sz1 ) ; if ( denom == 0. ) { // Segment and triangle ' s plane are parallel
// Compute the distance between a point of the segment and the plane .
double dist = a * sx1 + b * sy1 + c * sz1 + d ; if ( MathUtil . isEpsilonZero ( dist ) ) { return Double . POSITIVE_INFINITY ; } } else { double factor = ( - a * sx1 - b * sy1 - c * sz1 - d ) / denom ; if ( factor >= 0. && factor <= 1. ) { return factor ; } } return Double . NaN ; |
public class ProcessEngines { /** * retries to initialize a process engine that previously failed . */
public static ProcessEngineInfo retry ( String resourceUrl ) { } } | try { return initProcessEngineFromResource ( new URL ( resourceUrl ) ) ; } catch ( MalformedURLException e ) { throw new ProcessEngineException ( "invalid url: " + resourceUrl , e ) ; } |
public class ComponentDao {

    /**
     * Retrieves all components with a specific root project Uuid, no other
     * filtering is done by this method. Used by Views plugin.
     *
     * @param projectUuid the root project uuid to filter on
     * @param dbSession the database session the query runs in
     * @return all components belonging to the given root project
     */
    public List<ComponentDto> selectByProjectUuid(String projectUuid, DbSession dbSession) {
        // Plain delegation to the MyBatis mapper.
        return mapper(dbSession).selectByProjectUuid(projectUuid);
    }
}
public class GeneralPurposeFFT_F32_2D { /** * Computes 2D forward DFT of real data leaving the result in < code > a < / code >
* . This method only works when the sizes of both dimensions are
* power - of - two numbers . The physical layout of the output data is as
* follows :
* < pre >
* a [ k1 * columns + 2 * k2 ] = Re [ k1 ] [ k2 ] = Re [ rows - k1 ] [ columns - k2 ] ,
* a [ k1 * columns + 2 * k2 + 1 ] = Im [ k1 ] [ k2 ] = - Im [ rows - k1 ] [ columns - k2 ] ,
* 0 & lt ; k1 & lt ; rows , 0 & lt ; k2 & lt ; columns / 2,
* a [ 2 * k2 ] = Re [ 0 ] [ k2 ] = Re [ 0 ] [ columns - k2 ] ,
* a [ 2 * k2 + 1 ] = Im [ 0 ] [ k2 ] = - Im [ 0 ] [ columns - k2 ] ,
* 0 & lt ; k2 & lt ; columns / 2,
* a [ k1 * columns ] = Re [ k1 ] [ 0 ] = Re [ rows - k1 ] [ 0 ] ,
* a [ k1 * columns + 1 ] = Im [ k1 ] [ 0 ] = - Im [ rows - k1 ] [ 0 ] ,
* a [ ( rows - k1 ) * columns + 1 ] = Re [ k1 ] [ columns / 2 ] = Re [ rows - k1 ] [ columns / 2 ] ,
* a [ ( rows - k1 ) * columns ] = - Im [ k1 ] [ columns / 2 ] = Im [ rows - k1 ] [ columns / 2 ] ,
* 0 & lt ; k1 & lt ; rows / 2,
* a [ 0 ] = Re [ 0 ] [ 0 ] ,
* a [ 1 ] = Re [ 0 ] [ columns / 2 ] ,
* a [ ( rows / 2 ) * columns ] = Re [ rows / 2 ] [ 0 ] ,
* a [ ( rows / 2 ) * columns + 1 ] = Re [ rows / 2 ] [ columns / 2]
* < / pre >
* This method computes only half of the elements of the real transform . The
* other half satisfies the symmetry condition . If you want the full real
* forward transform , use < code > realForwardFull < / code > . To get back the
* original data , use < code > realInverse < / code > on the output of this method .
* @ param a
* data to transform */
public void realForward ( float [ ] a ) { } } | if ( isPowerOfTwo == false ) { throw new IllegalArgumentException ( "rows and columns must be power of two numbers" ) ; } else { for ( int r = 0 ; r < rows ; r ++ ) { fftColumns . realForward ( a , r * columns ) ; } cdft2d_sub ( - 1 , a , true ) ; rdft2d_sub ( 1 , a ) ; } |
public class ComponentGeneratorsUtil { /** * Resolve a variable element { @ link TypeName } . If the type cannot be resolved ( the TypeMirror is
* of kind ERROR ) , displays an explicit message .
* @ param variableElement The variable we want to resolve the type of
* @ param messager A messager to display the error if any
* @ return The { @ link TypeName } of our variable type . */
public static TypeName resolveVariableTypeName ( VariableElement variableElement , Messager messager ) { } } | // Resolve type
if ( variableElement . asType ( ) . getKind ( ) == TypeKind . ERROR ) { messager . printMessage ( Kind . ERROR , "Couldn't resolve type " + variableElement . asType ( ) + " for variable " + variableElement + ". If you are trying to inject a ComponentFactory inside a Component, please inject VueComponentFactory<MyComponent> instead." ) ; } TypeName typeName = ClassName . get ( variableElement . asType ( ) ) ; if ( typeName instanceof ParameterizedTypeName ) { ParameterizedTypeName parameterizedTypeName = ( ParameterizedTypeName ) typeName ; if ( parameterizedTypeName . rawType . equals ( ClassName . get ( VueComponentFactory . class ) ) ) { return componentFactoryName ( ( ClassName ) parameterizedTypeName . typeArguments . get ( 0 ) ) ; } } return typeName ; |
public class ToastCompat {

    /**
     * Make a standard toast that just contains a text view.
     *
     * @param context The context to use. Usually your {@link android.app.Application}
     *                or {@link android.app.Activity} object.
     * @param text The text to show. Can be formatted text.
     * @param duration How long to display the message. Either {@link #LENGTH_SHORT}
     *                 or {@link #LENGTH_LONG}
     * @return a {@code ToastCompat} wrapping a toast whose view uses a
     *         {@code SafeToastContext}
     */
    public static ToastCompat makeText(Context context, CharSequence text, int duration) {
        // We cannot pass the SafeToastContext to Toast.makeText() because
        // the View will unwrap the base context and we are in vain.
        @SuppressLint("ShowToast")
        Toast toast = Toast.makeText(context, text, duration);
        // Swap the view's context afterwards so showing the toast is crash-safe.
        setContextCompat(toast.getView(), new SafeToastContext(context, toast));
        return new ToastCompat(context, toast);
    }
}
public class ConsumerDispatcher {

    /**
     * Returns the consumer points managed by this dispatcher. The internal list
     * is cloned to stop illegal access to the ConsumerPoints controlled by this
     * ConsumerDispatcher.
     *
     * @return a shallow copy of the consumer point list
     */
    @Override
    public List<DispatchableKey> getConsumerPoints() {
        // Trace entry/exit in one place since the method body is trivial.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            SibTr.entry(tc, "getConsumerPoints");
            SibTr.exit(tc, "getConsumerPoints", consumerPoints);
        }
        // Clone so callers cannot mutate the dispatcher's own list.
        return (List<DispatchableKey>) consumerPoints.clone();
    }
}
public class AbstractAppender {

    /**
     * Handles an install request failure.
     *
     * @param member the member that could not be contacted
     * @param request the install request that failed (currently unused here;
     *                kept for subclasses/overloads — TODO confirm)
     * @param error the failure cause
     */
    protected void handleInstallRequestFailure(MemberState member, InstallRequest request, Throwable error) {
        // Log the failed attempt to contact the member.
        failAttempt(member, error);
    }
}
public class EntityAttribute {

    /**
     * indexed getter for names - gets an indexed value -
     * (UIMA-generated accessor; operates on the low-level CAS representation.)
     *
     * @generated
     * @param i index in the array to get
     * @return value of the element at index i
     */
    public Name getNames(int i) {
        // Generated feature-existence check for the "names" feature.
        if (EntityAttribute_Type.featOkTst && ((EntityAttribute_Type) jcasType).casFeat_names == null)
            jcasType.jcas.throwFeatMissing("names", "de.julielab.jules.types.ace.EntityAttribute");
        // Bounds check against the underlying FS array before dereferencing.
        jcasType.jcas.checkArrayBounds(
            jcasType.ll_cas.ll_getRefValue(addr, ((EntityAttribute_Type) jcasType).casFeatCode_names), i);
        // Fetch the i-th reference from the array and resolve it to a Name FS.
        return (Name) (jcasType.ll_cas.ll_getFSForRef(
            jcasType.ll_cas.ll_getRefArrayValue(
                jcasType.ll_cas.ll_getRefValue(addr, ((EntityAttribute_Type) jcasType).casFeatCode_names), i)));
    }
}
public class Util { /** * fails try replacing the last . with $ . */
public static Class < ? > lenientClassForName ( String className ) throws ClassNotFoundException { } } | try { // TODO : consider whether any control over class loader is really needed here
// ( forName ( className , true , Thread . currentThread ( ) . getContextClassLoader ( ) ) ? )
return Class . forName ( className ) ; } catch ( ClassNotFoundException ignored ) { // try replacing the last dot with a $ , in case that helps
// example : tutorial . Tutorial . Benchmark1 becomes tutorial . Tutorial $ Benchmark1
// amusingly , the $ character means three different things in this one line alone
String newName = className . replaceFirst ( "\\.([^.]+)$" , "\\$$1" ) ; return Class . forName ( newName ) ; } |
public class API { /** * complementaryPyCcLib .
* @ return a { @ link java . util . List } object .
* @ throws java . io . IOException
* if any . */
public static List < String > complementaryPyCcLib ( ) throws IOException { } } | if ( complementaryLibs == null ) { complementaryLibs = new LinkedList < String > ( ) ; InputStream inputStream = API . class . getClassLoader ( ) . getResourceAsStream ( FILE ) ; PropertyResourceBundle labels = new PropertyResourceBundle ( inputStream ) ; Enumeration < String > bundleKeys = labels . getKeys ( ) ; while ( bundleKeys . hasMoreElements ( ) ) { String key ; if ( KEY . equals ( key = bundleKeys . nextElement ( ) ) ) { String value = labels . getString ( key ) ; if ( value != null && ! value . isEmpty ( ) ) { String [ ] values = value . split ( "," ) ; for ( String v : values ) { complementaryLibs . add ( v . trim ( ) ) ; } } } } } return complementaryLibs ; |
public class ArrayUtils {

    /**
     * Returns a random element from the given array that's not in the values to
     * exclude.
     *
     * @param array array to return random element from
     * @param excludes values to exclude
     * @param <T> the type of elements in the given array
     * @return random element from the given array that's not in the values to exclude
     * @throws IllegalArgumentException if the array is empty
     */
    @SafeVarargs
    public static <T> T randomFrom(T[] array, T... excludes) {
        // Delegate to the Collection-based overload.
        return randomFrom(array, Arrays.asList(excludes));
    }
}
public class LoggedInUserManager {

    /**
     * Login the passed user and require a set of certain roles, the used needs
     * to have to login here.
     * <p>
     * Validation order: existence, deleted flag, disabled flag, password,
     * required roles. On success the password hash is transparently upgraded to
     * the current default algorithm, the user is registered in the logged-in map
     * and the session holder (under the write lock), and login callbacks run
     * last, outside the lock.
     *
     * @param aUser The user to log-in. May be <code>null</code>. When the user
     *              is <code>null</code> the login must fail.
     * @param sPlainTextPassword Plain text password to use. May be <code>null</code>.
     * @param aRequiredRoleIDs A set of required role IDs, the user needs to
     *                         have. May be <code>null</code>.
     * @return Never <code>null</code> login status.
     */
    @Nonnull
    public ELoginResult loginUser(
        @Nullable final IUser aUser,
        @Nullable final String sPlainTextPassword,
        @Nullable final Iterable<String> aRequiredRoleIDs) {
        if (aUser == null)
            return ELoginResult.USER_NOT_EXISTING;

        final String sUserID = aUser.getID();

        // Deleted user?
        if (aUser.isDeleted()) {
            AuditHelper.onAuditExecuteFailure("login", sUserID, "user-is-deleted");
            return _onLoginError(sUserID, ELoginResult.USER_IS_DELETED);
        }

        // Disabled user?
        if (aUser.isDisabled()) {
            AuditHelper.onAuditExecuteFailure("login", sUserID, "user-is-disabled");
            return _onLoginError(sUserID, ELoginResult.USER_IS_DISABLED);
        }

        // Check the password
        final UserManager aUserMgr = PhotonSecurityManager.getUserMgr();
        if (!aUserMgr.areUserIDAndPasswordValid(sUserID, sPlainTextPassword)) {
            AuditHelper.onAuditExecuteFailure("login", sUserID, "invalid-password");
            return _onLoginError(sUserID, ELoginResult.INVALID_PASSWORD);
        }

        // Are all roles present?
        if (!SecurityHelper.hasUserAllRoles(sUserID, aRequiredRoleIDs)) {
            AuditHelper.onAuditExecuteFailure("login", sUserID, "user-is-missing-required-roles", aRequiredRoleIDs);
            return _onLoginError(sUserID, ELoginResult.USER_IS_MISSING_ROLE);
        }

        // Check if the password hash needs to be updated
        final String sExistingPasswordHashAlgorithmName = aUser.getPasswordHash().getAlgorithmName();
        final String sDefaultPasswordHashAlgorithmName =
            GlobalPasswordSettings.getPasswordHashCreatorManager().getDefaultPasswordHashCreatorAlgorithmName();
        if (!sExistingPasswordHashAlgorithmName.equals(sDefaultPasswordHashAlgorithmName)) {
            // This implicitly implies using the default hash creator algorithm
            // This automatically saves the file
            aUserMgr.setUserPassword(sUserID, sPlainTextPassword);
            if (LOGGER.isInfoEnabled())
                LOGGER.info("Updated password hash of " + _getUserIDLogText(sUserID)
                    + " from algorithm '" + sExistingPasswordHashAlgorithmName
                    + "' to '" + sDefaultPasswordHashAlgorithmName + "'");
        }

        boolean bLoggedOutUser = false;
        LoginInfo aInfo;
        // All logged-in-user bookkeeping happens under the write lock.
        m_aRWLock.writeLock().lock();
        try {
            if (m_aLoggedInUsers.containsKey(sUserID)) {
                // The user is already logged in
                if (isLogoutAlreadyLoggedInUser()) {
                    // Explicitly log out
                    logoutUser(sUserID);
                    // Just a short check
                    if (m_aLoggedInUsers.containsKey(sUserID))
                        throw new IllegalStateException("Failed to logout '" + sUserID + "'");
                    AuditHelper.onAuditExecuteSuccess("logout-in-login", sUserID);
                    bLoggedOutUser = true;
                } else {
                    // Error: user already logged in
                    AuditHelper.onAuditExecuteFailure("login", sUserID, "user-already-logged-in");
                    return _onLoginError(sUserID, ELoginResult.USER_ALREADY_LOGGED_IN);
                }
            }

            // Update user in session
            final InternalSessionUserHolder aSUH = InternalSessionUserHolder._getInstance();
            if (aSUH._hasUser()) {
                // This session already has a user
                if (LOGGER.isWarnEnabled())
                    LOGGER.warn("The session user holder already has the user ID '" + aSUH._getUserID()
                        + "' so the new ID '" + sUserID + "' will not be set!");
                AuditHelper.onAuditExecuteFailure("login", sUserID, "session-already-has-user");
                return _onLoginError(sUserID, ELoginResult.SESSION_ALREADY_HAS_USER);
            }

            aInfo = new LoginInfo(aUser, ScopeManager.getSessionScope());
            m_aLoggedInUsers.put(sUserID, aInfo);
            aSUH._setUser(this, aUser);
        } finally {
            m_aRWLock.writeLock().unlock();
        }

        if (LOGGER.isInfoEnabled())
            LOGGER.info("Logged in " + _getUserIDLogText(sUserID)
                + (isAnonymousLogging() ? "" : " with login name '" + aUser.getLoginName() + "'"));
        AuditHelper.onAuditExecuteSuccess("login-user", sUserID, aUser.getLoginName());

        // Execute callback as the very last action
        m_aUserLoginCallbacks.forEach(aCB -> aCB.onUserLogin(aInfo));
        return bLoggedOutUser ? ELoginResult.SUCCESS_WITH_LOGOUT : ELoginResult.SUCCESS;
    }
}
public class SMPPSession { /** * / * ( non - Javadoc )
* @ see org . jsmpp . session . ClientSession # submitShortMessage ( java . lang . String , org . jsmpp . bean . TypeOfNumber , org . jsmpp . bean . NumberingPlanIndicator , java . lang . String , org . jsmpp . bean . TypeOfNumber , org . jsmpp . bean . NumberingPlanIndicator , java . lang . String , org . jsmpp . bean . ESMClass , byte , byte , java . lang . String , java . lang . String , org . jsmpp . bean . RegisteredDelivery , byte , org . jsmpp . bean . DataCoding , byte , byte [ ] , org . jsmpp . bean . OptionalParameter [ ] ) */
public String submitShortMessage ( String serviceType , TypeOfNumber sourceAddrTon , NumberingPlanIndicator sourceAddrNpi , String sourceAddr , TypeOfNumber destAddrTon , NumberingPlanIndicator destAddrNpi , String destinationAddr , ESMClass esmClass , byte protocolId , byte priorityFlag , String scheduleDeliveryTime , String validityPeriod , RegisteredDelivery registeredDelivery , byte replaceIfPresentFlag , DataCoding dataCoding , byte smDefaultMsgId , byte [ ] shortMessage , OptionalParameter ... optionalParameters ) throws PDUException , ResponseTimeoutException , InvalidResponseException , NegativeResponseException , IOException { } } | ensureTransmittable ( "submitShortMessage" ) ; SubmitSmCommandTask submitSmTask = new SubmitSmCommandTask ( pduSender ( ) , serviceType , sourceAddrTon , sourceAddrNpi , sourceAddr , destAddrTon , destAddrNpi , destinationAddr , esmClass , protocolId , priorityFlag , scheduleDeliveryTime , validityPeriod , registeredDelivery , replaceIfPresentFlag , dataCoding , smDefaultMsgId , shortMessage , optionalParameters ) ; SubmitSmResp resp = ( SubmitSmResp ) executeSendCommand ( submitSmTask , getTransactionTimer ( ) ) ; return resp . getMessageId ( ) ; |
public class Forbidden { /** * Returns a static Forbidden instance and set the { @ link # payload } thread local
* with error code and default message .
* When calling the instance on { @ link # getMessage ( ) } method , it will return whatever
* stored in the { @ link # payload } thread local
* @ param errorCode the app defined error code
* @ return a static Forbidden instance as described above */
public static Forbidden of ( int errorCode ) { } } | touchPayload ( ) . errorCode ( errorCode ) ; return _localizedErrorMsg ( ) ? of ( defaultMessage ( FORBIDDEN ) ) : INSTANCE ; |
public class RowRangeAdapter { /** * Convert guava ' s { @ link RangeSet } to Bigtable ' s { @ link ByteStringRange } . Please note that this will convert
* boundless ranges into unset key cases . */
@ VisibleForTesting void rangeSetToByteStringRange ( RangeSet < RowKeyWrapper > guavaRangeSet , Query query ) { } } | for ( Range < RowKeyWrapper > guavaRange : guavaRangeSet . asRanges ( ) ) { // Is it a point ?
if ( guavaRange . hasLowerBound ( ) && guavaRange . lowerBoundType ( ) == BoundType . CLOSED && guavaRange . hasUpperBound ( ) && guavaRange . upperBoundType ( ) == BoundType . CLOSED && guavaRange . lowerEndpoint ( ) . equals ( guavaRange . upperEndpoint ( ) ) ) { query . rowKey ( guavaRange . lowerEndpoint ( ) . getKey ( ) ) ; } else { ByteStringRange byteRange = ByteStringRange . unbounded ( ) ; // Handle start key
if ( guavaRange . hasLowerBound ( ) ) { switch ( guavaRange . lowerBoundType ( ) ) { case CLOSED : byteRange . startClosed ( guavaRange . lowerEndpoint ( ) . getKey ( ) ) ; break ; case OPEN : byteRange . startOpen ( guavaRange . lowerEndpoint ( ) . getKey ( ) ) ; break ; default : throw new IllegalArgumentException ( "Unexpected lower bound type: " + guavaRange . lowerBoundType ( ) ) ; } } // handle end key
if ( guavaRange . hasUpperBound ( ) ) { switch ( guavaRange . upperBoundType ( ) ) { case CLOSED : byteRange . endClosed ( guavaRange . upperEndpoint ( ) . getKey ( ) ) ; break ; case OPEN : byteRange . endOpen ( guavaRange . upperEndpoint ( ) . getKey ( ) ) ; break ; default : throw new IllegalArgumentException ( "Unexpected upper bound type: " + guavaRange . upperBoundType ( ) ) ; } } query . range ( byteRange ) ; } } |
public class StringUtil { /** * Swaps the case of a String changing upper and title case to lower case ,
* and lower case to upper case .
* < ul >
* < li > Upper case character converts to Lower case < / li >
* < li > Title case character converts to Lower case < / li >
* < li > Lower case character converts to Upper case < / li >
* < / ul >
* For a word based algorithm , see
* { @ link org . apache . commons . lang3 . text . WordUtils # swapCase ( String ) } . A
* { @ code null } input String returns { @ code null } .
* < pre >
* N . swapCase ( null ) = null
* N . swapCase ( " " ) = " "
* N . swapCase ( " The dog has a BONE " ) = " tHE DOG HAS A bone "
* < / pre >
* NOTE : This method changed in Lang version 2.0 . It no longer performs a
* word based algorithm . If you only use ASCII , you will notice no change .
* That functionality is available in
* org . apache . commons . lang3 . text . WordUtils .
* @ param str
* the String to swap case , may be null
* @ return the changed String , { @ code null } if null String input */
public static String swapCase ( final String str ) { } } | if ( N . isNullOrEmpty ( str ) ) { return str ; } final char [ ] cbuf = str . toCharArray ( ) ; char ch = 0 ; for ( int i = 0 , len = cbuf . length ; i < len ; i ++ ) { ch = cbuf [ i ] ; if ( Character . isUpperCase ( ch ) || Character . isTitleCase ( ch ) ) { cbuf [ i ] = Character . toLowerCase ( ch ) ; } else if ( Character . isLowerCase ( ch ) ) { cbuf [ i ] = Character . toUpperCase ( ch ) ; } } return newString ( cbuf , true ) ; |
public class PruneStructureFromSceneMetric {

    /**
     * Prunes a feature if it has fewer than X neighbors within Y distance. Observations
     * associated with the feature are also pruned.
     * Call {@link #pruneViews(int)} to make sure the graph is valid.
     *
     * @param neighbors Number of other features which need to be nearby
     * @param distance Maximum distance a point can be to be considered a feature
     */
    public void prunePoints(int neighbors, double distance) {
        // Use a nearest neighbor search to find near by points
        Point3D_F64 worldX = new Point3D_F64();
        List<Point3D_F64> cloud = new ArrayList<>();
        for (int i = 0; i < structure.points.length; i++) {
            SceneStructureMetric.Point structureP = structure.points[i];
            structureP.get(worldX);
            // copy() is required: worldX is a scratch variable reused every iteration
            cloud.add(worldX.copy());
        }
        NearestNeighbor<Point3D_F64> nn = FactoryNearestNeighbor.kdtree(new KdTreePoint3D_F64());
        NearestNeighbor.Search<Point3D_F64> search = nn.createSearch();
        nn.setPoints(cloud, false);
        FastQueue<NnData<Point3D_F64>> resultsNN = new FastQueue(NnData.class, true);

        // Create a look up table containing from old to new indexes for each point;
        // -1 marks a pruned point (any later use of -1 as an index would be a bug)
        int oldToNew[] = new int[structure.points.length];
        Arrays.fill(oldToNew, -1); // crash is bug

        // List of point ID's which are to be removed.
        GrowQueue_I32 prunePointID = new GrowQueue_I32();

        // identify points which need to be pruned
        for (int pointId = 0; pointId < structure.points.length; pointId++) {
            SceneStructureMetric.Point structureP = structure.points[pointId];
            structureP.get(worldX);

            // distance is squared; neighbors+1 requested because the search also finds the query point
            search.findNearest(cloud.get(pointId), distance * distance, neighbors + 1, resultsNN);

            // Don't prune if it has enough neighbors. Remember that it will always find itself.
            if (resultsNN.size() > neighbors) {
                // surviving point: new index = old index minus number pruned so far
                oldToNew[pointId] = pointId - prunePointID.size;
                continue;
            }

            prunePointID.add(pointId);

            // Remove observations of this point
            for (int viewIdx = 0; viewIdx < structureP.views.size; viewIdx++) {
                SceneObservations.View v = observations.getView(structureP.views.data[viewIdx]);

                int pointIdx = v.point.indexOf(pointId);
                if (pointIdx < 0)
                    throw new RuntimeException("Bad structure. Point not found in view's observation " +
                            "which was in its structure");
                v.remove(pointIdx);
            }
        }

        pruneUpdatePointID(oldToNew, prunePointID);
    }
}
public class CreateCommitResult { /** * The files added as part of the committed file changes .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setFilesAdded ( java . util . Collection ) } or { @ link # withFilesAdded ( java . util . Collection ) } if you want to
* override the existing values .
* @ param filesAdded
* The files added as part of the committed file changes .
* @ return Returns a reference to this object so that method calls can be chained together . */
public CreateCommitResult withFilesAdded ( FileMetadata ... filesAdded ) { } } | if ( this . filesAdded == null ) { setFilesAdded ( new java . util . ArrayList < FileMetadata > ( filesAdded . length ) ) ; } for ( FileMetadata ele : filesAdded ) { this . filesAdded . add ( ele ) ; } return this ; |
public class Utils { /** * Get the concatenated value of all Text nodes that are immediate children of the
* given Element . If the element has no content , it will not have a child Text node .
* If it does have content , it will usually have a single child Text node . But in
* rare cases it could have multiple child Text nodes . If multiple child Text nodes
* are found , their content is concatenated into a single string , each separated by a
* single space . The value returned is trimmed of beginning and ending whitespace .
* If the element has no child Text nodes , or if all child Text nodes are empty or
* have whitespace - only values , an empty string is returned .
* @ param elem Element to examine .
* @ return Concatenated text of all child Text nodes . An empty string is returned
* if there are no child Text nodes or they are all empty or contain only
* whitespace . */
public static String getElementText ( Element elem ) { } } | StringBuilder result = new StringBuilder ( ) ; NodeList nodeList = elem . getChildNodes ( ) ; for ( int index = 0 ; index < nodeList . getLength ( ) ; index ++ ) { Node childNode = nodeList . item ( index ) ; if ( childNode != null && ( childNode instanceof Text ) ) { result . append ( " " ) ; result . append ( ( ( Text ) childNode ) . getData ( ) ) ; } } return result . toString ( ) . trim ( ) ; |
public class ColumnPrinter {

    /**
     * Generates the output as a list of string lines: one header line with the column
     * names, followed by one line per data row, each column padded to its computed width.
     *
     * @return lines
     */
    List<String> generate() {
        List<String> lines = Lists.newArrayList();
        StringBuilder workStr = new StringBuilder();

        // Header row: each column name padded to that column's width
        List<AtomicInteger> columnWidths = getColumnWidths();
        List<Iterator<String>> dataIterators = getDataIterators();
        Iterator<AtomicInteger> columnWidthIterator = columnWidths.iterator();
        for (String columnName : columnNames) {
            int thisWidth = columnWidthIterator.next().intValue();
            printValue(workStr, columnName, thisWidth);
        }
        pushLine(lines, workStr);

        // Data rows: advance every column's iterator in lock-step; columns that run
        // out of values early are padded with empty strings.
        boolean done = false;
        while (!done) {
            boolean hadValue = false;
            Iterator<Iterator<String>> rowIterator = dataIterators.iterator();
            for (AtomicInteger width : columnWidths) {
                Iterator<String> thisDataIterator = rowIterator.next();
                if (thisDataIterator.hasNext()) {
                    hadValue = true;
                    String value = thisDataIterator.next();
                    printValue(workStr, value, width.intValue());
                } else {
                    printValue(workStr, "", width.intValue());
                }
            }
            pushLine(lines, workStr);
            if (!hadValue) {
                // NOTE(review): termination is detected only after an all-blank line has
                // been emitted, so the output ends with one padding-only line — appears
                // intentional; confirm against callers before changing.
                done = true;
            }
        }
        return lines;
    }
}
public class CrudDispatcher { /** * Performs a delete for the corresponding object from the in - memory db .
* @ param path
* @ return */
public MockResponse handleDelete ( String path ) { } } | MockResponse response = new MockResponse ( ) ; List < AttributeSet > items = new ArrayList < > ( ) ; AttributeSet query = attributeExtractor . extract ( path ) ; for ( Map . Entry < AttributeSet , String > entry : map . entrySet ( ) ) { if ( entry . getKey ( ) . matches ( query ) ) { items . add ( entry . getKey ( ) ) ; } } if ( ! items . isEmpty ( ) ) { for ( AttributeSet item : items ) { map . remove ( item ) ; } response . setResponseCode ( 200 ) ; } else { response . setResponseCode ( 404 ) ; } return response ; |
public class AnnotationInfo {

    /**
     * Sets the annotation's attribute information, for JAXB use.
     * <p>Called while reading XML.
     * <br>Note that on Java 8 and later this setter is not invoked; instead elements are
     * added directly to the instance obtained via {@link #getAttributeInfos()}.
     * <p>Any existing information is cleared.</p>
     *
     * @since 1.1
     * @param attributeInfos the annotation's attribute information.
     */
    @XmlElement(name = "attribute")
    public void setAttributeInfos(final List<AttributeInfo> attributeInfos) {
        if (attributeInfos == this.attributes) {
            // On Java 7, JAXB sets back the very instance obtained from the getter,
            // so skip: clearing it here would wipe the list we were just handed.
            return;
        }
        this.attributes.clear();
        for (AttributeInfo attr : attributeInfos) {
            addAttribute(attr.name, attr.value);
        }
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.