signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Factories {
    /**
     * Creates a new instance of the class whose fully qualified name is configured under
     * the given key, loading it with the current thread's context ClassLoader.
     * <br/>
     * If {@code settings} is null or does not contain the key, the key's default value
     * from the {@link Defaults defaults} is used instead.
     *
     * @param settings the configuration settings that may override the default class name
     * @param key the key under which the fully qualified class name is stored
     * @return a new instance of the configured class
     * @throws ServiceLocationException if the instance cannot be created
     * @see #newInstance(Settings, Key, ClassLoader)
     */
    public static <T> T newInstance(Settings settings, Key<String> key) throws ServiceLocationException {
        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
        // Explicit type witness works around javac bug #6302954.
        return Factories.<T>newInstance(settings, key, contextClassLoader);
    }
}
public class AvailabilityForecast {
    /**
     * Sets the targetingCriteriaBreakdowns value for this AvailabilityForecast.
     *
     * @param targetingCriteriaBreakdowns the forecast result broken down by the targeting
     *        of the forecasted line item
     */
    public void setTargetingCriteriaBreakdowns(com.google.api.ads.admanager.axis.v201808.TargetingCriteriaBreakdown[] targetingCriteriaBreakdowns) {
        // Plain field assignment; the array is stored by reference, not copied.
        this.targetingCriteriaBreakdowns = targetingCriteriaBreakdowns;
    }
}
public class Unmarshaller { /** * Unamrshals the identifier . * @ throws Throwable * propagated */ private void unmarshalIdentifier ( ) throws Throwable { } }
IdentifierMetadata identifierMetadata = entityMetadata . getIdentifierMetadata ( ) ; Object id = ( ( Key ) nativeEntity . getKey ( ) ) . getNameOrId ( ) ; // If the ID is not a simple type . . . IdClassMetadata idClassMetadata = identifierMetadata . getIdClassMetadata ( ) ; if ( idClassMetadata != null ) { Object wrappedId = idClassMetadata . getConstructor ( ) . invoke ( id ) ; id = wrappedId ; } // Now set the ID ( either simple or complex ) on the Entity MethodHandle writeMethod = identifierMetadata . getWriteMethod ( ) ; writeMethod . invoke ( entity , id ) ;
public class WebUtils {
    /**
     * URL-encodes a request parameter value with the specified charset.
     *
     * @param value the parameter value; null or empty input yields null
     * @param charset the charset name to encode with (e.g. "UTF-8")
     * @return the encoded value, or null when {@code value} is null or empty
     * @throws RuntimeException wrapping an {@link UnsupportedEncodingException}
     *         if {@code charset} is not a supported charset name
     */
    public static String encode(String value, String charset) {
        // Preserve the original contract: null/empty input returns null.
        if (value == null || value.isEmpty()) {
            return null;
        }
        try {
            return URLEncoder.encode(value, charset);
        } catch (UnsupportedEncodingException e) {
            // URLEncoder.encode(String, String) can only fail for an unknown
            // charset, so catch the narrow type instead of IOException.
            throw new RuntimeException(e);
        }
    }
}
public class DeepEquals { /** * Get a deterministic hashCode ( int ) value for an Object , regardless of * when it was created or where it was loaded into memory . The problem * with java . lang . Object . hashCode ( ) is that it essentially relies on * memory location of an object ( what identity it was assigned ) , whereas * this method will produce the same hashCode for any object graph , regardless * of how many times it is created . < br > < br > * This method will handle cycles correctly ( A - & gt ; B - & gt ; C - & gt ; A ) . In this case , * Starting with object A , B , or C would yield the same hashCode . If an * object encountered ( root , suboject , etc . ) has a hashCode ( ) method on it * ( that is not Object . hashCode ( ) ) , that hashCode ( ) method will be called * and it will stop traversal on that branch . * @ param obj Object who hashCode is desired . * @ return the ' deep ' hashCode value for the passed in object . */ public static int deepHashCode ( Object obj ) { } }
Set < Object > visited = new HashSet < > ( ) ; LinkedList < Object > stack = new LinkedList < > ( ) ; stack . addFirst ( obj ) ; int hash = 0 ; while ( ! stack . isEmpty ( ) ) { obj = stack . removeFirst ( ) ; if ( obj == null || visited . contains ( obj ) ) { continue ; } visited . add ( obj ) ; if ( obj . getClass ( ) . isArray ( ) ) { int len = Array . getLength ( obj ) ; for ( int i = 0 ; i < len ; i ++ ) { stack . addFirst ( Array . get ( obj , i ) ) ; } continue ; } if ( obj instanceof Collection ) { stack . addAll ( 0 , ( Collection ) obj ) ; continue ; } if ( obj instanceof Map ) { stack . addAll ( 0 , ( ( Map ) obj ) . keySet ( ) ) ; stack . addAll ( 0 , ( ( Map ) obj ) . values ( ) ) ; continue ; } if ( obj instanceof Double || obj instanceof Float ) { // just take the integral value for hashcode // equality tests things more comprehensively stack . add ( Math . round ( ( ( Number ) obj ) . doubleValue ( ) ) ) ; continue ; } if ( hasCustomHashCode ( obj . getClass ( ) ) ) { // A real hashCode ( ) method exists , call it . hash += obj . hashCode ( ) ; continue ; } Collection < Field > fields = ReflectionUtils . getDeepDeclaredFields ( obj . getClass ( ) ) ; for ( Field field : fields ) { try { stack . addFirst ( field . get ( obj ) ) ; } catch ( Exception ignored ) { } } } return hash ;
public class LanguageServiceClient { /** * Finds named entities ( currently proper names and common nouns ) in the text along with entity * types , salience , mentions for each entity , and other properties . * < p > Sample code : * < pre > < code > * try ( LanguageServiceClient languageServiceClient = LanguageServiceClient . create ( ) ) { * Document document = Document . newBuilder ( ) . build ( ) ; * AnalyzeEntitiesResponse response = languageServiceClient . analyzeEntities ( document ) ; * < / code > < / pre > * @ param document Input document . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final AnalyzeEntitiesResponse analyzeEntities ( Document document ) { } }
AnalyzeEntitiesRequest request = AnalyzeEntitiesRequest . newBuilder ( ) . setDocument ( document ) . build ( ) ; return analyzeEntities ( request ) ;
public class DateUtil { /** * 将长整型数字转换为日期格式的字符串 */ public static String convert2String ( long time , String format ) { } }
if ( time > 0l ) { if ( StringUtil . isEmpty ( format ) ) format = dateTimeFormat ; SimpleDateFormat sf = new SimpleDateFormat ( format ) ; Date date = new Date ( time ) ; return sf . format ( date ) ; } return "" ;
public class ChronoLocalDateImpl { /** * Returns a copy of this date with the specified number of months subtracted . * This subtracts the specified period in months to the date . * In some cases , subtracting months can cause the resulting date to become invalid . * If this occurs , then other fields , typically the day - of - month , will be adjusted to ensure * that the result is valid . Typically this will select the last valid day of the month . * The default implementation uses { @ link # plusMonths ( long ) } . * This instance is immutable and unaffected by this method call . * @ param monthsToSubtract the months to subtract , may be negative * @ return a date based on this one with the months subtracted , not null * @ throws DateTimeException if the result exceeds the supported date range */ @ SuppressWarnings ( "unchecked" ) D minusMonths ( long monthsToSubtract ) { } }
return ( monthsToSubtract == Long . MIN_VALUE ? ( ( ChronoLocalDateImpl < D > ) plusMonths ( Long . MAX_VALUE ) ) . plusMonths ( 1 ) : plusMonths ( - monthsToSubtract ) ) ;
public class IRFactory {
    /**
     * Try/Catch/Finally.
     *
     * The IRFactory tries to express as much as possible in the tree; the
     * responsibilities remaining for Codegen are to add the Java handlers
     * (either, but not both, of TARGET and FINALLY might not be defined):
     * - a catch handler for javascript exceptions that unwraps the exception
     *   onto the stack and GOTOes to the catch target
     * - a finally handler
     * ... and a GOTO around these handlers.
     */
    private Node createTryCatchFinally(Node tryBlock, Node catchBlocks, Node finallyBlock, int lineno) {
        // A finally clause counts only if it is non-BLOCK or has children.
        boolean hasFinally = (finallyBlock != null) && (finallyBlock.getType() != Token.BLOCK || finallyBlock.hasChildren());
        // short circuit: empty try with no finally — nothing to wrap
        if (tryBlock.getType() == Token.BLOCK && !tryBlock.hasChildren() && !hasFinally) {
            return tryBlock;
        }
        boolean hasCatch = catchBlocks.hasChildren();
        // short circuit: no handlers at all (finally might be an empty block)
        if (!hasFinally && !hasCatch) {
            return tryBlock;
        }
        // handlerBlock is the local-block register the runtime stores the
        // caught exception object into.
        Node handlerBlock = new Node(Token.LOCAL_BLOCK);
        Jump pn = new Jump(Token.TRY, tryBlock, lineno);
        pn.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
        if (hasCatch) {
            // jump around catch code on normal completion
            Node endCatch = Node.newTarget();
            pn.addChildToBack(makeJump(Token.GOTO, endCatch));
            // make a TARGET for the catch that the tcf node knows about
            Node catchTarget = Node.newTarget();
            pn.target = catchTarget;
            // mark it
            pn.addChildToBack(catchTarget);
            // Rewrite
            //   try { tryBlock; }
            //   catch (e if cond1) { something1; }
            //   ...
            //   catch (e) { somethingDefault; }
            // as
            //   try { tryBlock; goto after_catch; }
            //   catch (x) {
            //     with (newCatchScope(e, x)) { if (cond1) { something1; goto after_catch; } }
            //     ...
            //     with (newCatchScope(e, x)) { somethingDefault; goto after_catch; }
            //   }
            //   after_catch:
            // With no default catch, the last with-block is replaced by a rethrow.
            // Catch handler generation stores the exception object in the
            // handlerBlock register.
            // Block with local for exception scope objects.
            Node catchScopeBlock = new Node(Token.LOCAL_BLOCK);
            // expects catchblocks children to be (name cond block) triples
            Node cb = catchBlocks.getFirstChild();
            boolean hasDefault = false;
            int scopeIndex = 0;
            while (cb != null) {
                int catchLineNo = cb.getLineno();
                Node name = cb.getFirstChild();
                Node cond = name.getNext();
                Node catchStatement = cond.getNext();
                cb.removeChild(name);
                cb.removeChild(cond);
                cb.removeChild(catchStatement);
                // Add goto to jump out of the catch, prefixed with LEAVEWITH since
                // try..catch produces "with" code to limit the exception's scope.
                catchStatement.addChildToBack(new Node(Token.LEAVEWITH));
                catchStatement.addChildToBack(makeJump(Token.GOTO, endCatch));
                // Create condition "if" when present; an EMPTY cond marks the
                // unconditional default catch.
                Node condStmt;
                if (cond.getType() == Token.EMPTY) {
                    condStmt = catchStatement;
                    hasDefault = true;
                } else {
                    condStmt = createIf(cond, catchStatement, null, catchLineNo);
                }
                // Generate code to create the scope object and store it in the
                // catchScopeBlock register.
                Node catchScope = new Node(Token.CATCH_SCOPE, name, createUseLocal(handlerBlock));
                catchScope.putProp(Node.LOCAL_BLOCK_PROP, catchScopeBlock);
                catchScope.putIntProp(Node.CATCH_SCOPE_PROP, scopeIndex);
                catchScopeBlock.addChildToBack(catchScope);
                // Add with statement based on catch scope object.
                catchScopeBlock.addChildToBack(createWith(createUseLocal(catchScopeBlock), condStmt, catchLineNo));
                // move to next cb
                cb = cb.getNext();
                ++scopeIndex;
            }
            pn.addChildToBack(catchScopeBlock);
            if (!hasDefault) {
                // Generate code to rethrow if no catch clause was executed.
                Node rethrow = new Node(Token.RETHROW);
                rethrow.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
                pn.addChildToBack(rethrow);
            }
            pn.addChildToBack(endCatch);
        }
        if (hasFinally) {
            Node finallyTarget = Node.newTarget();
            pn.setFinally(finallyTarget);
            // add jsr finally to the try block
            pn.addChildToBack(makeJump(Token.JSR, finallyTarget));
            // jump around finally code on normal completion
            Node finallyEnd = Node.newTarget();
            pn.addChildToBack(makeJump(Token.GOTO, finallyEnd));
            pn.addChildToBack(finallyTarget);
            Node fBlock = new Node(Token.FINALLY, finallyBlock);
            fBlock.putProp(Node.LOCAL_BLOCK_PROP, handlerBlock);
            pn.addChildToBack(fBlock);
            pn.addChildToBack(finallyEnd);
        }
        handlerBlock.addChildToBack(pn);
        return handlerBlock;
    }
}
public class FastAdapter { /** * wraps notifyDataSetChanged */ public void notifyAdapterDataSetChanged ( ) { } }
// handle our extensions for ( IAdapterExtension < Item > ext : mExtensions . values ( ) ) { ext . notifyAdapterDataSetChanged ( ) ; } cacheSizes ( ) ; notifyDataSetChanged ( ) ;
public class CommerceCurrencyUtil {
    /**
     * Removes the commerce currency where groupId = &#63; and code = &#63; from the database.
     *
     * @param groupId the group ID
     * @param code the code
     * @return the commerce currency that was removed
     * @throws com.liferay.commerce.currency.exception.NoSuchCurrencyException
     *         if no currency matches the given group and code
     */
    public static CommerceCurrency removeByG_C(long groupId, String code) throws com.liferay.commerce.currency.exception.NoSuchCurrencyException {
        // Thin static facade over the persistence layer.
        return getPersistence().removeByG_C(groupId, code);
    }
}
public class BaseXmlExporter { /** * { @ inheritDoc } */ public void visit ( NodeData node ) throws RepositoryException { } }
try { entering ( node , currentLevel ) ; if ( ( maxLevel == - 1 ) || ( currentLevel < maxLevel ) ) { currentLevel ++ ; List < PropertyData > properies = new ArrayList < PropertyData > ( dataManager . getChildPropertiesData ( node ) ) ; // Sorting properties Collections . sort ( properies , new PropertyDataOrderComparator ( ) ) ; for ( PropertyData data : properies ) { InternalQName propName = data . getQPath ( ) . getName ( ) ; // 7.3.3 Respecting Property Semantics // When an element or attribute representing such a property is // encountered , an implementation may either skip it or respect it . if ( Constants . JCR_LOCKISDEEP . equals ( propName ) || Constants . JCR_LOCKOWNER . equals ( propName ) ) { continue ; } data . accept ( this ) ; } if ( ! isNoRecurse ( ) && ( currentLevel > 0 ) ) { List < NodeData > nodes = new ArrayList < NodeData > ( dataManager . getChildNodesData ( node ) ) ; // Sorting nodes Collections . sort ( nodes , new NodeDataOrderComparator ( ) ) ; for ( NodeData data : nodes ) { data . accept ( this ) ; } } currentLevel -- ; } leaving ( node , currentLevel ) ; } catch ( RepositoryException re ) { currentLevel = 0 ; throw re ; }
public class SftpClient {
    /**
     * Called by putFileMatches() to do regular expression pattern matching on
     * the files in {@code local}'s parent directory.
     *
     * @param local local path whose file-name component is the pattern
     * @return the matching file names
     * @throws SftpStatusException
     * @throws SshException
     */
    private String[] matchLocalFiles(String local) throws SftpStatusException, SshException {
        // Resolve the search path as it may not be the CWD. Prefer the
        // platform separator; fall back to '/'.
        int separatorIndex = local.lastIndexOf(System.getProperty("file.separator"));
        if (separatorIndex <= -1) {
            separatorIndex = local.lastIndexOf('/');
        }
        String searchDir;
        String searchPattern;
        if (separatorIndex > -1) {
            searchDir = resolveLocalPath(local.substring(0, separatorIndex)).getAbsolutePath();
            searchPattern = (separatorIndex < local.length() - 1) ? local.substring(separatorIndex + 1) : "";
        } else {
            searchDir = lcwd;
            searchPattern = local;
        }
        RegularExpressionMatching matcher;
        File[] files;
        switch (RegExpSyntax) {
            case GlobSyntax:
                matcher = new GlobRegExpMatching();
                files = listFiles(new File(searchDir));
                break;
            case Perl5Syntax:
                matcher = new Perl5RegExpMatching();
                files = listFiles(new File(searchDir));
                break;
            default:
                // No regexp support: treat the whole path as a single literal file.
                matcher = new NoRegExpMatching();
                files = new File[] { new File(local) };
        }
        return matcher.matchFileNamesWithPattern(files, searchPattern);
    }
}
public class CorporationApi { /** * Get corporation medals Returns a corporation & # 39 ; s medals - - - This route * is cached for up to 3600 seconds SSO Scope : * esi - corporations . read _ medals . v1 * @ param corporationId * An EVE corporation ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param page * Which page of results to return ( optional , default to 1) * @ param token * Access token to use if unable to set a header ( optional ) * @ return List & lt ; CorporationMedalsResponse & gt ; * @ throws ApiException * If fail to call the API , e . g . server error or cannot * deserialize the response body */ public List < CorporationMedalsResponse > getCorporationsCorporationIdMedals ( Integer corporationId , String datasource , String ifNoneMatch , Integer page , String token ) throws ApiException { } }
ApiResponse < List < CorporationMedalsResponse > > resp = getCorporationsCorporationIdMedalsWithHttpInfo ( corporationId , datasource , ifNoneMatch , page , token ) ; return resp . getData ( ) ;
public class GroupMember {
    /**
     * Change the data stored in this instance's node.
     *
     * @param data new data (cannot be null)
     */
    public void setThisData(byte[] data) {
        try {
            pen.setData(data);
        } catch (Exception e) {
            // Restore the thread's interrupt flag if the failure was an interruption,
            // then rethrow as unchecked.
            ThreadUtils.checkInterrupted(e);
            Throwables.propagate(e);
        }
    }
}
public class Trace { /** * false or null first ( client first ) */ static int compareShared ( Span left , Span right ) { } }
// If either are shared put it last boolean leftShared = Boolean . TRUE . equals ( left . shared ( ) ) ; boolean rightShared = Boolean . TRUE . equals ( right . shared ( ) ) ; if ( leftShared && rightShared ) return 0 ; // both are shared , so skip out if ( leftShared ) return 1 ; if ( rightShared ) return - 1 ; // neither are shared , put the client spans first boolean leftClient = Span . Kind . CLIENT . equals ( left . kind ( ) ) ; boolean rightClient = Span . Kind . CLIENT . equals ( right . kind ( ) ) ; if ( leftClient && rightClient ) return 0 ; if ( leftClient ) return - 1 ; if ( rightClient ) return 1 ; return 0 ; // neither are client spans
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcHeatFluxDensityMeasure ( ) { } }
if ( ifcHeatFluxDensityMeasureEClass == null ) { ifcHeatFluxDensityMeasureEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 812 ) ; } return ifcHeatFluxDensityMeasureEClass ;
public class JBossModuleUtils { /** * Populate a module spec builder with a dependencies on other modules . * @ param moduleSpecBuilder builder to populate * @ param moduleImportFilterPaths paths valid for importing into the module being built . * Can be null or empty to indicate that no filters should be applied . * @ param dependencyExportFilterPaths export paths for the dependency being linked * @ param dependentModuleIdentifier used to lookup the latest dependencies . see { @ link JBossModuleLoader # getLatestRevisionIds ( ) } */ public static void populateModuleSpecWithModuleDependency ( ModuleSpec . Builder moduleSpecBuilder , @ Nullable Set < String > moduleImportFilterPaths , @ Nullable Set < String > dependencyExportFilterPaths , ModuleIdentifier dependentModuleIdentifier ) { } }
Objects . requireNonNull ( moduleSpecBuilder , "moduleSpecBuilder" ) ; PathFilter moduleImportFilters = buildFilters ( moduleImportFilterPaths , false ) ; PathFilter dependencyExportFilters = buildFilters ( dependencyExportFilterPaths , false ) ; PathFilter importFilters = PathFilters . all ( dependencyExportFilters , moduleImportFilters ) ; moduleSpecBuilder . addDependency ( DependencySpec . createModuleDependencySpec ( importFilters , dependencyExportFilters , null , dependentModuleIdentifier , false ) ) ;
public class CouchbaseCore {
    /**
     * Helper method to dispatch internal requests accordingly, without going to the
     * {@link Disruptor}. This makes sure that certain prioritized requests
     * (adding/removing services/nodes) get done even when the {@link RingBuffer} is
     * swamped with requests during failure scenarios or high load.
     *
     * @param request the request to dispatch.
     */
    private void handleInternalRequest(final CouchbaseRequest request) {
        if (request instanceof GetConfigProviderRequest) {
            // Synchronous answer: hand back the config provider directly.
            request.observable().onNext(new GetConfigProviderResponse(configProvider));
            request.observable().onCompleted();
        } else if (request instanceof AddNodeRequest) {
            // we do not have access to the alternate here at this point, so just pass in null
            requestHandler.addNode(((AddNodeRequest) request).hostname(), null)
                .map(new Func1<LifecycleState, AddNodeResponse>() {
                    @Override
                    public AddNodeResponse call(LifecycleState state) {
                        return new AddNodeResponse(ResponseStatus.SUCCESS, ((AddNodeRequest) request).hostname());
                    }
                })
                .subscribe(request.observable());
        } else if (request instanceof RemoveNodeRequest) {
            requestHandler.removeNode(((RemoveNodeRequest) request).hostname())
                .map(new Func1<LifecycleState, RemoveNodeResponse>() {
                    @Override
                    public RemoveNodeResponse call(LifecycleState state) {
                        return new RemoveNodeResponse(ResponseStatus.SUCCESS);
                    }
                })
                .subscribe(request.observable());
        } else if (request instanceof AddServiceRequest) {
            requestHandler.addService((AddServiceRequest) request)
                .map(new Func1<Service, AddServiceResponse>() {
                    @Override
                    public AddServiceResponse call(Service service) {
                        return new AddServiceResponse(ResponseStatus.SUCCESS, ((AddServiceRequest) request).hostname());
                    }
                })
                .subscribe(request.observable());
        } else if (request instanceof RemoveServiceRequest) {
            requestHandler.removeService((RemoveServiceRequest) request)
                .map(new Func1<Service, RemoveServiceResponse>() {
                    @Override
                    public RemoveServiceResponse call(Service service) {
                        return new RemoveServiceResponse(ResponseStatus.SUCCESS);
                    }
                })
                .subscribe(request.observable());
        } else if (request instanceof DiagnosticsRequest) {
            requestHandler.diagnostics(((DiagnosticsRequest) request).id()).subscribe(request.observable());
        } else {
            // Unknown internal request type: fail the caller's observable.
            request.observable().onError(new IllegalArgumentException("Unknown request " + request));
        }
    }
}
public class JaxRsPatientRestProvider { /** * START SNIPPET : jax - rs - provider - operation */ @ GET @ Path ( "/{id}/$someCustomOperation" ) public Response someCustomOperationUsingGet ( @ PathParam ( "id" ) String id , String resource ) throws Exception { } }
return customOperation ( resource , RequestTypeEnum . GET , id , "$someCustomOperation" , RestOperationTypeEnum . EXTENDED_OPERATION_INSTANCE ) ;
public class PseudoWordContextExtractor { /** * { @ inheritDoc } */ public void processDocument ( BufferedReader document , Wordsi wordsi ) { } }
Queue < String > prevWords = new ArrayDeque < String > ( ) ; Queue < String > nextWords = new ArrayDeque < String > ( ) ; Queue < String > nextRealWord = new ArrayDeque < String > ( ) ; Iterator < String > it = IteratorFactory . tokenizeOrdered ( document ) ; // Fill up the words after the context so that when the real processing // starts , the context is fully prepared . for ( int i = 0 ; i < windowSize && it . hasNext ( ) ; ++ i ) addNextToken ( it . next ( ) , nextWords , nextRealWord ) ; // Iterate through each of the words in the context , generating context // vectors for each acceptable word . String focusWord = null ; String replacementWord = null ; while ( ! nextWords . isEmpty ( ) ) { focusWord = nextWords . remove ( ) ; replacementWord = nextRealWord . remove ( ) ; // Advance the sliding window to the right . if ( it . hasNext ( ) ) addNextToken ( it . next ( ) , nextWords , nextRealWord ) ; // Represent the word if wordsi is willing to process it . if ( ! replacementWord . equals ( EMPTY ) ) { SparseDoubleVector contextVector = generator . generateContext ( prevWords , nextWords ) ; wordsi . handleContextVector ( focusWord , replacementWord , contextVector ) ; } // Advance the sliding window to the right . prevWords . offer ( focusWord ) ; if ( prevWords . size ( ) > windowSize ) prevWords . remove ( ) ; }
public class PDBFileParser { /** * Handler for ATOM . * Record Format : * < pre > * ATOM 1 N ASP A 15 110.964 24.941 59.191 1.00 83.44 N * COLUMNS DATA TYPE FIELD DEFINITION * 1 - 6 Record name " ATOM " * 7 - 11 Integer serial Atom serial number . * 13 - 16 Atom name Atom name . * 17 Character altLoc Alternate location indicator . * 18 - 20 Residue name resName Residue name . * 22 Character chainID Chain identifier . * 23 - 26 Integer resSeq Residue sequence number . * 27 AChar iCode Code for insertion of residues . * 31 - 38 Real ( 8.3 ) x Orthogonal coordinates for X in Angstroms . * 39 - 46 Real ( 8.3 ) y Orthogonal coordinates for Y in Angstroms . * 47 - 54 Real ( 8.3 ) z Orthogonal coordinates for Z in Angstroms . * 55 - 60 Real ( 6.2 ) occupancy Occupancy . * 61 - 66 Real ( 6.2 ) tempFactor Temperature factor . * 73 - 76 LString ( 4 ) segID Segment identifier , left - justified . * 77 - 78 LString ( 2 ) element Element symbol , right - justified . * 79 - 80 LString ( 2 ) charge Charge on the atom . * < / pre > */ private void pdb_ATOM_Handler ( String line ) { } }
if ( params . isHeaderOnly ( ) ) return ; // let ' s first get the chain name which will serve to identify if we are starting a new molecule String chainName = line . substring ( 21 , 22 ) ; if ( chainName . equals ( " " ) ) { blankChainIdsPresent = true ; } if ( currentChain != null && ! currentChain . getName ( ) . equals ( chainName ) ) { // new chain name : another molecule coming startOfMolecule = true ; } if ( startOfMolecule ) { // we add last chain if there was one if ( currentChain != null ) { currentModel . add ( currentChain ) ; // let ' s not forget adding the last group to the finishing chain if ( currentGroup != null ) { currentChain . addGroup ( currentGroup ) ; } } // we initialise the new molecule to come currentChain = new ChainImpl ( ) ; // note that the chainId ( asym id ) is set properly later in assignAsymIds currentChain . setId ( chainName ) ; currentChain . setName ( chainName ) ; } if ( startOfModel ) { // we add last model if there was one if ( currentModel != null ) { allModels . add ( currentModel ) ; } // we initialise the model to come currentModel = new ArrayList < > ( ) ; } // let ' s get the residue number and see if we need to start a new group String groupCode3 = line . substring ( 17 , 20 ) . trim ( ) ; String resNum = line . substring ( 22 , 26 ) . trim ( ) ; Character iCode = line . substring ( 26 , 27 ) . charAt ( 0 ) ; if ( iCode == ' ' ) iCode = null ; ResidueNumber residueNumber = new ResidueNumber ( chainName , Integer . valueOf ( resNum ) , iCode ) ; // recordName groupCode3 // | | resNum // | | | iCode // ATOM 1 N ASP A 15 110.964 24.941 59.191 1.00 83.44 N // ATOM 1964 N ARG H 221A 5.963 - 16.715 27.669 1.00 28.59 N Character aminoCode1 = StructureTools . get1LetterCode ( groupCode3 ) ; String recordName = line . substring ( 0 , 6 ) . trim ( ) ; boolean isHetAtomInFile = false ; if ( recordName . 
equals ( "HETATM" ) ) { // HETATOM RECORDS are treated slightly differently // some modified amino acids that we want to treat as amino acids // can be found as HETATOM records if ( aminoCode1 != null && aminoCode1 . equals ( StructureTools . UNKNOWN_GROUP_LABEL ) ) aminoCode1 = null ; isHetAtomInFile = true ; } if ( startOfMolecule ) { currentGroup = getNewGroup ( recordName , aminoCode1 , groupCode3 ) ; currentGroup . setPDBName ( groupCode3 ) ; currentGroup . setResidueNumber ( residueNumber ) ; currentGroup . setHetAtomInFile ( isHetAtomInFile ) ; } // resetting states startOfModel = false ; startOfMolecule = false ; Character altLoc = new Character ( line . substring ( 16 , 17 ) . charAt ( 0 ) ) ; Group altGroup = null ; // check if residue number is the same . . . if ( ! residueNumber . equals ( currentGroup . getResidueNumber ( ) ) ) { currentChain . addGroup ( currentGroup ) ; currentGroup . trimToSize ( ) ; currentGroup = getNewGroup ( recordName , aminoCode1 , groupCode3 ) ; currentGroup . setPDBName ( groupCode3 ) ; currentGroup . setResidueNumber ( residueNumber ) ; currentGroup . setHetAtomInFile ( isHetAtomInFile ) ; } else { // same residueNumber , but altLocs . . . // test altLoc if ( ! altLoc . equals ( ' ' ) ) { logger . debug ( "found altLoc! " + currentGroup + " " + altGroup ) ; altGroup = getCorrectAltLocGroup ( altLoc , recordName , aminoCode1 , groupCode3 ) ; if ( altGroup . getChain ( ) == null ) { // need to set current chain altGroup . setChain ( currentChain ) ; } } } atomCount ++ ; if ( atomCount == atomCAThreshold ) { // throw away the SEQRES lines - too much to deal with . . . logger . warn ( "more than " + atomCAThreshold + " atoms in this structure, ignoring the SEQRES lines" ) ; seqResChains . clear ( ) ; switchCAOnly ( ) ; } if ( atomCount == loadMaxAtoms ) { logger . warn ( "File has more atoms than max specified in parsing parameters ({}). 
Ignoring atoms after line: {}" , loadMaxAtoms , line ) ; return ; } if ( atomCount > loadMaxAtoms ) { return ; } // 1 2 3 4 5 6 // 012345678901234567890123456789012345678901234567890123456789 // ATOM 1 N MET 1 20.154 29.699 5.276 1.0 // ATOM 112 CA ASP 112 41.017 33.527 28.371 1.00 0.00 // ATOM 53 CA MET 7 23.772 33.989 - 21.600 1.00 0.00 C // ATOM 112 CA ASP 112 37.613 26.621 33.571 0 0 String fullname = line . substring ( 12 , 16 ) ; // check for CA only if requested if ( parseCAonly ) { // yes , user wants to get CA only // only parse CA atoms . . . if ( ! fullname . equals ( " CA " ) ) { // System . out . println ( " ignoring " + line ) ; atomCount -- ; return ; } } if ( params . getAcceptedAtomNames ( ) != null ) { boolean found = false ; for ( String ok : params . getAcceptedAtomNames ( ) ) { // System . out . println ( ok + " < > " + fullname + " < " ) ; if ( ok . equals ( fullname . trim ( ) ) ) { found = true ; break ; } } if ( ! found ) { atomCount -- ; return ; } } // create new atom int pdbnumber = Integer . parseInt ( line . substring ( 6 , 11 ) . trim ( ) ) ; AtomImpl atom = new AtomImpl ( ) ; atom . setPDBserial ( pdbnumber ) ; atom . setAltLoc ( altLoc ) ; atom . setName ( fullname . trim ( ) ) ; double x = Double . parseDouble ( line . substring ( 30 , 38 ) . trim ( ) ) ; double y = Double . parseDouble ( line . substring ( 38 , 46 ) . trim ( ) ) ; double z = Double . parseDouble ( line . substring ( 46 , 54 ) . trim ( ) ) ; double [ ] coords = new double [ 3 ] ; coords [ 0 ] = x ; coords [ 1 ] = y ; coords [ 2 ] = z ; atom . setCoords ( coords ) ; float occu = 1.0f ; if ( line . length ( ) > 59 ) { try { // occu and tempf are sometimes not used : - / occu = Float . parseFloat ( line . substring ( 54 , 60 ) . trim ( ) ) ; } catch ( NumberFormatException e ) { } } float tempf = 0.0f ; if ( line . length ( ) > 65 ) { try { tempf = Float . parseFloat ( line . substring ( 60 , 66 ) . trim ( ) ) ; } catch ( NumberFormatException e ) { } } atom . 
setOccupancy ( occu ) ; atom . setTempFactor ( tempf ) ; // Parse element from the element field . If this field is // missing ( i . e . misformatted PDB file ) , then parse the // element from the chemical component . Element element = Element . R ; boolean guessElement = true ; if ( line . length ( ) > 77 ) { // parse element from element field String elementSymbol = line . substring ( 76 , 78 ) . trim ( ) ; if ( elementSymbol . isEmpty ( ) ) { logger . info ( "Element column was empty for atom {} {}. Assigning atom element " + "from Chemical Component Dictionary information" , fullname . trim ( ) , pdbnumber ) ; } else { try { element = Element . valueOfIgnoreCase ( elementSymbol ) ; guessElement = false ; } catch ( IllegalArgumentException e ) { logger . info ( "Element {} of atom {} {} was not recognised. Assigning atom element " + "from Chemical Component Dictionary information" , elementSymbol , fullname . trim ( ) , pdbnumber ) ; } } } else { logger . info ( "Missformatted PDB file: element column of atom {} {} is not present. " + "Assigning atom element from Chemical Component Dictionary information" , fullname . trim ( ) , pdbnumber ) ; } if ( guessElement ) { String elementSymbol = null ; if ( currentGroup . getChemComp ( ) != null ) { for ( ChemCompAtom a : currentGroup . getChemComp ( ) . getAtoms ( ) ) { if ( a . getAtom_id ( ) . equals ( fullname . trim ( ) ) ) { elementSymbol = a . getType_symbol ( ) ; break ; } } if ( elementSymbol == null ) { logger . info ( "Atom name {} was not found in the Chemical Component Dictionary information of {}. " + "Assigning generic element R to it" , fullname . trim ( ) , currentGroup . getPDBName ( ) ) ; } else { try { element = Element . valueOfIgnoreCase ( elementSymbol ) ; } catch ( IllegalArgumentException e ) { // this can still happen for cases like UNK logger . info ( "Element symbol {} found in chemical component dictionary for Atom {} {} could not be recognised as a known element. 
" + "Assigning generic element R to it" , elementSymbol , fullname . trim ( ) , pdbnumber ) ; } } } else { logger . warn ( "Chemical Component Dictionary information was not found for Atom name {}. " + "Assigning generic element R to it" , fullname . trim ( ) ) ; } } atom . setElement ( element ) ; // see if chain _ id is one of the previous chains . . . if ( altGroup != null ) { altGroup . addAtom ( atom ) ; altGroup = null ; } else { currentGroup . addAtom ( atom ) ; } // make sure that main group has all atoms // GitHub issue : # 76 if ( ! currentGroup . hasAtom ( atom . getName ( ) ) ) { currentGroup . addAtom ( atom ) ; }
public class Distance { /** * Gets the Squared Euclidean distance between two points . * @ param p IntPoint with X and Y axis coordinates . * @ param q IntPoint with X and Y axis coordinates . * @ return The Squared euclidean distance between x and y . */ public static double SquaredEuclidean ( IntPoint p , IntPoint q ) { } }
double dx = q . x - p . x ; double dy = q . y - p . y ; return dx * dx + dy * dy ;
public class HBaseRequestAdapter { /** * < p > adapt . < / p > * @ param increment a { @ link Increment } object . * @ return a { @ link ReadModifyWriteRow } object . */ public ReadModifyWriteRow adapt ( Increment increment ) { } }
ReadModifyWriteRow readModifyWriteRow = ReadModifyWriteRow . create ( bigtableTableName . getTableId ( ) , ByteString . copyFrom ( increment . getRow ( ) ) ) ; Adapters . INCREMENT_ADAPTER . adapt ( increment , readModifyWriteRow ) ; return readModifyWriteRow ;
public class DOMBuilder { /** * Tell if the current node is outside the document element . * @ return true if the current node is outside the document element . */ private boolean isOutsideDocElem ( ) { } }
return ( null == m_docFrag ) && m_elemStack . size ( ) == 0 && ( null == m_currentNode || m_currentNode . getNodeType ( ) == Node . DOCUMENT_NODE ) ;
public class TitlePaneCloseButtonPainter {
    /**
     * Create the shape for the mark interior.
     *
     * The mark is the "x" glyph of a close button, traced clockwise as a
     * single closed polygon in a roughly 10x9 pixel cell centered in the
     * button bounds.
     *
     * @ param width the width .
     * @ param height the height .
     * @ return the shape of the mark interior .
     */
    private Shape decodeMarkInterior ( int width , int height ) {
        // Top-left corner of the glyph cell, offset so the "x" is centered.
        int left = ( width - 3 ) / 2 - 5 ;
        int top = ( height - 2 ) / 2 - 5 ;
        path . reset ( ) ;
        // Trace the eight-pointed outline of the "x": upper-left arm,
        // upper-right arm, lower-right arm, lower-left arm, back to start.
        path . moveTo ( left + 1 , top + 1 ) ;
        path . lineTo ( left + 4 , top + 1 ) ;
        path . lineTo ( left + 5 , top + 3 ) ;
        path . lineTo ( left + 7 , top + 1 ) ;
        path . lineTo ( left + 10 , top + 1 ) ;
        path . lineTo ( left + 7 , top + 4 ) ;
        path . lineTo ( left + 7 , top + 5 ) ;
        path . lineTo ( left + 10 , top + 9 ) ;
        path . lineTo ( left + 6 , top + 8 ) ;
        path . lineTo ( left + 5 , top + 6 ) ;
        path . lineTo ( left + 4 , top + 9 ) ;
        path . lineTo ( left + 0 , top + 9 ) ;
        path . lineTo ( left + 4 , top + 5 ) ;
        path . lineTo ( left + 4 , top + 4 ) ;
        path . closePath ( ) ;
        return path ;
    }
}
public class MariaDbResultSetMetaData { /** * Gets the designated column ' s table name . * @ param column the first column is 1 , the second is 2 , . . . * @ return table name or " " if not applicable * @ throws SQLException if a database access error occurs */ public String getTableName ( final int column ) throws SQLException { } }
if ( returnTableAlias ) { return getColumnInformation ( column ) . getTable ( ) ; } else { return getColumnInformation ( column ) . getOriginalTable ( ) ; }
public class OptionsBuilder { /** * Sets the execution directory * @ param executionDirectory * execution directory * @ return updated OptionBuilder instance * @ see Options # EXECUTION _ DIRECTORY */ public OptionsBuilder executionDirectory ( File executionDirectory ) { } }
options . put ( Options . EXECUTION_DIRECTORY , executionDirectory != null ? executionDirectory : defaultExecutionDirectory ( ) ) ; return this ;
public class TimestampInterval { /** * / * [ deutsch ] * < p > Erzeugt ein unbegrenztes offenes Intervall bis zum angegebenen * Endzeitpunkt . < / p > * @ param end timestamp of upper boundary ( exclusive ) * @ return new timestamp interval * @ since 2.0 */ public static TimestampInterval until ( PlainTimestamp end ) { } }
Boundary < PlainTimestamp > past = Boundary . infinitePast ( ) ; return new TimestampInterval ( past , Boundary . of ( OPEN , end ) ) ;
public class DeviceDelegatingViewResolverFactory { /** * Create a { @ link LiteDeviceDelegatingViewResolver } delegating to the specified * { @ link ViewResolver } and computing a sensible order for it . The specified * { @ link ViewResolver } should implement { @ link Ordered } , consider using * { @ link # createViewResolver ( ViewResolver , int ) } if that ' s not the case . * @ param delegate the view resolver to delegate to * @ return a { @ link LiteDeviceDelegatingViewResolver } handling the specified resolver */ public LiteDeviceDelegatingViewResolver createViewResolver ( ViewResolver delegate ) { } }
if ( ! ( delegate instanceof Ordered ) ) { throw new IllegalStateException ( "ViewResolver " + delegate + " should implement " + Ordered . class . getName ( ) ) ; } int delegateOrder = ( ( Ordered ) delegate ) . getOrder ( ) ; return createViewResolver ( delegate , adjustOrder ( delegateOrder ) ) ;
public class CQJDBCStorageConnection {
    /**
     * {@inheritDoc}
     *
     * Builds the fully-qualified path of the item with the given container id
     * by walking up the parent chain, one SQL lookup per ancestor, until the
     * root is reached. Entries are collected child-first and reversed at the
     * end to produce root-first order.
     */
    @Override
    protected QPath traverseQPath ( String cpid ) throws SQLException , InvalidItemStateException , IllegalNameException {
        String id = getIdentifier ( cpid ) ;
        // Fast path: the item itself is the root.
        if ( id . equals ( Constants . ROOT_UUID ) ) {
            return Constants . ROOT_PATH ;
        }
        // get item by Identifier usecase
        List < QPathEntry > qrpath = new ArrayList < QPathEntry > ( ) ; // reverted path
        String caid = cpid ; // container ancestor id
        boolean isRoot = false ;
        do {
            ResultSet result = null ;
            try {
                // The query may return the entry for caid itself and/or its
                // parent; which one comes first is handled below.
                result = findItemQPathByIdentifierCQ ( caid ) ;
                if ( ! result . next ( ) ) {
                    throw new InvalidItemStateException ( "Parent not found, uuid: " + getIdentifier ( caid ) ) ;
                }
                String cid = result . getString ( COLUMN_ID ) ;
                QPathEntry qpe1 =
                        new QPathEntry ( InternalQName . parse ( result . getString ( COLUMN_NAME ) ) , result . getInt ( COLUMN_INDEX ) , getIdentifier ( cid ) ) ;
                boolean isChild = caid . equals ( cid ) ;
                caid = result . getString ( COLUMN_PARENTID ) ;
                // Guard against corrupted data: an item must never be its own parent.
                if ( cid . equals ( caid ) ) {
                    throw new InvalidItemStateException ( "An item with id='" + getIdentifier ( caid ) + "' is its own parent" ) ;
                }
                if ( result . next ( ) ) {
                    QPathEntry qpe2 =
                            new QPathEntry ( InternalQName . parse ( result . getString ( COLUMN_NAME ) ) , result . getInt ( COLUMN_INDEX ) , getIdentifier ( result . getString ( COLUMN_ID ) ) ) ;
                    if ( isChild ) {
                        // The child is the first result then we have the parent
                        qrpath . add ( qpe1 ) ;
                        qrpath . add ( qpe2 ) ;
                        // We need to take the value of the parent node
                        caid = result . getString ( COLUMN_PARENTID ) ;
                    } else {
                        // The parent is the first result then we have the child
                        qrpath . add ( qpe2 ) ;
                        qrpath . add ( qpe1 ) ;
                    }
                } else {
                    qrpath . add ( qpe1 ) ;
                }
            } finally {
                if ( result != null ) {
                    try {
                        result . close ( ) ;
                    } catch ( SQLException e ) {
                        LOG . error ( "Can't close the ResultSet: " + e . getMessage ( ) ) ;
                    }
                }
            }
            // Stop when the ancestor chain reaches the root (or the root's
            // pseudo-parent); add the root entry itself if needed.
            if ( caid . equals ( Constants . ROOT_PARENT_UUID ) || ( id = getIdentifier ( caid ) ) . equals ( Constants . ROOT_UUID ) ) {
                if ( id . equals ( Constants . ROOT_UUID ) ) {
                    qrpath . add ( Constants . ROOT_PATH . getEntries ( ) [ 0 ] ) ;
                }
                isRoot = true ;
            }
        } while ( ! isRoot ) ;
        // Reverse the collected child-first entries into root-first order.
        QPathEntry [ ] qentries = new QPathEntry [ qrpath . size ( ) ] ;
        int qi = 0 ;
        for ( int i = qrpath . size ( ) - 1 ; i >= 0 ; i -- ) {
            qentries [ qi ++ ] = qrpath . get ( i ) ;
        }
        return new QPath ( qentries ) ;
    }
}
public class AvroUtils { /** * Helper method that does the actual work for { @ link # getField ( Schema , String ) } * @ param schema passed from { @ link # getFieldSchema ( Schema , String ) } * @ param pathList passed from { @ link # getFieldSchema ( Schema , String ) } * @ param field keeps track of the index used to access the list pathList * @ return the field */ private static Optional < Field > getFieldHelper ( Schema schema , List < String > pathList , int field ) { } }
Field curField = schema . getField ( pathList . get ( field ) ) ; if ( field + 1 == pathList . size ( ) ) { return Optional . fromNullable ( curField ) ; } Schema fieldSchema = curField . schema ( ) ; switch ( fieldSchema . getType ( ) ) { case UNION : throw new AvroRuntimeException ( "Union of complex types cannot be handled : " + schema ) ; case MAP : return AvroUtils . getFieldHelper ( fieldSchema . getValueType ( ) , pathList , ++ field ) ; case RECORD : return AvroUtils . getFieldHelper ( fieldSchema , pathList , ++ field ) ; default : throw new AvroRuntimeException ( "Invalid type in schema : " + schema ) ; }
public class FSDataset { /** * Copies a file as fast as possible . Tries to do a hardlink instead of a copy * if the hardlink parameter is specified . * @ param src * the source file for copying * @ param dst * the destination file for copying * @ param hardlink * whether or not to attempt a hardlink * @ throws IOException */ public void copyFile ( File src , File dst , boolean hardlink ) throws IOException { } }
if ( src == null || dst == null ) { throw new IOException ( "src/dst file is null" ) ; } try { if ( hardlink && shouldHardLinkBlockCopy ) { // Remove destination before hard linking , since this file might already // exist and a hardlink would fail as a result . if ( dst . exists ( ) ) { if ( ! dst . delete ( ) ) { throw new IOException ( "Deletion of file : " + dst + " failed" ) ; } } NativeIO . link ( src , dst ) ; DataNode . LOG . info ( "Hard Link Created from : " + src + " to " + dst ) ; return ; } } catch ( IOException e ) { DataNode . LOG . warn ( "Hard link failed from : " + src + " to " + dst + " continuing with regular file copy" ) ; } FileChannel input = null ; FileChannel output = null ; try { // This improves copying performance a lot , it uses native buffers // for copying . input = new FileInputStream ( src ) . getChannel ( ) ; output = new FileOutputStream ( dst ) . getChannel ( ) ; if ( input == null || output == null ) { throw new IOException ( "Could not create file channels for src : " + src + " dst : " + dst ) ; } long bytesLeft = input . size ( ) ; long position = 0 ; while ( bytesLeft > 0 ) { long bytesWritten = output . transferFrom ( input , position , bytesLeft ) ; bytesLeft -= bytesWritten ; position += bytesWritten ; } if ( datanode . syncOnClose ) { output . force ( true ) ; } } finally { if ( input != null ) { input . close ( ) ; } if ( output != null ) { output . close ( ) ; } }
public class LineBotAutoConfiguration { /** * Expose { @ link FixedChannelTokenSupplier } as { @ link Bean } * in case of no other definition for { @ link ChannelTokenSupplier } type . */ @ Bean @ ConditionalOnMissingBean ( ChannelTokenSupplier . class ) public ChannelTokenSupplier channelTokenSupplier ( ) { } }
final String channelToken = lineBotProperties . getChannelToken ( ) ; return FixedChannelTokenSupplier . of ( channelToken ) ;
public class MACAddressSection { /** * This produces a canonical string using the canonical standardized IEEE 802 MAC address representation of xx - xx - xx - xx - xx - xx * For range segments , ' . . ' is used : 11-22-33 . . 44-55-66 */ @ Override public String toCanonicalString ( ) { } }
String result ; if ( hasNoStringCache ( ) || ( result = getStringCache ( ) . canonicalString ) == null ) { getStringCache ( ) . canonicalString = result = toNormalizedString ( MACStringCache . canonicalParams ) ; } return result ;
public class ClassScreener { /** * ( non - Javadoc ) * @ see edu . umd . cs . findbugs . IClassScreener # matches ( java . lang . String ) */ @ Override public boolean matches ( String fileName ) { } }
// Special case : if no classes or packages have been defined , // then the screener matches all class files . if ( patternList . isEmpty ( ) ) { return true ; } LOG . debug ( "Matching: {}" , fileName ) ; // Scan through list of regexes for ( Matcher matcher : patternList ) { matcher . reset ( fileName ) ; if ( matcher . find ( ) ) { LOG . debug ( "\\tTrying [{}]: yes!" , matcher . pattern ( ) ) ; return true ; } LOG . debug ( "\\tTrying [{}]: no" , matcher . pattern ( ) ) ; } return false ;
public class XMLParser { /** * Read comment . * @ param returnText the return text * @ return the string * @ throws IOException Signals that an I / O exception has occurred . * @ throws KriptonRuntimeException the kripton runtime exception */ private String readComment ( boolean returnText ) throws IOException , KriptonRuntimeException { } }
read ( START_COMMENT ) ; if ( relaxed ) { return readUntil ( END_COMMENT , returnText ) ; } String commentText = readUntil ( COMMENT_DOUBLE_DASH , returnText ) ; if ( peekCharacter ( ) != '>' ) { throw new KriptonRuntimeException ( "Comments may not contain --" , true , this . getLineNumber ( ) , this . getColumnNumber ( ) , getPositionDescription ( ) , null ) ; } position ++ ; return commentText ;
public class StrutsApp { /** * Get the MessageResourcesModel for which no " key " is set ( the default one used at runtime ) . */ public MessageResourcesModel getDefaultMessageResources ( ) { } }
for ( java . util . Iterator ii = _messageResources . iterator ( ) ; ii . hasNext ( ) ; ) { MessageResourcesModel i = ( MessageResourcesModel ) ii . next ( ) ; if ( i . getKey ( ) == null ) return i ; } return null ;
public class ClientController { /** * Update properties for a specific client id * @ param model * @ param profileIdentifier * @ param clientUUID * @ param active - true false depending on if the client should be active * @ param reset - true to reset the state of a client ( clears settings for all paths and disables the client ) * @ return * @ throws Exception */ @ RequestMapping ( value = "/api/profile/{profileIdentifier}/clients/{clientUUID}" , method = RequestMethod . POST ) public @ ResponseBody HashMap < String , Object > updateClient ( Model model , @ PathVariable ( "profileIdentifier" ) String profileIdentifier , @ PathVariable ( "clientUUID" ) String clientUUID , @ RequestParam ( required = false ) Boolean active , @ RequestParam ( required = false ) String friendlyName , @ RequestParam ( required = false ) Boolean reset ) throws Exception { } }
Integer profileId = ControllerUtils . convertProfileIdentifier ( profileIdentifier ) ; if ( active != null ) { logger . info ( "Active: {}" , active ) ; clientService . updateActive ( profileId , clientUUID , active ) ; } if ( friendlyName != null ) { clientService . setFriendlyName ( profileId , clientUUID , friendlyName ) ; } if ( reset != null && reset ) { clientService . reset ( profileId , clientUUID ) ; } HashMap < String , Object > valueHash = new HashMap < String , Object > ( ) ; valueHash . put ( "client" , clientService . findClient ( clientUUID , profileId ) ) ; return valueHash ;
public class Serializer { /** * Deserialize an instance of the given type reference from the input stream . * @ param input The { @ link InputStream } that contains the data to deserialize . * @ param ref The { @ link TypeReference } of the type to deserialize the result into . * @ param < T > The type to deserialize the result into . * @ return An instance of the given type T deserialized from the input stream . * @ throws IOException on general I / O error . */ public < T extends OmiseObject > T deserialize ( InputStream input , TypeReference < T > ref ) throws IOException { } }
return objectMapper . readerFor ( ref ) . readValue ( input ) ;
public class TelnetAdapter { /** * Rewrites the command line to show changes as a result of hitting the delete key * when the cursor is positioned in the middle of the command line * @ throws IOException */ private void handleDeleteKey ( ) throws IOException { } }
if ( commandLineBuffer . size ( ) > 0 && curPos < commandLineBuffer . size ( ) ) { String curLine = overwriteCommandlineSkippingChar ( curPos ) ; eraseLastCharAndMoveBack ( curLine , curPos + 1 ) ; }
public class MarkLogicRepositoryConnection {
    /**
     * add triples via Reader
     *
     * Pure delegation: forwards the parse-and-insert to the underlying
     * MarkLogic client, which handles parsing the RDF and targeting the
     * given graph contexts.
     *
     * @ param reader
     * @ param baseURI
     * @ param dataFormat
     * @ param contexts
     * @ throws IOException
     * @ throws RDFParseException
     * @ throws RepositoryException
     */
    @Override
    public void add ( Reader reader , String baseURI , RDFFormat dataFormat , Resource ... contexts ) throws IOException , RDFParseException , RepositoryException {
        getClient ( ) . sendAdd ( reader , baseURI , dataFormat , contexts ) ;
    }
}
public class ModelsEngine {
    /**
     * Marks a map on the hillslope with the values on the channel of an attribute map.
     *
     * For every source cell, the flow path is followed downstream until a cell
     * marked as outlet/channel is reached; the attribute value found there is
     * then written back onto every cell of that path.
     *
     * @ param flowIter map of flow direction with the network cells
     * all marked as { @ link FlowNode # NETVALUE } . This is very important !
     * @ param attributeIter map of attributes .
     * @ param markedIter the map to be marked .
     * @ param cols region cols .
     * @ param rows region rows .
     * @ param pm monitor .
     */
    public static void markHillSlopeWithLinkValue ( RandomIter flowIter , RandomIter attributeIter , WritableRandomIter markedIter , int cols , int rows , IHMProgressMonitor pm ) {
        pm . beginTask ( "Marking the hillslopes with the channel value..." , rows ) ;
        for ( int r = 0 ; r < rows ; r ++ ) {
            for ( int c = 0 ; c < cols ; c ++ ) {
                FlowNode flowNode = new FlowNode ( flowIter , cols , rows , c , r ) ;
                if ( flowNode . isHeadingOutside ( ) ) {
                    // ignore single cells on borders that exit anyway
                    continue ;
                }
                // Channel cells copy their own attribute value directly.
                if ( flowNode . isMarkedAsOutlet ( ) ) {
                    double attributeValue = flowNode . getDoubleValueFromMap ( attributeIter ) ;
                    flowNode . setDoubleValueInMap ( markedIter , attributeValue ) ;
                    continue ;
                }
                if ( flowNode . isValid ( ) && flowNode . isSource ( ) ) {
                    /*
                     * run down to the net to find the
                     * attribute map content on the net
                     */
                    double attributeValue = doubleNovalue ;
                    FlowNode runningNode = flowNode . goDownstream ( ) ;
                    // Track the last visited position for the error message below.
                    int runningRow = - 1 ;
                    int runningCol = - 1 ;
                    while ( runningNode != null && runningNode . isValid ( ) ) {
                        runningRow = runningNode . row ;
                        runningCol = runningNode . col ;
                        if ( runningNode . isMarkedAsOutlet ( ) ) {
                            attributeValue = runningNode . getDoubleValueFromMap ( attributeIter ) ;
                            break ;
                        }
                        runningNode = runningNode . goDownstream ( ) ;
                    }
                    if ( ! isNovalue ( attributeValue ) ) {
                        // run down marking the hills
                        runningNode = flowNode ;
                        while ( runningNode != null && runningNode . isValid ( ) ) {
                            runningNode . setDoubleValueInMap ( markedIter , attributeValue ) ;
                            if ( runningNode . isMarkedAsOutlet ( ) ) {
                                break ;
                            }
                            runningNode = runningNode . goDownstream ( ) ;
                        }
                    } else {
                        // The downstream walk never reached a channel cell:
                        // the flow map is inconsistent with the expectation above.
                        throw new ModelsIllegalargumentException ( "Could not find a value of the attributes map in the channel after point: " + runningCol + "/" + runningRow + ". Are you sure that everything leads to a channel or outlet?" , "MODELSENGINE" , pm ) ;
                    }
                }
            }
            pm . worked ( 1 ) ;
        }
        pm . done ( ) ;
    }
}
public class InstanceInformation { /** * / * ( non - Javadoc ) * @ see com . yahoo . labs . samoa . instances . InstanceInformationInterface # insertAttributeAt ( com . yahoo . labs . samoa . instances . Attribute , int ) */ public void insertAttributeAt ( Attribute attribute , int i ) { } }
this . attributesInformation . insertAttributeAt ( attribute , i ) ; if ( this . classIndex >= i ) { this . classIndex ++ ; }
public class Ramp { /** * Computes the membership function evaluated at ` x ` * @ param x * @ return * ` \ begin { cases } * 0h & \ mbox { if $ x = e $ } \ cr * \ begin { cases } 0h & \ mbox { if $ x \ leq s $ } \ cr 1h & \ mbox { if $ x \ geq e $ } \ cr h * ( x - s ) / ( e - s ) & \ mbox { otherwise } \ cr \ end { cases } & \ mbox { if $ s < e $ } \ cr * \ begin { cases } * 0h & \ mbox { if $ x \ geq s $ } \ cr * 1h & \ mbox { if $ x \ leq e $ } \ cr * h ( s - x ) / ( s - e ) & \ mbox { otherwise } * \ end { cases } & \ mbox { if $ s > e $ } \ cr \ end { cases } ` * where ` h ` is the height of the Term , * ` s ` is the start of the Ramp , * ` e ` is the end of the Ramp */ @ Override public double membership ( double x ) { } }
if ( Double . isNaN ( x ) ) { return Double . NaN ; } if ( Op . isEq ( start , end ) ) { return height * 0.0 ; } if ( Op . isLt ( start , end ) ) { if ( Op . isLE ( x , start ) ) { return height * 0.0 ; } if ( Op . isGE ( x , end ) ) { return height * 1.0 ; } return height * ( x - start ) / ( end - start ) ; } else { if ( Op . isGE ( x , start ) ) { return height * 0.0 ; } if ( Op . isLE ( x , end ) ) { return height * 1.0 ; } return height * ( start - x ) / ( start - end ) ; }
public class SkillServlet { /** * Sets a { @ code Proxy } object that this servlet may use if Request Signature Verification is enabled . * @ param proxy the { @ code Proxy } to associate with this servlet . */ public void setProxy ( Proxy proxy ) { } }
if ( verifiers . removeIf ( verifier -> verifier instanceof SkillRequestSignatureVerifier ) ) { verifiers . add ( new SkillRequestSignatureVerifier ( proxy ) ) ; }
public class BaseCustomDfuImpl {
    /**
     * Sends the whole init packet stream to the given characteristic,
     * one buffer-sized packet at a time, optionally folding each chunk
     * into the supplied CRC.
     *
     * @ param characteristic the target characteristic
     * @ param crc32 the CRC object to be updated based on the data sent ; may be null
     * @ throws DeviceDisconnectedException Thrown when the device will disconnect in the middle of
     * the transmission .
     * @ throws DfuException Thrown if DFU error occur .
     * @ throws UploadAbortedException Thrown if DFU operation was aborted by user .
     */
    void writeInitData ( final BluetoothGattCharacteristic characteristic , final CRC32 crc32 ) throws DfuException , DeviceDisconnectedException , UploadAbortedException {
        try {
            // Reuse the shared transfer buffer for each chunk.
            byte [ ] data = mBuffer ;
            int size ;
            while ( ( size = mInitPacketStream . read ( data , 0 , data . length ) ) != - 1 ) {
                writeInitPacket ( characteristic , data , size ) ;
                if ( crc32 != null )
                    crc32 . update ( data , 0 , size ) ;
            }
        } catch ( final IOException e ) {
            // Translate stream failures into the DFU-specific error type.
            loge ( "Error while reading Init packet file" , e ) ;
            throw new DfuException ( "Error while reading Init packet file" , DfuBaseService . ERROR_FILE_ERROR ) ;
        }
    }
}
public class Searcher {
    /**
     * Runs the given search against the Lucene index: parses the query,
     * executes it with facet collection, optionally expands facet dimensions,
     * and converts the hits into result elements grouped by instance type.
     * I/O and query-parsing errors are logged and swallowed, returning the
     * (possibly empty) result collected so far.
     *
     * @ param _ search the search
     * @ return the search result
     * @ throws EFapsException on error
     */
    protected SearchResult executeSearch ( final ISearch _search ) throws EFapsException {
        final SearchResult ret = new SearchResult ( ) ;
        try {
            LOG . debug ( "Starting search with: {}" , _search . getQuery ( ) ) ;
            final StandardQueryParser queryParser = new StandardQueryParser ( Index . getAnalyzer ( ) ) ;
            queryParser . setAllowLeadingWildcard ( true ) ;
            // Default boolean operator: taken from the system configuration,
            // falling back to AND.
            if ( EFapsSystemConfiguration . get ( ) . containsAttributeValue ( KernelSettings . INDEXDEFAULTOP ) ) {
                queryParser . setDefaultOperator ( EnumUtils . getEnum ( StandardQueryConfigHandler . Operator . class ,
                        EFapsSystemConfiguration . get ( ) . getAttributeValue ( KernelSettings . INDEXDEFAULTOP ) ) ) ;
            } else {
                queryParser . setDefaultOperator ( StandardQueryConfigHandler . Operator . AND ) ;
            }
            final Query query = queryParser . parse ( _search . getQuery ( ) , "ALL" ) ;
            final IndexReader reader = DirectoryReader . open ( Index . getDirectory ( ) ) ;
            // Default sort: newest first by creation timestamp.
            Sort sort = _search . getSort ( ) ;
            if ( sort == null ) {
                sort = new Sort ( new SortField ( Key . CREATED . name ( ) , SortField . Type . LONG , true ) ) ;
            }
            final FacetsConfig facetConfig = Index . getFacetsConfig ( ) ;
            final DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader ( Index . getTaxonomyDirectory ( ) ) ;
            final IndexSearcher searcher = new IndexSearcher ( reader ) ;
            final FacetsCollector fc = new FacetsCollector ( ) ;
            final TopFieldDocs topFieldDocs = FacetsCollector . search ( searcher , query , _search . getNumHits ( ) , sort , fc ) ;
            // Optionally turn the collected facets into Dimension/DimValue trees.
            if ( _search . getConfigs ( ) . contains ( SearchConfig . ACTIVATE_DIMENSION ) ) {
                final Facets facets = new FastTaxonomyFacetCounts ( taxoReader , facetConfig , fc ) ;
                for ( final FacetResult result : facets . getAllDims ( 1000 ) ) {
                    LOG . debug ( "FacetResult {}." , result ) ;
                    final DimConfig dimConfig = facetConfig . getDimConfig ( result . dim ) ;
                    final Dimension retDim = new Dimension ( ) . setKey ( result . dim ) ;
                    ret . getDimensions ( ) . add ( retDim ) ;
                    for ( final LabelAndValue labelValue : result . labelValues ) {
                        final DimValue dimValue = new DimValue ( ) . setLabel ( labelValue . label ) . setValue ( labelValue . value . intValue ( ) ) ;
                        dimValue . setPath ( new String [ ] { retDim . getKey ( ) } ) ;
                        retDim . getValues ( ) . add ( dimValue ) ;
                        // Recurse into hierarchical dimensions.
                        if ( dimConfig . hierarchical ) {
                            addSubDimension ( facets , dimValue , result . dim , labelValue . label ) ;
                        }
                    }
                }
            }
            ret . setHitCount ( topFieldDocs . totalHits ) ;
            if ( ret . getHitCount ( ) > 0 ) {
                final ScoreDoc [ ] hits = topFieldDocs . scoreDocs ;
                LOG . debug ( "Found {} hits." , hits . length ) ;
                for ( int i = 0 ; i < hits . length ; ++ i ) {
                    final Document doc = searcher . doc ( hits [ i ] . doc ) ;
                    final String oid = doc . get ( Key . OID . name ( ) ) ;
                    final String text = doc . get ( Key . MSGPHRASE . name ( ) ) ;
                    LOG . debug ( "{}. {}\t {}" , i + 1 , oid , text ) ;
                    final Instance instance = Instance . get ( oid ) ;
                    // Group hit instances by their eFaps type.
                    final List < Instance > list ;
                    if ( this . typeMapping . containsKey ( instance . getType ( ) ) ) {
                        list = this . typeMapping . get ( instance . getType ( ) ) ;
                    } else {
                        list = new ArrayList < Instance > ( ) ;
                        this . typeMapping . put ( instance . getType ( ) , list ) ;
                    }
                    list . add ( instance ) ;
                    final Element element = new Element ( ) . setOid ( oid ) . setText ( text ) ;
                    // Copy the requested stored fields onto the result element.
                    for ( final Entry < String , Collection < String > > entry : _search . getResultFields ( ) . entrySet ( ) ) {
                        for ( final String name : entry . getValue ( ) ) {
                            final String value = doc . get ( name ) ;
                            if ( value != null ) {
                                element . addField ( name , value ) ;
                            }
                        }
                    }
                    this . elements . put ( instance , element ) ;
                }
            }
            reader . close ( ) ;
            // Filter the collected elements by access rights before exposing them.
            checkAccess ( ) ;
            ret . getElements ( ) . addAll ( this . elements . values ( ) ) ;
        } catch ( final IOException | QueryNodeException e ) {
            // Best-effort: log and return what was collected so far.
            LOG . error ( "Catched Exception" , e ) ;
        }
        return ret ;
    }
}
public class BlockDataHandler { /** * Called on the client when receiving the data from the server , either because client started to watch the chunk or server manually * sent the data . * @ param chunkX the chunk X * @ param chunkZ the chunk Z * @ param identifier the identifier * @ param data the data */ static void setBlockData ( int chunkX , int chunkZ , String identifier , ByteBuf data ) { } }
HandlerInfo < ? > handlerInfo = instance . handlerInfos . get ( identifier ) ; if ( handlerInfo == null ) return ; // MalisisCore . message ( " Received blockData ( " + chunkX + " / " + chunkZ + " ) for " + identifier ) ; Chunk chunk = Utils . getClientWorld ( ) . getChunkFromChunkCoords ( chunkX , chunkZ ) ; ChunkData < ? > chunkData = new ChunkData < > ( handlerInfo ) . fromBytes ( data ) ; datas . get ( ) . put ( handlerInfo . identifier , chunk , chunkData ) ;
public class SasFileParser { /** * The function to convert an array of bytes that stores the number of days elapsed from 01/01/1960 into a variable * of the { @ link Date } type . { @ link SasFileConstants # DATE _ FORMAT _ STRINGS } stores the formats of columns that contain * such data . * @ param bytes the array of bytes that stores the number of days from 01/01/1960. * @ return a variable of the { @ link Date } type . */ private Date bytesToDate ( byte [ ] bytes ) { } }
double doubleDays = bytesToDouble ( bytes ) ; return Double . isNaN ( doubleDays ) ? null : new Date ( ( long ) ( ( doubleDays - START_DATES_DAYS_DIFFERENCE ) * SECONDS_IN_MINUTE * MINUTES_IN_HOUR * HOURS_IN_DAY * MILLISECONDS_IN_SECONDS ) ) ;
public class MmffAtomTypeMatcher { /** * Hydrogen types , assigned based on the MMFFHDEF . PAR parent associations . * @ param container input structure representation * @ param symbs symbolic atom types * @ param graph adjacency list graph */ private void assignHydrogenTypes ( IAtomContainer container , String [ ] symbs , int [ ] [ ] graph ) { } }
for ( int v = 0 ; v < graph . length ; v ++ ) { if ( container . getAtom ( v ) . getSymbol ( ) . equals ( "H" ) && graph [ v ] . length == 1 ) { int w = graph [ v ] [ 0 ] ; symbs [ v ] = this . hydrogenMap . get ( symbs [ w ] ) ; } }
public class GetComplianceDetailsByResourceResult { /** * Indicates whether the specified AWS resource complies each AWS Config rule . * @ param evaluationResults * Indicates whether the specified AWS resource complies each AWS Config rule . */ public void setEvaluationResults ( java . util . Collection < EvaluationResult > evaluationResults ) { } }
if ( evaluationResults == null ) { this . evaluationResults = null ; return ; } this . evaluationResults = new com . amazonaws . internal . SdkInternalList < EvaluationResult > ( evaluationResults ) ;
public class ServerSecurityAlertPoliciesInner {
    /**
     * Creates or updates a threat detection policy.
     *
     * Synchronous wrapper: blocks on the async service call and unwraps the
     * final response body.
     *
     * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal .
     * @ param serverName The name of the server .
     * @ param parameters The server security alert policy .
     * @ throws IllegalArgumentException thrown if parameters fail the validation
     * @ throws CloudException thrown if the request is rejected by server
     * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @ return the ServerSecurityAlertPolicyInner object if successful .
     */
    public ServerSecurityAlertPolicyInner createOrUpdate ( String resourceGroupName , String serverName , ServerSecurityAlertPolicyInner parameters ) {
        return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , parameters ) . toBlocking ( ) . last ( ) . body ( ) ;
    }
}
public class BitInputStream {
    /**
     * Read a block of bytes (aligned) without updating the CRC value.
     *
     * Copies directly out of the internal buffer in chunks, refilling from
     * the underlying stream whenever the buffer is exhausted, and keeps the
     * bit-level accounting (availBits/totalBitsRead) consistent.
     *
     * @ param val The array to receive the bytes . If null , no bytes are returned
     * @ param nvals The number of bytes to read
     * @ throws IOException Thrown if error reading input stream
     */
    public void readByteBlockAlignedNoCRC ( byte [ ] val , int nvals ) throws IOException {
        // Remember the requested total so the copy offset can be derived
        // from the remaining count.
        int destlength = nvals ;
        while ( nvals > 0 ) {
            // Bytes currently available in the internal buffer.
            int chunk = Math . min ( nvals , putByte - getByte ) ;
            if ( chunk == 0 ) {
                // Buffer exhausted: refill from the underlying stream.
                readFromStream ( ) ;
            } else {
                if ( val != null ) System . arraycopy ( buffer , getByte , val , destlength - nvals , chunk ) ;
                nvals -= chunk ;
                getByte += chunk ;
                // totalConsumedBits = ( getByte < < BITS _ PER _ BLURB _ LOG2 ) ;
                // Account for the consumed bytes in bit units.
                availBits -= ( chunk << BITS_PER_BLURB_LOG2 ) ;
                totalBitsRead += ( chunk << BITS_PER_BLURB_LOG2 ) ;
            }
        }
    }
}
public class ClassInfo { /** * Get the full class name . */ public String getPackageName ( ClassProject . CodeType codeType ) { } }
if ( codeType == null ) codeType = ClassProject . CodeType . THICK ; String packageName = this . getField ( ClassInfo . CLASS_PACKAGE ) . toString ( ) ; ClassProject classProject = ( ClassProject ) ( ( ReferenceField ) this . getField ( ClassInfo . CLASS_PROJECT_ID ) ) . getReference ( ) ; if ( classProject != null ) if ( ( classProject . getEditMode ( ) == DBConstants . EDIT_IN_PROGRESS ) || ( classProject . getEditMode ( ) == DBConstants . EDIT_CURRENT ) ) packageName = classProject . getFullPackage ( codeType , packageName ) ; return packageName ;
public class PassiveRole {
    /**
     * Checks the previous index of the given AppendRequest, returning a boolean
     * indicating whether to continue handling the request.
     *
     * Returns true when the local log is consistent with the leader's claimed
     * (prevLogIndex, prevLogTerm); otherwise sends a failure response via
     * {@code failAppend} and returns false.
     */
    protected boolean checkPreviousEntry ( AppendRequest request , CompletableFuture < AppendResponse > future ) {
        RaftLogWriter writer = raft . getLogWriter ( ) ;
        RaftLogReader reader = raft . getLogReader ( ) ;
        // If the previous term is set , validate that it matches the local log .
        // We check the previous log term since that indicates whether any entry is present in the leader ' s
        // log at the previous log index . It ' s possible that the leader can send a non - zero previous log index
        // with a zero term in the event the leader has compacted its logs and is sending the first entry .
        if ( request . prevLogTerm ( ) != 0 ) {
            // Get the last entry written to the log .
            Indexed < RaftLogEntry > lastEntry = writer . getLastEntry ( ) ;
            // If the local log is non - empty . . .
            if ( lastEntry != null ) {
                // If the previous log index is greater than the last entry index , fail the attempt .
                if ( request . prevLogIndex ( ) > lastEntry . index ( ) ) {
                    log . debug ( "Rejected {}: Previous index ({}) is greater than the local log's last index ({})" , request , request . prevLogIndex ( ) , lastEntry . index ( ) ) ;
                    return failAppend ( lastEntry . index ( ) , future ) ;
                }
                // If the previous log index is less than the last written entry index , look up the entry .
                if ( request . prevLogIndex ( ) < lastEntry . index ( ) ) {
                    // Reset the reader to the previous log index .
                    if ( reader . getNextIndex ( ) != request . prevLogIndex ( ) ) {
                        reader . reset ( request . prevLogIndex ( ) ) ;
                    }
                    // The previous entry should exist in the log if we ' ve gotten this far .
                    if ( ! reader . hasNext ( ) ) {
                        log . debug ( "Rejected {}: Previous entry does not exist in the local log" , request ) ;
                        return failAppend ( lastEntry . index ( ) , future ) ;
                    }
                    // Read the previous entry and validate that the term matches the request previous log term .
                    Indexed < RaftLogEntry > previousEntry = reader . next ( ) ;
                    if ( request . prevLogTerm ( ) != previousEntry . entry ( ) . term ( ) ) {
                        log . debug ( "Rejected {}: Previous entry term ({}) does not match local log's term for the same entry ({})" , request , request . prevLogTerm ( ) , previousEntry . entry ( ) . term ( ) ) ;
                        // Fail with the index just before the conflicting entry.
                        return failAppend ( request . prevLogIndex ( ) - 1 , future ) ;
                    }
                }
                // If the previous log term doesn ' t equal the last entry term , fail the append , sending the prior entry .
                else if ( request . prevLogTerm ( ) != lastEntry . entry ( ) . term ( ) ) {
                    log . debug ( "Rejected {}: Previous entry term ({}) does not equal the local log's last term ({})" , request , request . prevLogTerm ( ) , lastEntry . entry ( ) . term ( ) ) ;
                    return failAppend ( request . prevLogIndex ( ) - 1 , future ) ;
                }
            } else {
                // If the previous log index is set and the last entry is null , fail the append .
                if ( request . prevLogIndex ( ) > 0 ) {
                    log . debug ( "Rejected {}: Previous index ({}) is greater than the local log's last index (0)" , request , request . prevLogIndex ( ) ) ;
                    return failAppend ( 0 , future ) ;
                }
            }
        }
        return true ;
    }
}
public class RoleMappingMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RoleMapping roleMapping , ProtocolMarshaller protocolMarshaller ) { } }
if ( roleMapping == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( roleMapping . getType ( ) , TYPE_BINDING ) ; protocolMarshaller . marshall ( roleMapping . getAmbiguousRoleResolution ( ) , AMBIGUOUSROLERESOLUTION_BINDING ) ; protocolMarshaller . marshall ( roleMapping . getRulesConfiguration ( ) , RULESCONFIGURATION_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class DBCluster { /** * Contains one or more identifiers of the Read Replicas associated with this DB cluster . * @ param readReplicaIdentifiers * Contains one or more identifiers of the Read Replicas associated with this DB cluster . */ public void setReadReplicaIdentifiers ( java . util . Collection < String > readReplicaIdentifiers ) { } }
if ( readReplicaIdentifiers == null ) { this . readReplicaIdentifiers = null ; return ; } this . readReplicaIdentifiers = new java . util . ArrayList < String > ( readReplicaIdentifiers ) ;
public class EncodingUtilsImpl {
    /**
     * DS method for runtime updates to configuration without stopping and
     * restarting the component.
     *
     * Scans the new configuration for properties of the form
     * {@code encoding.<locale>} (stored into {@code localeMap}) and
     * {@code converter.<encoding>} (stored lower-cased into {@code converterMap}).
     * Properties that fail to parse are FFDC'd and skipped; processing continues
     * with the remaining entries.
     *
     * @param config the updated configuration properties; {@code null} is a no-op
     */
    @Modified
    protected void modified(Map<String, Object> config) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
            Tr.event(tc, "Config modified: " + this);
        }
        // Nothing to do when no configuration was supplied.
        if (null == config) {
            return;
        }
        final String ENC = "encoding.";
        final String CONV = "converter.";
        for (Map.Entry<String, Object> entry : config.entrySet()) {
            String key = entry.getKey();
            int len = key.length();
            try {
                // "encoding.<locale>" -> locale-to-encoding mapping, key suffix kept as-is.
                if (len > ENC.length() && key.startsWith(ENC)) {
                    String value = (String) entry.getValue();
                    localeMap.put(key.substring(ENC.length()), value);
                }
                // "converter.<encoding>" -> converter mapping, both sides lower-cased.
                else if (len > CONV.length() && key.startsWith(CONV)) {
                    String value = (String) entry.getValue();
                    converterMap.put(key.substring(CONV.length()).toLowerCase(), value.toLowerCase());
                }
            } catch (Throwable t) {
                // A bad property (e.g. non-String value -> ClassCastException) is
                // recorded via FFDC and ignored so the rest of the config still applies.
                FFDCFilter.processException(t, "EncodingUtils.processConfig", "1", new Object[] { key, entry.getValue() });
                if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                    Tr.event(tc, "Invalid property: [" + key + "]=[" + config.get(key) + "]");
                }
            }
        } // loop on the entries
    }
}
public class UnreviewedPagesTitles { /** * generates the next MediaWiki - request ( GetMethod ) and adds it to msgs . * @ param namespace the namespace ( s ) that will be searched for links , as a string of numbers * separated by ' | ' ; if null , this parameter is omitted * @ param urstart Start listing at this page title * @ param urend Stop listing at this page title */ private HttpAction generateRequest ( int [ ] namespace , String urstart , String urend ) { } }
RequestBuilder requestBuilder = new ApiRequestBuilder ( ) . action ( "query" ) . formatXml ( ) . param ( "list" , "unreviewedpages" ) . param ( "urlimit" , LIMIT ) ; if ( namespace != null ) { String urnamespace = MediaWiki . urlEncode ( MWAction . createNsString ( namespace ) ) ; requestBuilder . param ( "urnamespace" , urnamespace ) ; } if ( urstart . length ( ) > 0 ) { requestBuilder . param ( "urstart" , urstart ) ; } if ( urend . length ( ) > 0 ) { requestBuilder . param ( "urend" , urend ) ; } return requestBuilder . buildGet ( ) ;
public class InternalService { /** * Returns observable to create a conversation . * @ param conversationId ID of a conversation to obtain . * @ return Observable to to create a conversation . */ public Observable < ComapiResult < ConversationDetails > > getConversation ( @ NonNull final String conversationId ) { } }
final String token = getToken ( ) ; if ( sessionController . isCreatingSession ( ) ) { return getTaskQueue ( ) . queueGetConversation ( conversationId ) ; } else if ( TextUtils . isEmpty ( token ) ) { return Observable . error ( getSessionStateErrorDescription ( ) ) ; } else { return doGetConversation ( token , conversationId ) ; }
public class ScoreBuildHistogram {
    /**
     * Accumulates histograms once-per-NID, but requires pre-sorting the rows by NID.
     *
     * Performs a counting sort of the chunk-local rows by their node id (NID) so
     * that accum_all2 can visit all rows of each NID contiguously.
     *
     * @param chks  the feature chunks
     * @param wrks  the work chunk
     * @param nnids per-row node ids; negative entries mark rows to be skipped
     */
    private void accum_all(Chunk chks[], Chunk wrks, int nnids[]) {
        final DHistogram hcs[][] = _hcs;
        // Sort the rows by NID, so we visit all the same NIDs in a row
        // Counting-sort pass 1: nh[i+1] counts rows belonging to NID i in this chunk.
        int nh[] = new int[hcs.length + 1];
        for (int i : nnids) if (i >= 0) nh[i + 1]++;
        // Pass 2: prefix-sum turns counts into starting offsets per NID.
        for (int i = 0; i < hcs.length; i++) nh[i + 1] += nh[i];
        // Pass 3: splat the rows into NID-groups; nh[nid] is advanced as each
        // row is placed, so after this loop nh[nid] points one past the group's end.
        int rows[] = new int[nnids.length];
        for (int row = 0; row < nnids.length; row++) if (nnids[row] >= 0) rows[nh[nnids[row]]++] = row;
        // rows[] has Chunk-local ROW-numbers now, in-order, grouped by NID.
        // nh[] lists the start of each new NID, and is indexed by NID+1.
        accum_all2(chks, wrks, nh, rows);
    }
}
public class Quad { /** * Gets coordinates of a corner . * @ param index The index of a corner . */ public Vector3D getConer ( int index ) { } }
Vector3D v = new Vector3D ( 0 , 0 , 0 ) ; if ( index <= 0 ) { v . set ( this . x1 , this . y1 ) ; } else if ( index == 1 ) { v . set ( this . x2 , this . y2 ) ; } else if ( index == 2 ) { v . set ( this . x3 , this . y3 ) ; } else if ( 3 <= index ) { v . set ( this . x4 , this . y4 ) ; } return v ;
public class SqlConnRunner {
    /**
     * Paged query.<br>
     * This method does not close the Connection.
     *
     * @param conn   the database connection
     * @param fields the fields to return; null returns all fields
     * @param where  the condition entity (includes the table name)
     * @param page   the page object; when null the whole result set is returned as a single page
     * @return the paged result
     * @throws SQLException on SQL execution failure
     */
    public PageResult<Entity> page(Connection conn, Collection<String> fields, Entity where, Page page) throws SQLException {
        checkConn(conn);
        // No paging requested: fetch everything and wrap it as one page.
        if (null == page) {
            List<Entity> entityList = this.find(conn, fields, where, new EntityListHandler());
            final PageResult<Entity> pageResult = new PageResult<Entity>(0, entityList.size(), entityList.size());
            pageResult.addAll(entityList);
            return pageResult;
        }
        // Count total matching rows first so the PageResult knows the overall size.
        final int count = count(conn, where);
        PageResultHandler pageResultHandler = PageResultHandler.create(new PageResult<Entity>(page.getPageNumber(), page.getPageSize(), count));
        // Delegate to the handler-based overload for the actual paged fetch.
        return this.page(conn, fields, where, page, pageResultHandler);
    }
}
public class GConvolveImageOps { /** * Performs a horizontal 1D convolution across the image . The horizontal border is not processed . * @ param input The original image . Not modified . * @ param output Where the resulting image is written to . Modified . * @ param kernel The kernel that is being convolved . Not modified . */ public static < In extends ImageBase < In > , Out extends ImageBase < Out > , K extends Kernel1D > void horizontal ( K kernel , In input , Out output ) { } }
switch ( input . getImageType ( ) . getFamily ( ) ) { case GRAY : { if ( input instanceof GrayF32 ) { ConvolveImageNoBorder . horizontal ( ( Kernel1D_F32 ) kernel , ( GrayF32 ) input , ( GrayF32 ) output ) ; } else if ( input instanceof GrayU8 ) { if ( GrayI16 . class . isAssignableFrom ( output . getClass ( ) ) ) ConvolveImageNoBorder . horizontal ( ( Kernel1D_S32 ) kernel , ( GrayU8 ) input , ( GrayI16 ) output ) ; else ConvolveImageNoBorder . horizontal ( ( Kernel1D_S32 ) kernel , ( GrayU8 ) input , ( GrayS32 ) output ) ; } else if ( input instanceof GrayS16 ) { ConvolveImageNoBorder . horizontal ( ( Kernel1D_S32 ) kernel , ( GrayS16 ) input , ( GrayI16 ) output ) ; } else { throw new IllegalArgumentException ( "Unknown image type: " + input . getClass ( ) . getName ( ) ) ; } } break ; case INTERLEAVED : { if ( output instanceof InterleavedF32 ) { ConvolveImageNoBorder . horizontal ( ( Kernel1D_F32 ) kernel , ( InterleavedF32 ) input , ( InterleavedF32 ) output ) ; } else if ( input instanceof InterleavedU8 ) { if ( InterleavedI16 . class . isAssignableFrom ( output . getClass ( ) ) ) ConvolveImageNoBorder . horizontal ( ( Kernel1D_S32 ) kernel , ( InterleavedU8 ) input , ( InterleavedI16 ) output ) ; else ConvolveImageNoBorder . horizontal ( ( Kernel1D_S32 ) kernel , ( InterleavedU8 ) input , ( InterleavedS32 ) output ) ; } else if ( input instanceof InterleavedS16 ) { ConvolveImageNoBorder . horizontal ( ( Kernel1D_S32 ) kernel , ( InterleavedS16 ) input , ( InterleavedI16 ) output ) ; } else { throw new IllegalArgumentException ( "Unknown image type: " + input . getClass ( ) . getName ( ) ) ; } } break ; case PLANAR : { Planar inp = ( Planar ) input ; Planar outp = ( Planar ) output ; for ( int i = 0 ; i < inp . getNumBands ( ) ; i ++ ) { horizontal ( kernel , inp . getBand ( i ) , outp . getBand ( i ) ) ; } } break ; default : throw new IllegalArgumentException ( "Unknown image family" ) ; }
public class XCollectionLiteralImpl {
    /**
     * <!-- begin-user-doc -->
     * EMF-generated reflective setter. For the ELEMENTS feature it replaces the
     * current element list with the given collection; all other features are
     * delegated to the superclass.
     * <!-- end-user-doc -->
     * @generated
     */
    @SuppressWarnings("unchecked")
    @Override
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case XbasePackage.XCOLLECTION_LITERAL__ELEMENTS:
                // Replace-all semantics: clear then add the new elements.
                getElements().clear();
                getElements().addAll((Collection<? extends XExpression>) newValue);
                return;
        }
        // Unknown feature: let the superclass handle (or reject) it.
        super.eSet(featureID, newValue);
    }
}
public class FaceListsImpl { /** * Create an empty face list . Up to 64 face lists are allowed to exist in one subscription . * @ param faceListId Id referencing a particular face list . * @ param createOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponse } object if successful . */ public Observable < Void > createAsync ( String faceListId , CreateFaceListsOptionalParameter createOptionalParameter ) { } }
return createWithServiceResponseAsync ( faceListId , createOptionalParameter ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
public class Migrators { /** * Inspects the vault and checks if it is supported by this library . * @ param pathToVault Path to the vault ' s root * @ param masterkeyFilename Name of the masterkey file located in the vault * @ return < code > true < / code > if the vault at the given path is of an older format than supported by this library * @ throws IOException if an I / O error occurs parsing the masterkey file */ public boolean needsMigration ( Path pathToVault , String masterkeyFilename ) throws IOException { } }
Path masterKeyPath = pathToVault . resolve ( masterkeyFilename ) ; byte [ ] keyFileContents = Files . readAllBytes ( masterKeyPath ) ; KeyFile keyFile = KeyFile . parse ( keyFileContents ) ; return keyFile . getVersion ( ) < Constants . VAULT_VERSION ;
public class EditManager {
    /**
     * Searches for a dlm:pref command which indicates that a user preference was changed and if
     * found removes it from the user's PLF.
     *
     * @param person        the user whose PLF is inspected
     * @param elementId     the id of the layout element the preference belongs to
     * @param attributeName the name of the preference attribute to remove
     */
    public static void removePreferenceDirective(IPerson person, String elementId, String attributeName) {
        // Delegate to the generic directive removal using the preference element type.
        removeDirective(elementId, attributeName, Constants.ELM_PREF, person);
    }
}
public class StatisticsApi { /** * Get export statistic definitions . * Get export statistic definitions * @ return ApiResponse & lt ; GetExportStatisticDefinitions & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < GetExportStatisticDefinitions > getExportStatisticDefinitionsWithHttpInfo ( ) throws ApiException { } }
com . squareup . okhttp . Call call = getExportStatisticDefinitionsValidateBeforeCall ( null , null ) ; Type localVarReturnType = new TypeToken < GetExportStatisticDefinitions > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class FixedSizeBitSet { /** * Flips all bits : 1 - - > 0 and 0 - - > 1 */ public void flip ( ) { } }
int fromIndex = 0 , toIndex = size ( ) ; if ( fromIndex == toIndex ) return ; int startWordIndex = wordIndex ( fromIndex ) ; int endWordIndex = wordIndex ( toIndex ) ; long firstWordMask = WORD_MASK << fromIndex ; long lastWordMask = WORD_MASK >>> - toIndex ; if ( startWordIndex == endWordIndex ) { // Case 1 : One word words [ startWordIndex ] ^= ( firstWordMask & lastWordMask ) ; } else { // Case 2 : Multiple words // Handle first word words [ startWordIndex ] ^= firstWordMask ; // Handle intermediate words , if any for ( int i = startWordIndex + 1 ; i < endWordIndex ; i ++ ) words [ i ] ^= WORD_MASK ; // Handle last word words [ endWordIndex ] ^= lastWordMask ; }
public class ProductSearchClient { /** * Creates and returns a new product resource . * < p > Possible errors : * < p > & # 42 ; Returns INVALID _ ARGUMENT if display _ name is missing or longer than 4096 characters . * & # 42 ; Returns INVALID _ ARGUMENT if description is longer than 4096 characters . & # 42 ; Returns * INVALID _ ARGUMENT if product _ category is missing or invalid . * < p > Sample code : * < pre > < code > * try ( ProductSearchClient productSearchClient = ProductSearchClient . create ( ) ) { * LocationName parent = LocationName . of ( " [ PROJECT ] " , " [ LOCATION ] " ) ; * Product product = Product . newBuilder ( ) . build ( ) ; * String productId = " " ; * Product response = productSearchClient . createProduct ( parent . toString ( ) , product , productId ) ; * < / code > < / pre > * @ param parent The project in which the Product should be created . * < p > Format is ` projects / PROJECT _ ID / locations / LOC _ ID ` . * @ param product The product to create . * @ param productId A user - supplied resource id for this Product . If set , the server will attempt * to use this value as the resource id . If it is already in use , an error is returned with * code ALREADY _ EXISTS . Must be at most 128 characters long . It cannot contain the character * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Product createProduct ( String parent , Product product , String productId ) { } }
CreateProductRequest request = CreateProductRequest . newBuilder ( ) . setParent ( parent ) . setProduct ( product ) . setProductId ( productId ) . build ( ) ; return createProduct ( request ) ;
public class FlexiBean { /** * Sets a property in this bean to the specified value . * This creates a property if one does not exist . * @ param propertyName the property name , not empty * @ param newValue the new value , may be null * @ return the old value of the property , may be null */ public Object put ( String propertyName , Object newValue ) { } }
if ( VALID_KEY . matcher ( propertyName ) . matches ( ) == false ) { throw new IllegalArgumentException ( "Invalid key for FlexiBean: " + propertyName ) ; } return dataWritable ( ) . put ( propertyName , newValue ) ;
public class JHelpView {
    /**
     * Initialize the HTMLView window.
     *
     * Resolves the owning applet from either argument (the caller may pass the
     * applet in either position), falls back to this view's own applet, and
     * supplies a default help URL when none is configured.
     *
     * @param parent The parent screen/applet.
     * @param strURL The initial URL (otherwise, you should supply the URL with the properties).
     *               NOTE(review): this parameter is also accepted as a BaseApplet — presumably a
     *               legacy calling convention; confirm against callers.
     */
    public void init(Object parent, Object strURL) {
        BaseApplet applet = null;
        if (parent instanceof BaseApplet)
            applet = (BaseApplet) parent;
        // strURL may actually carry the applet (legacy overload-style call).
        if (strURL instanceof BaseApplet)
            applet = (BaseApplet) strURL;
        if (applet == null)
            applet = this.getBaseApplet();
        // No URL given and none configured on the applet: default to the help query.
        if (strURL == null)
            if (applet != null)
                if (applet.getProperty(Params.URL) == null)
                    strURL = "?" + Params.HELP + "=";
        this.setOpaque(false);
        super.init(parent, strURL);
        // Register this view's editor as the parent applet's help view.
        if (parent instanceof BaseApplet)
            ((BaseApplet) parent).setHelpView(this.getHtmlEditor());
    }
}
public class CmsDbUtil {
    /**
     * Fills a given prepared statement with parameters from a list of objects.<p>
     *
     * Supported parameter types are {@link String}, {@link Integer} and {@link Long};
     * parameters are bound in list order starting at index 1.
     *
     * @param stmt   the prepared statement
     * @param params the parameter objects
     * @throws SQLException             if something goes wrong
     * @throws IllegalArgumentException if a parameter is null or of an unsupported type
     */
    public static void fillParameters(PreparedStatement stmt, List<Object> params) throws SQLException {
        int i = 1;
        for (Object param : params) {
            if (param instanceof String) {
                stmt.setString(i, (String) param);
            } else if (param instanceof Integer) {
                stmt.setInt(i, ((Integer) param).intValue());
            } else if (param instanceof Long) {
                stmt.setLong(i, ((Long) param).longValue());
            } else {
                // Fail fast with a descriptive message instead of a bare,
                // message-less IllegalArgumentException.
                throw new IllegalArgumentException(
                        "Unsupported SQL parameter type at index " + i + ": "
                                + (param == null ? "null" : param.getClass().getName()));
            }
            i += 1;
        }
    }
}
public class Texture2dProgram { /** * Configures the convolution filter values . * This only has an effect for programs that use the * FRAGMENT _ SHADER _ EXT _ FILT Fragment shader . * @ param values Normalized filter values ; must be KERNEL _ SIZE elements . */ public void setKernel ( float [ ] values , float colorAdj ) { } }
if ( values . length != KERNEL_SIZE ) { throw new IllegalArgumentException ( "Kernel size is " + values . length + " vs. " + KERNEL_SIZE ) ; } System . arraycopy ( values , 0 , mKernel , 0 , KERNEL_SIZE ) ; mColorAdjust = colorAdj ; // Log . d ( TAG , " filt kernel : " + Arrays . toString ( mKernel ) + " , adj = " + colorAdj ) ;
public class PDF417Writer { /** * Takes encoder , accounts for width / height , and retrieves bit matrix */ private static BitMatrix bitMatrixFromEncoder ( PDF417 encoder , String contents , int errorCorrectionLevel , int width , int height , int margin ) throws WriterException { } }
encoder . generateBarcodeLogic ( contents , errorCorrectionLevel ) ; int aspectRatio = 4 ; byte [ ] [ ] originalScale = encoder . getBarcodeMatrix ( ) . getScaledMatrix ( 1 , aspectRatio ) ; boolean rotated = false ; if ( ( height > width ) != ( originalScale [ 0 ] . length < originalScale . length ) ) { originalScale = rotateArray ( originalScale ) ; rotated = true ; } int scaleX = width / originalScale [ 0 ] . length ; int scaleY = height / originalScale . length ; int scale ; if ( scaleX < scaleY ) { scale = scaleX ; } else { scale = scaleY ; } if ( scale > 1 ) { byte [ ] [ ] scaledMatrix = encoder . getBarcodeMatrix ( ) . getScaledMatrix ( scale , scale * aspectRatio ) ; if ( rotated ) { scaledMatrix = rotateArray ( scaledMatrix ) ; } return bitMatrixFromBitArray ( scaledMatrix , margin ) ; } return bitMatrixFromBitArray ( originalScale , margin ) ;
public class FileUtil { /** * Returns invalid fileSize error message . * @ param maxFileSize allowed fileSize * @ return human readable message */ public static String getInvalidFileSizeMessage ( final long maxFileSize ) { } }
return String . format ( I18nUtilities . format ( null , InternalMessages . DEFAULT_VALIDATION_ERROR_FILE_WRONG_SIZE ) , FileUtil . readableFileSize ( maxFileSize ) ) ;
public class Es6RewriteDestructuring { /** * Transpiles a destructuring pattern in a declaration or assignment to ES5 * @ param nodeToDetach a statement node containing the pattern . This method will replace the node * with one or more other statements . */ private void replacePattern ( NodeTraversal t , Node pattern , Node rhs , Node parent , Node nodeToDetach ) { } }
checkArgument ( NodeUtil . isStatement ( nodeToDetach ) , nodeToDetach ) ; switch ( pattern . getToken ( ) ) { case ARRAY_PATTERN : replaceArrayPattern ( t , pattern , rhs , parent , nodeToDetach ) ; break ; case OBJECT_PATTERN : replaceObjectPattern ( t , pattern , rhs , parent , nodeToDetach ) ; break ; default : throw new IllegalStateException ( "unexpected" ) ; }
public class CompColMatrix {
    /**
     * Finds the insertion index of the given (row, column) entry within the
     * compressed-column structure.
     *
     * @param row    zero-based row of the entry
     * @param column zero-based column of the entry
     * @return the index into the value array for this entry
     * @throws IndexOutOfBoundsException if the entry is not part of the matrix structure
     */
    private int getIndex(int row, int column) {
        // Search only within this column's slice of rowIndex,
        // i.e. [columnPointer[column], columnPointer[column + 1]).
        // NOTE(review): this is a project-local binarySearch, not java.util.Arrays;
        // the extra rowIndex[i] == row check suggests it may return a non-exact
        // position for missing keys — confirm against its implementation.
        int i = no.uib.cipr.matrix.sparse.Arrays.binarySearch(rowIndex, row, columnPointer[column], columnPointer[column + 1]);
        if (i >= 0 && rowIndex[i] == row)
            return i;
        else
            // Report 1-based coordinates in the error message.
            throw new IndexOutOfBoundsException("Entry (" + (row + 1) + ", " + (column + 1) + ") is not in the matrix structure");
    }
}
public class AOStream {
    /**
     * Callback from JSRemoteConsumerPoint that the given tick in the stream should be changed to the
     * value state. The message has already been non-persistently locked.
     *
     * Under the stream lock this either (a) completes the tick and unlocks the message when the
     * stream is flushing/closed, (b) ignores it when the tick is no longer in Requested state, or
     * (c) transitions it to a value tick — immediately for non-persistent reliabilities, or via an
     * asynchronous persist-lock work item for persistent ones. Message sends and the flushed
     * notification are deferred until after the lock is released.
     *
     * @param tick The tick in the stream
     * @param msg  The value
     */
    public final void satisfiedRequest(long tick, SIMPMessage msg) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "satisfiedRequest", new Object[] { Long.valueOf(tick), msg });
        boolean sendMsg = false; // used to send a message outside the synchronized block
        long startstamp = tick;
        AOValue value = null;
        boolean transitionToFlushed = false;
        synchronized (this) {
            try {
                if (startedFlush || closed) {
                    // Stream is going away: unlock the message
                    msg.unlockMsg(msg.getLockID(), null, true);
                    // change this tick to final
                    TickRange r = new TickRange(TickRange.Completed, tick, tick);
                    stream.writeCompleted(r);
                    transitionToFlushed = tryTransitionToFlushed();
                } else {
                    stream.setCursor(tick);
                    TickRange r = stream.getNext();
                    if (r.type != TickRange.Requested) {
                        // this tick is no longer requested
                        // unlock the message
                        msg.unlockMsg(msg.getLockID(), null, true);
                    } else {
                        AORequested requested = (AORequested) r.value;
                        // How long the request waited before being satisfied.
                        long waitTime = System.currentTimeMillis() - requested.startTime;
                        if (msg.getReliability().compareTo(Reliability.RELIABLE_NONPERSISTENT) <= 0) {
                            // Note that this means that the message storage strategy in the MS is either
                            // STORE_NEVER or STORE_MAYBE. In either case, we create an AOValue tick with
                            // storage strategy equal to STORE_NEVER. In fact, we don't even add this AOValue
                            // tick to an ItemStream. Also, we do not persistently lock the message
                            int reliability = msg.getReliability().getIndex();
                            int priority = msg.getMessage().getPriority().intValue();
                            // message is already locked. change tick to value state
                            value = new AOValue(tick, msg, msg.getID(), AbstractItem.STORE_NEVER, 0L, waitTime, latestTick[reliability][priority]);
                            latestTick[reliability][priority] = tick; // update latestTick
                            TickRange r2 = TickRange.newValueTick(tick, value, 0L);
                            stream.writeCombinedRange(r2);
                            r2 = stream.findCompletedRange(r2);
                            // now start the DecisionExpected timer
                            dem.addTimeoutEntry(value);
                            // send message if active (actual send happens after releasing the lock)
                            if (active) {
                                sendMsg = true;
                                startstamp = r2.startstamp;
                            }
                        } else {
                            // Persistent reliabilities: pick the storage policy by reliability level.
                            int storagePolicy;
                            if (msg.getReliability().compareTo(Reliability.ASSURED_PERSISTENT) < 0)
                                storagePolicy = AbstractItem.STORE_EVENTUALLY;
                            else
                                storagePolicy = AbstractItem.STORE_ALWAYS;
                            // change the AORequested tick to INSERTING state
                            ((AORequested) r.value).inserting = true;
                            // message needs to be persistently locked, and tick persisted
                            int reliability = msg.getReliability().getIndex();
                            int priority = msg.getMessage().getPriority().intValue();
                            PersistLockAndTick update = new PersistLockAndTick(tick, msg, storagePolicy, waitTime, latestTick[reliability][priority]);
                            latestTick[reliability][priority] = tick; // update latestTick
                            // Queue the persistent update for asynchronous processing.
                            parent.getPersistLockThread().enqueueWork(update);
                            countAsyncUpdatesOutstanding++;
                        }
                    }
                }
            } catch (MessageStoreException e) {
                // MessageStoreException shouldn't occur so FFDC.
                FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.AOStream.satisfiedRequest", "1:1058:1.80.3.24", this);
                SibTr.exception(tc, e);
            } catch (Exception e) {
                // probably a bug - log error
                FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.AOStream.satisfiedRequest", "1:1069:1.80.3.24", this);
                SIErrorException e2 = new SIErrorException(nls.getFormattedMessage("INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.impl.AOStream", "1:1077:1.80.3.24", e }, null));
                SibTr.exception(tc, e2);
                SibTr.error(tc, "INTERNAL_MESSAGING_ERROR_CWSIP0002", new Object[] { "com.ibm.ws.sib.processor.impl.AOStream", "1:1084:1.80.3.24", e });
            }
        } // end synchronized (this)
        // Deferred work: send the message data without holding the stream lock.
        if (sendMsg) {
            long prevTick = value.getPrevTick();
            parent.sendRemoteGetData(msg, remoteMEUuid, gatheringTargetDestUuid, streamId, prevTick, startstamp, tick, value.getWaitTime());
            msg.releaseJsMessage();
        }
        if (transitionToFlushed) {
            parent.sendFlushed(remoteMEUuid, gatheringTargetDestUuid, streamId);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "satisfiedRequest");
    }
}
public class ContainerKeyCache {
    /**
     * Updates the Last Indexed Offset for a given Segment. This is used for cache eviction purposes - no cache entry with
     * a segment offset smaller than this value may be evicted. A Segment must be registered either via this method or
     * via {@link #updateSegmentIndexOffsetIfMissing} in order to have backpointers recorded for the tail-end section of
     * the index.
     *
     * @param segmentId   The Id of the Segment to update the Last Indexed Offset for.
     * @param indexOffset The Last Indexed Offset to set. If negative, this will clear up the value.
     */
    void updateSegmentIndexOffset(long segmentId, long indexOffset) {
        // Negative offset means "unregister this segment and drop its cached entries".
        boolean remove = indexOffset < 0;
        SegmentKeyCache cache;
        int generation;
        synchronized (this.segmentCaches) {
            // Snapshot the generation under the lock so the offset update below
            // is attributed to a consistent cache generation.
            generation = this.currentCacheGeneration;
            if (remove) {
                cache = this.segmentCaches.remove(segmentId);
            } else {
                // Register the segment on first use.
                cache = this.segmentCaches.computeIfAbsent(segmentId, s -> new SegmentKeyCache(s, this.cache));
            }
        }
        // Perform eviction / offset update outside the map lock.
        if (cache != null) {
            if (remove) {
                evict(cache.evictAll());
            } else {
                cache.setLastIndexedOffset(indexOffset, generation);
            }
        }
    }
}
public class RangeUtils {
    /**
     * Recursive function that splits a given token range into a given number of token ranges.
     *
     * @param range        the token range to be split.
     * @param partitioner  the cassandra partitioner.
     * @param bisectFactor the actual number of pieces the original token range will be split into.
     * @param accumulator  a token range accumulator; resulting sub-ranges are added to it.
     */
    private static void bisectTokeRange(DeepTokenRange range, final IPartitioner partitioner, final int bisectFactor, final List<DeepTokenRange> accumulator) {
        final AbstractType tkValidator = partitioner.getTokenValidator();
        // Convert the range endpoints into partitioner Token objects.
        Token leftToken = partitioner.getTokenFactory().fromByteArray(tkValidator.decompose(range.getStartToken()));
        Token rightToken = partitioner.getTokenFactory().fromByteArray(tkValidator.decompose(range.getEndToken()));
        // Ask the partitioner for the midpoint, then convert it back to the
        // comparable value space used by DeepTokenRange.
        Token midToken = partitioner.midpoint(leftToken, rightToken);
        Comparable midpoint = (Comparable) tkValidator.compose(tkValidator.fromString(midToken.toString()));
        DeepTokenRange left = new DeepTokenRange(range.getStartToken(), midpoint, range.getReplicas());
        DeepTokenRange right = new DeepTokenRange(midpoint, range.getEndToken(), range.getReplicas());
        // Each recursion halves the factor; stop when no further split is needed.
        if (bisectFactor / 2 <= 1) {
            accumulator.add(left);
            accumulator.add(right);
        } else {
            bisectTokeRange(left, partitioner, bisectFactor / 2, accumulator);
            bisectTokeRange(right, partitioner, bisectFactor / 2, accumulator);
        }
    }
}
public class SoyDataException { /** * Prepends a key to the data path where this error occurred . E . g . if the dataPath was previously * ' foo . goo ' and the key to prepend is ' boo ' , then the new data path will be ' boo . foo . goo ' . * @ param key The key to prepend . */ public void prependKeyToDataPath ( String key ) { } }
if ( dataPath == null ) { dataPath = key ; } else { dataPath = key + ( ( dataPath . charAt ( 0 ) == '[' ) ? "" : "." ) + dataPath ; }
public class RegExp {
    /**
     * Replace the matches in the input with the replacement, translating
     * JavaScript replacement-string syntax to Java's before applying it.
     *
     * @param input       the input string
     * @param replacement the replacement (JavaScript syntax)
     * @return the resulting string
     * @throws ParameterOutOfBoundsException if Java cannot replace it like JavaScript
     *         (the unsupported $` and $' constructs)
     */
    public String replace(String input, String replacement) throws ParameterOutOfBoundsException {
        // Replace \ in the replacement with \\ to escape it for Java replace.
        replacement = REPLACEMENT_BACKSLASH.matcher(replacement).replaceAll(REPLACEMENT_BACKSLASH_FOR_JAVA);
        // Replace the Javascript-ese $& in the replacement with Java-ese $0, but
        // watch out for $$&, which should stay $$&, to be changed to \$& below.
        replacement = REPLACEMENT_DOLLAR_AMPERSAND.matcher(replacement).replaceAll(REPLACEMENT_DOLLAR_AMPERSAND_FOR_JAVA);
        // Test for Javascript-ese $` and $', which we do not support in the pure
        // Java version.
        if (REPLACEMENT_DOLLAR_APOSTROPHE.matcher(replacement).find()) {
            throw new ParameterOutOfBoundsException();
        }
        // Replace the Javascript-ese $$ in the replacement with Java-ese \$.
        replacement = REPLACEMENT_DOLLAR_DOLLAR.matcher(replacement).replaceAll(REPLACEMENT_DOLLAR_DOLLAR_FOR_JAVA);
        // With the 'global' flag, replace every match; otherwise only the first.
        Matcher matcher = pattern.matcher(input);
        return global ? matcher.replaceAll(replacement) : matcher.replaceFirst(replacement);
    }
}
public class Serializer {
    /**
     * Registers an abstract type serializer for the given abstract type.
     * Abstract serializers allow abstract types to be serialized without explicitly registering a concrete type.
     * The concept of abstract serializers differs from {@link #registerDefault(Class, TypeSerializerFactory) default serializers}
     * in that abstract serializers can be registered with a serializable type ID, and types {@link #register(Class) registered}
     * without a specific {@link TypeSerializer} do not inherit from abstract serializers.
     * <pre>
     * {@code
     * serializer.registerAbstract(List.class, AbstractListSerializer.class);
     * }
     * </pre>
     *
     * @param abstractType The abstract type for which to register the abstract serializer. Types that extend
     *                     the abstract type will be serialized using the given abstract serializer unless a
     *                     serializer has been registered for the specific concrete type.
     * @param serializer   The abstract type serializer with which to serialize instances of the abstract type.
     * @return This serializer, for call chaining.
     * @throws NullPointerException if the {@code abstractType} or {@code serializer} is {@code null}
     */
    public Serializer registerAbstract(Class<?> abstractType, Class<? extends TypeSerializer> serializer) {
        // Delegate to the registry; return this for a fluent API.
        registry.registerAbstract(abstractType, serializer);
        return this;
    }
}
public class SunriseSunsetCalculator { /** * Computes the sunrise for an arbitrary declination . * @ param latitude * @ param longitude * Coordinates for the location to compute the sunrise / sunset for . * @ param timeZone * timezone to compute the sunrise / sunset times in . * @ param date * < code > Calendar < / code > object containing the date to compute the official sunset for . * @ param degrees * Angle under the horizon for which to compute sunrise . For example , " civil sunrise " * corresponds to 6 degrees . * @ return the requested sunset time as a < code > Calendar < / code > object . */ public static Calendar getSunrise ( double latitude , double longitude , TimeZone timeZone , Calendar date , double degrees ) { } }
SolarEventCalculator solarEventCalculator = new SolarEventCalculator ( new Location ( latitude , longitude ) , timeZone ) ; return solarEventCalculator . computeSunriseCalendar ( new Zenith ( 90 - degrees ) , date ) ;
public class ApiOvhTelephony { /** * Get this object properties * REST : GET / telephony / { billingAccount } / ovhPabx / { serviceName } / dialplan / { dialplanId } / extension / { extensionId } / conditionTime / { conditionId } * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] * @ param dialplanId [ required ] * @ param extensionId [ required ] * @ param conditionId [ required ] */ public OvhOvhPabxDialplanExtensionConditionTime billingAccount_ovhPabx_serviceName_dialplan_dialplanId_extension_extensionId_conditionTime_conditionId_GET ( String billingAccount , String serviceName , Long dialplanId , Long extensionId , Long conditionId ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/ovhPabx/{serviceName}/dialplan/{dialplanId}/extension/{extensionId}/conditionTime/{conditionId}" ; StringBuilder sb = path ( qPath , billingAccount , serviceName , dialplanId , extensionId , conditionId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOvhPabxDialplanExtensionConditionTime . class ) ;
public class SimpleBitSet { /** * Sets the bits from the specified < tt > fromIndex < / tt > ( inclusive ) to the * specified < tt > toIndex < / tt > ( exclusive ) to < code > true < / code > . * @ param fromIndex index of the first bit to be set . * @ param toIndex index after the last bit to be set . * @ exception IndexOutOfBoundsException if < tt > fromIndex < / tt > is negative , * or < tt > toIndex < / tt > is negative , or < tt > fromIndex < / tt > is * larger than < tt > toIndex < / tt > . * @ since 1.4 */ public void set ( int fromIndex , int toIndex ) { } }
checkRange ( fromIndex , toIndex ) ; if ( fromIndex == toIndex ) return ; // Increase capacity if necessary int startWordIndex = wordIndex ( fromIndex ) ; int endWordIndex = wordIndex ( toIndex - 1 ) ; expandTo ( endWordIndex ) ; long firstWordMask = WORD_MASK << fromIndex ; long lastWordMask = WORD_MASK >>> - toIndex ; if ( startWordIndex == endWordIndex ) { // Case 1 : One word words [ startWordIndex ] |= ( firstWordMask & lastWordMask ) ; } else { // Case 2 : Multiple words // Handle first word words [ startWordIndex ] |= firstWordMask ; // Handle intermediate words , if any for ( int i = startWordIndex + 1 ; i < endWordIndex ; i ++ ) words [ i ] = WORD_MASK ; // Handle last word ( restores invariants ) words [ endWordIndex ] |= lastWordMask ; } checkInvariants ( ) ;
public class SingleDocument {

	/**
	 * Reset allParas: re-reads all text paragraphs from the document and replaces
	 * each footnote marker character with a zero-width space so the cached texts
	 * match the flat-paragraph representation.
	 *
	 * @param docCursor cursor tools used to fetch the document's text paragraphs
	 * @param flatPara  flat-paragraph tools supplying the footnote positions
	 * @return true if the paragraph cache was rebuilt, false if no paragraphs were found
	 */
	private boolean resetAllParas(DocumentCursorTools docCursor, FlatParagraphTools flatPara) {
		allParas = docCursor.getAllTextParagraphs();
		if (allParas == null || allParas.size() < 1) {
			return false;
		}
		// change all footnotes to \u200B (like in paraText)
		// List of footnotes
		List<int[]> footnotes = flatPara.getFootnotePositions();
		// divNum: offset between the flat-paragraph list and the text-paragraph list.
		// Presumably the first divNum flat paragraphs are non-text content
		// (e.g. headers/frames) -- TODO confirm against FlatParagraphTools.
		divNum = footnotes.size() - allParas.size();
		if (divNum >= 0) {
			for (int i = 0; i < allParas.size(); i++) {
				// Footnote positions of flat paragraph (i + divNum) belong to text paragraph i.
				for (int pos : footnotes.get(i + divNum)) {
					if (pos <= allParas.get(i).length()) {
						// Replace the character at 'pos' with a zero-width space;
						// when pos is at/after the last character, the tail is empty
						// and only the prefix + marker is kept.
						String paraText = allParas.get(i).substring(0, pos) + ZERO_WIDTH_SPACE;
						if (pos < allParas.get(i).length() - 1) {
							paraText += allParas.get(i).substring(pos + 1);
						}
						allParas.set(i, paraText);
					}
				}
			}
		}
		return true;
	}
}
public class KunderaCoreUtils {

    /**
     * Gets the lucene query from jpa query.
     * Walks the JPA query's filter-clause queue and translates each clause into a
     * Lucene query fragment via {@link LuceneQueryBuilder}, with special handling
     * for embedded (composite) IDs and partition-key lookups.
     *
     * @param kunderaQuery    the parsed JPA query (supplies entity metadata and filter clauses)
     * @param kunderaMetadata application metadata used to resolve the metamodel
     * @return the lucene query from jpa query
     */
    public static String getLuceneQueryFromJPAQuery(final KunderaQuery kunderaQuery, final KunderaMetadata kunderaMetadata)
    {
        LuceneQueryBuilder queryBuilder = new LuceneQueryBuilder();
        EntityMetadata metadata = kunderaQuery.getEntityMetadata();
        MetamodelImpl metaModel = (MetamodelImpl) kunderaMetadata.getApplicationMetadata().getMetamodel(metadata.getPersistenceUnit());
        Class valueClazz = null;
        EntityType entity = metaModel.entity(metadata.getEntityClazz());
        // Ensure the complete-partition-key validation runs at most once per query.
        boolean partitionKeyCheck = true;
        for (Object object : kunderaQuery.getFilterClauseQueue())
        {
            if (object instanceof FilterClause)
            {
                FilterClause filter = (FilterClause) object;
                String property = filter.getProperty();
                String condition = filter.getCondition();
                String valueAsString = filter.getValue().get(0).toString();
                String fieldName = metadata.getFieldName(property);
                // True when the entity uses an @EmbeddedId / composite key.
                boolean isEmbeddedId = metaModel.isEmbeddable(metadata.getIdAttribute().getBindableJavaType());
                String idColumn = ((AbstractAttribute) metadata.getIdAttribute()).getJPAColumnName();
                valueClazz = getValueType(entity, fieldName);
                if (isEmbeddedId)
                {
                    if (idColumn.equals(property))
                    {
                        // Filter is on the whole composite key: serialize it to a single string.
                        valueAsString = prepareCompositeKey(metadata.getIdAttribute(), metaModel, filter.getValue().get(0));
                        queryBuilder.appendIndexName(metadata.getIndexName()).appendPropertyName(idColumn).buildQuery(condition, valueAsString, valueClazz);
                    }
                    else
                    {
                        valueClazz = metadata.getIdAttribute().getBindableJavaType();
                        // A property path with more than one '.' presumably addresses a field
                        // nested inside the composite key -- validate that the full partition
                        // key is present (once). TODO confirm path-depth assumption.
                        if (property.lastIndexOf('.') != property.indexOf('.') && partitionKeyCheck)
                        {
                            isCompletePartitionKeyPresentInQuery(kunderaQuery.getFilterClauseQueue(), metaModel, metadata);
                            partitionKeyCheck = false;
                        }
                        if (metaModel.isEmbeddable(filter.getValue().get(0).getClass()))
                        {
                            // Filter value itself is an embeddable: expand it to per-field clauses.
                            prepareLuceneQueryForPartitionKey(queryBuilder, filter.getValue().get(0), metaModel, metadata.getIndexName(), valueClazz);
                        }
                        else
                        {
                            // Use only the last path segment as the property name.
                            property = property.substring(property.lastIndexOf(".") + 1);
                            queryBuilder.appendIndexName(metadata.getIndexName()).appendPropertyName(getPropertyName(metadata, property, kunderaMetadata)).buildQuery(condition, valueAsString, valueClazz);
                        }
                    }
                }
                else
                {
                    // Simple (non-composite) attribute filter.
                    queryBuilder.appendIndexName(metadata.getIndexName()).appendPropertyName(getPropertyName(metadata, property, kunderaMetadata)).buildQuery(condition, valueAsString, valueClazz);
                }
            }
            else
            {
                // Non-clause queue entries are logical connectors (e.g. AND/OR) --
                // appended verbatim. TODO confirm against KunderaQuery's queue contract.
                queryBuilder.buildQuery(object.toString(), object.toString(), String.class);
            }
        }
        queryBuilder.appendEntityName(kunderaQuery.getEntityClass().getCanonicalName().toLowerCase());
        return queryBuilder.getQuery();
    }
}
public class IOUtil { /** * 创建输入流 ( 经过IO适配器创建 ) * @ param path * @ return * @ throws IOException */ public static InputStream newInputStream ( String path ) throws IOException { } }
if ( IOAdapter == null ) return new FileInputStream ( path ) ; return IOAdapter . open ( path ) ;
public class SeaGlassStyle {

    /**
     * Returns true if the region is opaque.
     *
     * <p>Overridden to cause this style to populate itself with data from
     * UIDefaults, if necessary. If opacity is not specified in UI defaults,
     * then it defaults to being non-opaque.</p>
     *
     * @param ctx context SynthContext identifying requester
     * @return true if region is opaque.
     */
    @Override
    public boolean isOpaque(SynthContext ctx) {
        // Force Table CellRenderers to be opaque
        String componentName = ctx.getComponent().getName();
        if ("Table.cellRenderer".equals(componentName)) {
            return true;
        }

        // Null (unspecified in UI defaults) and FALSE both mean non-opaque.
        return Boolean.TRUE.equals((Boolean) get(ctx, "opaque"));
    }
}
public class ActionMethod { /** * 执行本体方法 * @ throws ActionException */ private void invokeActionMethod ( ) throws ActionException { } }
Object returnValue = null ; Class < ? > [ ] parameterTypes = this . method . getParameterTypes ( ) ; try { returnValue = this . method . invoke ( action , paramTypesToObj ( parameterTypes ) ) ; } catch ( InvocationTargetException te ) { throw new ActionException ( te . getCause ( ) ) ; } catch ( Exception e ) { throw new ActionException ( "Invoke action method error!" , e ) ; } // 对于带有返回值的Action方法 , 执行Render if ( null != returnValue ) { if ( false == ( returnValue instanceof View ) ) { // 将未识别响应对象包装为View returnValue = DefaultView . wrap ( returnValue ) ; } ( ( View ) returnValue ) . render ( ) ; }
public class KeyMatcher { /** * Access the bucket for this specific character . * @ param c * @ return HeaderBucket */ protected KeyBucket getBucket ( char c ) { } }
if ( c > this . buckets . length ) { // can ' t handle non - ASCII chars return null ; } int index = c ; // if we ' re case - insensitive , push uppercase into lowercase buckets if ( ! isCaseSensitive ( ) && ( c >= 'A' && c <= 'Z' ) ) { index += 32 ; } return this . buckets [ index ] ;
public class LocatableInputSplitAssigner {

	/**
	 * Returns the next unassigned input split for the given host, preferring
	 * splits that are local to the host; falls back to a remote split when no
	 * local split remains, or when {@code host} is null. Returns null once all
	 * splits are consumed.
	 *
	 * <p>Thread-safety: state is guarded by nested synchronization on the
	 * chooser / local-list object and on {@code this.unassigned}; the code below
	 * documents and relies on a strict lock acquisition order.</p>
	 */
	@Override
	public LocatableInputSplit getNextInputSplit(String host, int taskId) {
		// for a null host, we return a remote split
		if (host == null) {
			synchronized (this.remoteSplitChooser) {
				synchronized (this.unassigned) {
					LocatableInputSplitWithCount split = this.remoteSplitChooser.getNextUnassignedMinLocalCountSplit(this.unassigned);
					if (split != null) {
						// got a split to assign. Double check that it hasn't been assigned before.
						if (this.unassigned.remove(split)) {
							if (LOG.isInfoEnabled()) {
								LOG.info("Assigning split to null host (random assignment).");
							}
							remoteAssignments++;
							return split.getSplit();
						} else {
							throw new IllegalStateException("Chosen InputSplit has already been assigned. This should not happen!");
						}
					} else {
						// all splits consumed
						if (LOG.isDebugEnabled()) {
							LOG.debug("No more unassigned input splits remaining.");
						}
						return null;
					}
				}
			}
		}

		// Normalize the host name so lookups in localPerHost are case-insensitive.
		host = host.toLowerCase(Locale.US);

		// for any non-null host, we take the list of non-null splits
		LocatableInputSplitChooser localSplits = this.localPerHost.get(host);

		// if we have no list for this host yet, create one
		if (localSplits == null) {
			localSplits = new LocatableInputSplitChooser();

			// lock the list, to be sure that others have to wait for that host's local list
			synchronized (localSplits) {
				LocatableInputSplitChooser prior = this.localPerHost.putIfAbsent(host, localSplits);

				// if someone else beat us in the case to create this list, then we do not populate this one, but
				// simply work with that other list
				if (prior == null) {
					// we are the first, we populate

					// first, copy the remaining splits to release the lock on the set early
					// because that is shared among threads
					LocatableInputSplitWithCount[] remaining;
					synchronized (this.unassigned) {
						remaining = this.unassigned.toArray(new LocatableInputSplitWithCount[this.unassigned.size()]);
					}

					for (LocatableInputSplitWithCount isw : remaining) {
						if (isLocal(host, isw.getSplit().getHostnames())) {
							// Split is local on host.
							// Increment local count
							isw.incrementLocalCount();
							// and add to local split list
							localSplits.addInputSplit(isw);
						}
					}
				} else {
					// someone else was faster
					localSplits = prior;
				}
			}
		}

		// at this point, we have a list of local splits (possibly empty)
		// we need to make sure no one else operates in the current list (that protects against
		// list creation races) and that the unassigned set is consistent
		// NOTE: we need to obtain the locks in this order, strictly!!!
		synchronized (localSplits) {
			synchronized (this.unassigned) {
				LocatableInputSplitWithCount split = localSplits.getNextUnassignedMinLocalCountSplit(this.unassigned);
				if (split != null) {
					// found a valid split. Double check that it hasn't been assigned before.
					if (this.unassigned.remove(split)) {
						if (LOG.isInfoEnabled()) {
							LOG.info("Assigning local split to host " + host);
						}
						localAssignments++;
						return split.getSplit();
					} else {
						throw new IllegalStateException("Chosen InputSplit has already been assigned. This should not happen!");
					}
				}
			}
		}

		// we did not find a local split, return a remote split
		synchronized (this.remoteSplitChooser) {
			synchronized (this.unassigned) {
				LocatableInputSplitWithCount split = this.remoteSplitChooser.getNextUnassignedMinLocalCountSplit(this.unassigned);
				if (split != null) {
					// found a valid split. Double check that it hasn't been assigned yet.
					if (this.unassigned.remove(split)) {
						if (LOG.isInfoEnabled()) {
							LOG.info("Assigning remote split to host " + host);
						}
						remoteAssignments++;
						return split.getSplit();
					} else {
						throw new IllegalStateException("Chosen InputSplit has already been assigned. This should not happen!");
					}
				} else {
					// all splits consumed
					if (LOG.isDebugEnabled()) {
						LOG.debug("No more input splits remaining.");
					}
					return null;
				}
			}
		}
	}
}
public class UserZoneEventHandler { /** * Check zone name * @ param handler * structure of handle class * @ param apiZone * api zone reference * @ return true of false */ protected boolean checkHandler ( ZoneHandlerClass handler , ApiZone apiZone ) { } }
return apiZone . getName ( ) . startsWith ( handler . getZoneName ( ) ) ;
public class FLACDecoder {

    /**
     * Fill the given ByteData object with PCM data from the frame.
     * Samples are written interleaved by channel, little-endian, for 8-, 16-
     * and 24-bit streams; 8-bit samples are offset to unsigned (+0x80).
     *
     * @param frame   the frame to send to the PCM processors
     * @param pcmData the byte data to be filled, or null if it should be allocated
     * @return the ByteData that was filled (may be a new instance from <code>space</code>)
     */
    public ByteData decodeFrame(Frame frame, ByteData pcmData) {
        // required size of the byte buffer:
        // samples-per-block * channels * bytes-per-sample.
        // FIX: bits convert to whole bytes as (bits + 7) / 8; the previous
        // divisor of 2 over-allocated the buffer roughly fourfold. The write
        // loops below emit exactly 1/2/3 bytes per sample, so /8 is exact.
        int byteSize = frame.header.blockSize * channels * ((streamInfo.getBitsPerSample() + 7) / 8);
        if (pcmData == null || pcmData.getData().length < byteSize) {
            pcmData = new ByteData(byteSize);
        } else {
            // Reuse the caller's buffer; reset its logical length.
            pcmData.setLen(0);
        }
        if (streamInfo.getBitsPerSample() == 8) {
            for (int i = 0; i < frame.header.blockSize; i++) {
                for (int channel = 0; channel < channels; channel++) {
                    // Shift signed 8-bit samples into the unsigned PCM range.
                    pcmData.append((byte) (channelData[channel].getOutput()[i] + 0x80));
                }
            }
        } else if (streamInfo.getBitsPerSample() == 16) {
            for (int i = 0; i < frame.header.blockSize; i++) {
                for (int channel = 0; channel < channels; channel++) {
                    short val = (short) (channelData[channel].getOutput()[i]);
                    // little-endian: low byte first
                    pcmData.append((byte) (val & 0xff));
                    pcmData.append((byte) ((val >> 8) & 0xff));
                }
            }
        } else if (streamInfo.getBitsPerSample() == 24) {
            for (int i = 0; i < frame.header.blockSize; i++) {
                for (int channel = 0; channel < channels; channel++) {
                    int val = (channelData[channel].getOutput()[i]);
                    // little-endian: low byte first
                    pcmData.append((byte) (val & 0xff));
                    pcmData.append((byte) ((val >> 8) & 0xff));
                    pcmData.append((byte) ((val >> 16) & 0xff));
                }
            }
        }
        // NOTE(review): other bit depths fall through and return the buffer
        // with no samples appended -- presumably unsupported; confirm upstream.
        return pcmData;
    }
}
public class JsGeometryEditService { /** * Move a set of indices to new locations . These indices can point to vertices , edges or sub - geometries . For each * index , a list of new coordinates is provided . * @ param indices * The list of indices to move . * @ param coordinates * The coordinates to move the indices to . Must be a nested array of coordinates . In other words , for * each index an array of coordinates must be supplied . * @ throws GeometryOperationFailedException * In case one of the indices could not be found . No changes will have been performed . */ public void move ( GeometryIndex [ ] indices , JsArray < JsArrayObject > coordinates ) { } }
List < List < Coordinate > > coords = new ArrayList < List < Coordinate > > ( coordinates . length ( ) ) ; for ( int i = 0 ; i < coordinates . length ( ) ; i ++ ) { JsArrayObject jsObj = coordinates . get ( i ) ; coords . add ( Arrays . asList ( ExporterUtil . toArrObject ( jsObj , new Coordinate [ jsObj . length ( ) ] ) ) ) ; } try { delegate . move ( Arrays . asList ( indices ) , coords ) ; } catch ( GeometryOperationFailedException e ) { throw new RuntimeException ( e . getMessage ( ) ) ; }
public class AOBrowserSession {

	/**
	 * The alarm has expired: close this browser session and ask the parent to
	 * remove it, but only if the expiry alarm is still outstanding.
	 *
	 * @param thandle opaque handle passed by the alarm framework (only traced here)
	 */
	public void alarm(Object thandle) {
		if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
			SibTr.entry(tc, "alarm", thandle);

		synchronized (this) {
			// A non-null handle means the alarm has not been cancelled/consumed yet;
			// clearing it first makes this expiry idempotent. Presumably this also
			// guards against a race with an explicit cancel -- TODO confirm.
			if (expiryAlarmHandle != null) {
				expiryAlarmHandle = null;
				close();
				// call into parent asking it to remove me
				parent.removeBrowserSession(key);
			}
		}

		if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
			SibTr.exit(tc, "alarm");
	}
}