signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PersonImpl {
    /**
     * Sets the specified attribute to a value.
     *
     * <p>Reference implementation checks for the setting of the username attribute and updates the
     * EntityIdentifier accordingly.
     *
     * @param key Attribute's name
     * @param value Attribute's value
     */
    @Override
    public void setAttribute(String key, Object value) {
        // NOTE(review): both branches call setAttribute(key, <List or null>) — presumably a
        // multi-valued overload setAttribute(String, List) exists elsewhere in this class and
        // overload resolution selects it. If that overload does NOT exist, these calls recurse
        // infinitely. TODO confirm the overload is present.
        if (value == null) {
            setAttribute(key, null);
        } else {
            // Wrap the single value so the multi-valued path handles both cases uniformly.
            setAttribute(key, Collections.singletonList(value));
        }
    }
}
public class SqlTableTag { /** * / * ( non - Javadoc ) * @ see org . parosproxy . paros . db . paros . TableTag # deleteTagsForHistoryID ( long ) */ @ Override public void deleteTagsForHistoryID ( long historyId ) throws DatabaseException { } }
SqlPreparedStatementWrapper psDeleteTagsForHistoryId = null ; try { psDeleteTagsForHistoryId = DbSQL . getSingleton ( ) . getPreparedStatement ( "tag.ps.deletetagsforhid" ) ; psDeleteTagsForHistoryId . getPs ( ) . setLong ( 1 , historyId ) ; psDeleteTagsForHistoryId . getPs ( ) . execute ( ) ; } catch ( SQLException e ) { throw new DatabaseException ( e ) ; } finally { DbSQL . getSingleton ( ) . releasePreparedStatement ( psDeleteTagsForHistoryId ) ; }
public class DerInputBuffer { /** * Returns the integer which takes up the specified number * of bytes in this buffer . * @ throws IOException if the result is not within the valid * range for integer , i . e . between Integer . MIN _ VALUE and * Integer . MAX _ VALUE . * @ param len the number of bytes to use . * @ return the integer . */ public int getInteger ( int len ) throws IOException { } }
BigInteger result = getBigInteger ( len , false ) ; if ( result . compareTo ( BigInteger . valueOf ( Integer . MIN_VALUE ) ) < 0 ) { throw new IOException ( "Integer below minimum valid value" ) ; } if ( result . compareTo ( BigInteger . valueOf ( Integer . MAX_VALUE ) ) > 0 ) { throw new IOException ( "Integer exceeds maximum valid value" ) ; } return result . intValue ( ) ;
public class Commands { /** * Returns a command that will return the result of this command * if the result applies to the given condition , or otherwise , the * result of the alternative command ( the condition is not applied there ) . * < p > If any of the two commands throws then the whole thing aborts with the exception . < / p > */ public static < A , B > BaseCommand < A , B > firstIfTrue ( Command < A , B > first , Predicate < B > cond , Command < A , B > secondary ) { } }
return new ConditionCommand < > ( first , secondary , cond ) ;
public class JQMTabs {
    /**
     * Hook invoked before a tab activation; returning {@code true} allows the activation
     * to proceed. Default implementation never vetoes.
     *
     * <p>If the tabs are currently collapsed, oldTabHeader and oldTabContent will be null.
     * <br>If the tabs are collapsing, newTabHeader and newTabContent will be null.
     *
     * @param newTabHeader - JQMButton or JQMListItem
     * @param oldTabHeader - JQMButton or JQMListItem
     * @param newTabContent - Widget
     * @param oldTabContent - Widget
     * @return {@code true} always (subclasses may override to veto the activation)
     */
    protected boolean onBeforeActivate(Widget newTabHeader, Widget oldTabHeader, Widget newTabContent, Widget oldTabContent) {
        return true;
    }
}
public class SuperToast { /** * Sets the text size of the main TextView . This value cannot be below 12. * @ param textSize The desired text size * @ return The current SuperToast instance */ public SuperToast setTextSize ( @ Style . TextSize int textSize ) { } }
if ( textSize < Style . TEXTSIZE_VERY_SMALL ) { Log . e ( getClass ( ) . getName ( ) , "SuperToast text size cannot be below 12." ) ; this . mStyle . messageTextSize = Style . TEXTSIZE_VERY_SMALL ; return this ; } else if ( textSize > Style . TEXTSIZE_VERY_LARGE ) { Log . e ( getClass ( ) . getName ( ) , "SuperToast text size cannot be above 20." ) ; this . mStyle . messageTextSize = Style . TEXTSIZE_VERY_LARGE ; return this ; } this . mStyle . messageTextSize = textSize ; return this ;
public class HashUtils { /** * 换算法 ? MD5 SHA - 1 MurMurHash ? ? ? * @ param value the value * @ return the byte [ ] */ public static byte [ ] messageDigest ( String value ) { } }
MessageDigest md5 ; try { md5 = MessageDigest . getInstance ( "MD5" ) ; md5 . update ( value . getBytes ( "UTF-8" ) ) ; return md5 . digest ( ) ; } catch ( NoSuchAlgorithmException e ) { throw new SofaRpcRuntimeException ( "No such algorithm named md5" , e ) ; } catch ( UnsupportedEncodingException e ) { throw new SofaRpcRuntimeException ( "Unsupported encoding of" + value , e ) ; }
public class ApiOvhIpLoadbalancing { /** * Get this object properties * REST : GET / ipLoadbalancing / { serviceName } / http / frontend / { frontendId } * @ param serviceName [ required ] The internal name of your IP load balancing * @ param frontendId [ required ] Id of your frontend */ public OvhFrontendHttp serviceName_http_frontend_frontendId_GET ( String serviceName , Long frontendId ) throws IOException { } }
String qPath = "/ipLoadbalancing/{serviceName}/http/frontend/{frontendId}" ; StringBuilder sb = path ( qPath , serviceName , frontendId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhFrontendHttp . class ) ;
public class DetectorFactoryCollection { /** * Look up bug pattern . * @ param bugType * the bug type for the bug pattern * @ return the BugPattern , or null if it can ' t be found */ public @ CheckForNull BugPattern lookupBugPattern ( String bugType ) { } }
if ( bugType == null ) { return null ; } return bugPatternMap . get ( bugType ) ;
public class ServicesInner {
    /**
     * Get DMS Service Instance.
     * The services resource is the top-level resource that represents the Data Migration
     * Service. The GET method retrieves information about a service instance.
     *
     * @param groupName Name of the resource group
     * @param serviceName Name of the service
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the DataMigrationServiceInner object
     */
    public Observable<DataMigrationServiceInner> getByResourceGroupAsync(String groupName, String serviceName) {
        // Delegate to the ServiceResponse-returning variant and strip the REST envelope,
        // emitting only the payload body downstream.
        return getByResourceGroupWithServiceResponseAsync(groupName, serviceName).map(new Func1<ServiceResponse<DataMigrationServiceInner>, DataMigrationServiceInner>() {
            @Override
            public DataMigrationServiceInner call(ServiceResponse<DataMigrationServiceInner> response) {
                return response.body();
            }
        });
    }
}
public class ModelHandlerClassXMLBuilder {
    /**
     * Merges the contents held by every registered ConfigureLoader: loads each
     * configuration file, collects its model mappings, and builds a handler for every
     * mapped form name. This method acts as start(); see {@link #detroy()} for teardown.
     */
    public void start() {
        try {
            Iterator iter = configLoadedList.keySet().iterator();
            while (iter.hasNext()) {
                String configFile = (String) iter.next();
                Debug.logVerbose("[JdonFramework] start configFile = " + configFile, module);
                ConfigureReader configureLoader = (ConfigureReader) configLoadedList.get(configFile);
                // Each loader yields a map of form name -> model mapping; merge them all.
                Map modelMappings = configureLoader.load();
                mps.putAll(modelMappings);
                Iterator mpsIter = modelMappings.keySet().iterator();
                while (mpsIter.hasNext()) {
                    String formName = (String) mpsIter.next();
                    build(formName);
                }
            }
            // Loaders are one-shot: drop them once their mappings have been merged.
            configLoadedList.clear();
        } catch (Exception ex) {
            // Best-effort startup: failures are logged, not propagated.
            Debug.logError("[JdonFramework] !!!!!!!framework started error: " + ex, module);
        }
    }
}
public class StackdriverWriter {
    /**
     * Send given metrics to the Stackdriver server using HTTP.
     * Failures are counted and logged but never propagated to the caller.
     *
     * @param results Iterable collection of data points
     */
    @Override
    public void write(Iterable<QueryResult> results) {
        logger.debug("Export to '{}', proxy {} metrics {}", url, proxy, results);
        HttpURLConnection urlConnection = null;
        try {
            // Open the connection directly or through the configured proxy.
            if (proxy == null) {
                urlConnection = (HttpURLConnection) url.openConnection();
            } else {
                urlConnection = (HttpURLConnection) url.openConnection(proxy);
            }
            urlConnection.setRequestMethod("POST");
            urlConnection.setDoInput(true);
            urlConnection.setDoOutput(true);
            urlConnection.setReadTimeout(stackdriverApiTimeoutInMillis);
            urlConnection.setRequestProperty("content-type", "application/json; charset=utf-8");
            urlConnection.setRequestProperty("x-stackdriver-apikey", apiKey);
            // Stream the metrics as JSON straight into the request body.
            serialize(results, urlConnection.getOutputStream());
            int responseCode = urlConnection.getResponseCode();
            if (responseCode != 200 && responseCode != 201) {
                exceptionCounter.incrementAndGet();
                logger.warn("Failure {}:'{}' to send result to Stackdriver server '{}' with proxy {}", responseCode, urlConnection.getResponseMessage(), url, proxy);
            }
            if (logger.isTraceEnabled()) {
                IoUtils2.copy(urlConnection.getInputStream(), System.out);
            }
        } catch (Exception e) {
            exceptionCounter.incrementAndGet();
            logger.warn("Failure to send result to Stackdriver server '{}' with proxy {}", url, proxy, e);
        } finally {
            if (urlConnection != null) {
                try {
                    // Drain both the response and error streams so the underlying
                    // connection can be reused (HTTP keep-alive), then disconnect.
                    InputStream in = urlConnection.getInputStream();
                    IoUtils2.copy(in, IoUtils2.nullOutputStream());
                    IoUtils2.closeQuietly(in);
                    InputStream err = urlConnection.getErrorStream();
                    if (err != null) {
                        IoUtils2.copy(err, IoUtils2.nullOutputStream());
                        IoUtils2.closeQuietly(err);
                    }
                    urlConnection.disconnect();
                } catch (IOException e) {
                    logger.warn("Error flushing http connection for one result, continuing");
                    logger.debug("Stack trace for the http connection, usually a network timeout", e);
                }
            }
        }
    }
}
public class SynchronousRequest { /** * for getWvWMatchInfo ( int , WvWMatch . Endpoint ) */ private List < WvWMatchDetail > getWvWMatchInfoUsingID ( String [ ] ids ) throws GuildWars2Exception { } }
try { Response < List < WvWMatchDetail > > response = gw2API . getWvWMatchInfoUsingID ( processIds ( ids ) ) . execute ( ) ; if ( ! response . isSuccessful ( ) ) throwError ( response . code ( ) , response . errorBody ( ) ) ; return response . body ( ) ; } catch ( IOException e ) { throw new GuildWars2Exception ( ErrorCode . Network , "Network Error: " + e . getMessage ( ) ) ; }
public class SoyMapData {
    /**
     * Important: Do not use outside of Soy code (treat as superpackage-private).
     *
     * <p>Puts data into this data object at the specified key. Any existing value at
     * that key is replaced.
     *
     * @param key An individual key.
     * @param value The data to put at the specified key.
     */
    @Override
    public void putSingle(String key, SoyData value) {
        map.put(key, value);
    }
}
public class JsApiHdrsImpl {
    /**
     * Get the contents of the ccsid field from the payload part. d395685
     *
     * @return Integer The value of the CharacterSetID for the message payload.
     */
    final Integer getCcsid() {
        // Entry/exit tracing follows the standard SIB diagnostics pattern.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "getCcsid");
        // Read the CCSID directly from the payload part's schema field.
        Integer value = (Integer) jmo.getPayloadPart().getField(JsPayloadAccess.CCSID_DATA);
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "getCcsid", value);
        return value;
    }
}
public class application { /** * Use this API to delete application . */ public static base_response delete ( nitro_service client , application resource ) throws Exception { } }
application deleteresource = new application ( ) ; deleteresource . appname = resource . appname ; return deleteresource . delete_resource ( client ) ;
public class CollectionUtils {
    /**
     * Get a random element from the list.
     *
     * @param list the input list
     * @param <T> the type of elements in the list
     * @return a random element from the list, or null if the list is null or empty
     */
    public static <T> T randomElementOf(final List<T> list) {
        // Null-safe: treat a null list like an empty one instead of throwing NPE.
        if (list == null || list.isEmpty()) {
            return null;
        }
        // ThreadLocalRandom is JDK-provided and avoids contention on a shared Random,
        // replacing the unresolved nextInt(int, int) helper the original relied on.
        return list.get(ThreadLocalRandom.current().nextInt(list.size()));
    }
}
public class EbeanUpdater { /** * { @ inheritDoc } */ @ Override public Update < M > setParameter ( String name , Object param ) { } }
return getUpdate ( ) . setParameter ( name , param ) ;
public class AbstractMarkerLanguageParser {
    /**
     * Read the given input stream and transform its content in order to have a raw text.
     *
     * @param reader the input stream.
     * @param inputFile the name of the input file for locating included features and
     *        formatting error messages.
     * @return the raw file content.
     */
    public final String transform(Reader reader, File inputFile) {
        // Delegates to the three-argument overload with its boolean set to true
        // (flag semantics are defined by that overload — TODO confirm its meaning).
        return transform(reader, inputFile, true);
    }
}
public class DB { /** * Clears all metadata for a user . * @ param userId The user id . * @ throws SQLException on database error . */ public void clearUserMeta ( final long userId ) throws SQLException { } }
Connection conn = null ; PreparedStatement stmt = null ; Timer . Context ctx = metrics . clearUserMetaTimer . time ( ) ; try { conn = connectionSupplier . getConnection ( ) ; stmt = conn . prepareStatement ( deleteUserMetaSQL ) ; stmt . setLong ( 1 , userId ) ; stmt . executeUpdate ( ) ; } finally { ctx . stop ( ) ; SQLUtil . closeQuietly ( conn , stmt ) ; }
public class SuperCfUpdater { /** * collapse the state of the active HSuperColumn */ void updateInternal ( ) { } }
// HSuperColumnImpl needs a refactor , this construction is lame . // the value serializer is not used in HSuperColumnImpl , so this is safe for name if ( ! subColumns . isEmpty ( ) ) { log . debug ( "Adding column {} for key {} and cols {}" , new Object [ ] { getCurrentSuperColumn ( ) , getCurrentKey ( ) , subColumns } ) ; HSuperColumnImpl < SN , N , ? > column = new HSuperColumnImpl ( getCurrentSuperColumn ( ) , subColumns , 0 , template . getTopSerializer ( ) , template . getSubSerializer ( ) , TypeInferringSerializer . get ( ) ) ; mutator . addInsertion ( getCurrentKey ( ) , template . getColumnFamily ( ) , column ) ; }
public class SafeInputStream { /** * Skips over and discards numBytes bytes of data from the underlying input * stream . Will throw an exception if the given number of bytes could not be * skipped . * @ param numBytes * the number of bytes to skip * @ throws IOException * if the given number of bytes could not be skipped */ public void skipSafe ( long numBytes ) throws IOException { } }
long skippedBytes = inputStream . skip ( numBytes ) ; if ( skippedBytes == - 1 ) { throw new IOException ( "Could not skip '" + numBytes + "' bytes in stream" ) ; }
public class LandmarkStorage {
    /**
     * From all available landmarks pick just a few active ones.
     *
     * @param fromNode graph node id of the query origin; must be >= 0
     * @param toNode graph node id of the query target; must be >= 0
     * @param activeLandmarkIndices in/out array of chosen landmark indices; a first
     *        entry >= 0 signals that previous choices exist and should partly be kept
     * @param activeFroms out array filled with from-weights of the chosen landmarks
     * @param activeTos out array filled with to-weights of the chosen landmarks
     * @param reverse whether the ranking is computed for the reverse search direction
     * @return false if either node lies in an unclear subnetwork (landmarks unusable)
     */
    boolean initActiveLandmarks(int fromNode, int toNode, int[] activeLandmarkIndices, int[] activeFroms, int[] activeTos, boolean reverse) {
        if (fromNode < 0 || toNode < 0)
            throw new IllegalStateException("from " + fromNode + " and to " + toNode + " nodes have to be 0 or positive to init landmarks");
        // Landmarks only apply within a single subnetwork: bail out on unclear ones,
        // fail fast when the endpoints live in different subnetworks.
        int subnetworkFrom = subnetworkStorage.getSubnetwork(fromNode);
        int subnetworkTo = subnetworkStorage.getSubnetwork(toNode);
        if (subnetworkFrom <= UNCLEAR_SUBNETWORK || subnetworkTo <= UNCLEAR_SUBNETWORK)
            return false;
        if (subnetworkFrom != subnetworkTo) {
            throw new ConnectionNotFoundException("Connection between locations not found. Different subnetworks " + subnetworkFrom + " vs. " + subnetworkTo, new HashMap<String, Object>());
        }
        int[] tmpIDs = landmarkIDs.get(subnetworkFrom);
        // kind of code duplication to approximate
        // Rank every landmark by the tightness of the bound it gives for this pair:
        // entry key = weight estimate, entry value = landmark index.
        List<Map.Entry<Integer, Integer>> list = new ArrayList<>(tmpIDs.length);
        for (int lmIndex = 0; lmIndex < tmpIDs.length; lmIndex++) {
            int fromWeight = getFromWeight(lmIndex, toNode) - getFromWeight(lmIndex, fromNode);
            int toWeight = getToWeight(lmIndex, fromNode) - getToWeight(lmIndex, toNode);
            list.add(new MapEntry<>(reverse ? Math.max(-fromWeight, -toWeight) : Math.max(fromWeight, toWeight), lmIndex));
        }
        Collections.sort(list, SORT_BY_WEIGHT);
        if (activeLandmarkIndices[0] >= 0) {
            // Previous landmarks exist: take the new best-ranked ones but keep a couple
            // of the old picks (COUNT) to avoid oscillation between successive queries.
            IntHashSet set = new IntHashSet(activeLandmarkIndices.length);
            set.addAll(activeLandmarkIndices);
            int existingLandmarkCounter = 0;
            final int COUNT = Math.min(activeLandmarkIndices.length - 2, 2);
            for (int i = 0; i < activeLandmarkIndices.length; i++) {
                if (i >= activeLandmarkIndices.length - COUNT + existingLandmarkCounter) {
                    // keep at least two of the previous landmarks (pick the best)
                    break;
                } else {
                    activeLandmarkIndices[i] = list.get(i).getValue();
                    if (set.contains(activeLandmarkIndices[i]))
                        existingLandmarkCounter++;
                }
            }
        } else {
            // First call: simply take the best-ranked landmarks in order.
            for (int i = 0; i < activeLandmarkIndices.length; i++) {
                activeLandmarkIndices[i] = list.get(i).getValue();
            }
        }
        // store weight values of active landmarks in 'cache' arrays
        for (int i = 0; i < activeLandmarkIndices.length; i++) {
            int lmIndex = activeLandmarkIndices[i];
            activeFroms[i] = getFromWeight(lmIndex, toNode);
            activeTos[i] = getToWeight(lmIndex, toNode);
        }
        return true;
    }
}
public class Environment { /** * Returns the Data directory of the application for the logged in user . * On Unix systems , the $ HOME / . < i > applicationname < / i > ( all lower - case ) directory will be returned . * On Windows systems , $ APPDATA / < i > applicationname < / i > ( with uppercase , first letter ) will be returned . * @ param applicationname The name of the application . * @ return The directory where the application can store data . */ public static File getDataDirectory ( String applicationname ) { } }
if ( isUnix ( ) ) { String dirname = "/home/" + USERNAME + "." + applicationname . toLowerCase ( ) ; File dir = new File ( dirname ) ; return dir ; } if ( isWindows ( ) ) { String name = toFirstCap ( applicationname ) ; String dirname = getEnvVariable ( "APPDATA" ) + File . separator + name ; File dir = new File ( dirname ) ; return dir ; } String message = "Environment operations not supported on unrecognized operatings system" ; UnsupportedOperationException cause = new UnsupportedOperationException ( message ) ; throw new EnvironmentException ( cause ) ;
public class ExtensionHook { /** * Gets the { @ link ApiImplementor } s added to this hook . * @ return an unmodifiable { @ code List } containing the added { @ code ApiImplementor } s , never { @ code null } . * @ since 2.6.0 */ List < ApiImplementor > getApiImplementors ( ) { } }
if ( apiImplementors == null ) { return Collections . emptyList ( ) ; } return Collections . unmodifiableList ( apiImplementors ) ;
public class OMMapBufferEntry {
    /**
     * Force closing of file if it's opened yet: flushes a dirty buffer, eagerly
     * releases the mapped memory (via Sun's Cleaner when available) and resets the
     * entry state. Guarded by the entry's exclusive lock.
     */
    void close() {
        acquireExclusiveLock();
        try {
            if (buffer != null) {
                // Persist pending changes before unmapping.
                if (dirty)
                    buffer.force();
                if (sunClass != null) {
                    // USE SUN JVM SPECIAL METHOD TO FREE RESOURCES
                    // MappedByteBuffer has no public unmap; reflectively invoke
                    // DirectBuffer.cleaner().clean() to release the mapping now
                    // instead of waiting for GC.
                    try {
                        final Method m = sunClass.getMethod("cleaner");
                        final Object cleaner = m.invoke(buffer);
                        cleaner.getClass().getMethod("clean").invoke(cleaner);
                    } catch (Exception e) {
                        OLogManager.instance().error(this, "Error on calling Sun's MMap buffer clean", e);
                    }
                }
                buffer = null;
            }
            // Reset entry bookkeeping regardless of whether a buffer was mapped.
            counter = 0;
            file = null;
        } finally {
            releaseExclusiveLock();
        }
    }
}
public class AbstractParser {
    /**
     * Convert an xml element in boolean value. Empty elements result in true
     * (tag presence is sufficient condition).
     *
     * @param reader the StAX reader
     * @param key The key
     * @param expressions The expressions
     * @return the boolean representing element
     * @throws XMLStreamException StAX exception
     * @throws ParserException in case of non valid boolean for given element value
     */
    protected Boolean elementAsBoolean(XMLStreamReader reader, String key, Map<String, String> expressions) throws XMLStreamException, ParserException {
        String elementtext = rawElementText(reader);
        // Remember raw "${...}" expressions so they can be re-resolved later.
        if (key != null && expressions != null && elementtext != null && elementtext.indexOf("${") != -1)
            expressions.put(key, elementtext);
        String stringValue = getSubstitutionValue(elementtext);
        if (StringUtils.isEmpty(stringValue) || stringValue.trim().equalsIgnoreCase("true") || stringValue.trim().equalsIgnoreCase("false")) {
            // Empty element means "tag present" -> TRUE; otherwise parse the literal.
            return StringUtils.isEmpty(stringValue) ? Boolean.TRUE : Boolean.valueOf(stringValue.trim());
        } else {
            // Anything that is neither empty nor true/false is a parse error.
            throw new ParserException(bundle.elementAsBoolean(elementtext, reader.getLocalName()));
        }
    }
}
public class HawkbitCommonUtil { /** * Set localization considering properties and UI settings . * @ param ui * UI to setup * @ param localizationProperties * UI localization settings * @ param i18n * Localization message source */ public static void initLocalization ( final UI ui , final Localization localizationProperties , final VaadinMessageSource i18n ) { } }
ui . setLocale ( HawkbitCommonUtil . getLocaleToBeUsed ( localizationProperties , ui . getSession ( ) . getLocale ( ) ) ) ; ui . getReconnectDialogConfiguration ( ) . setDialogText ( i18n . getMessage ( UIMessageIdProvider . VAADIN_SYSTEM_TRYINGRECONNECT ) ) ;
public class BatchModify {
    /**
     * Converts file into string.
     *
     * @param path The absolute file path of the file.
     * @return The contents of the file as a string (decoded as UTF-8).
     * @throws Exception If any type of error occurs during the conversion.
     */
    private static String fileAsString(String path) throws Exception {
        // StringBuilder instead of the synchronized StringBuffer: single-threaded use.
        StringBuilder buffer = new StringBuilder();
        // try-with-resources closes the stream chain even when reading fails;
        // the original leaked the streams on any exception.
        try (Reader in = new BufferedReader(new InputStreamReader(new FileInputStream(path), "UTF-8"))) {
            int ch;
            while ((ch = in.read()) > -1) {
                buffer.append((char) ch);
            }
        }
        return buffer.toString();
    }
}
public class LdapUtils { /** * New ldaptive search request . * Returns all attributes . * @ param baseDn the base dn * @ param filter the filter * @ return the search request */ public static SearchRequest newLdaptiveSearchRequest ( final String baseDn , final SearchFilter filter ) { } }
return newLdaptiveSearchRequest ( baseDn , filter , ReturnAttributes . ALL_USER . value ( ) , ReturnAttributes . ALL_USER . value ( ) ) ;
public class Rule {
    /**
     * If this mixin match the calling parameters.
     *
     * @param formatter current formatter
     * @param paramValues calling parameters
     * @param isDefault the value of the keyword "default" in guard.
     * @return the match or null if there is no match of the parameter lists
     */
    MixinMatch match(CssFormatter formatter, List<Expression> paramValues, boolean isDefault) {
        // A guardless rule must not be entered recursively.
        if (guard == null && formatter.containsRule(this)) {
            return null;
        }
        Map<String, Expression> mixinParameters = getMixinParams(formatter, paramValues);
        if (mixinParameters == NO_MATCH) {
            return null;
        }
        boolean matching = true;
        if (guard != null) {
            // Guard evaluation needs the call parameters in scope; they are removed
            // again immediately after the boolean has been computed.
            formatter.addGuardParameters(mixinParameters, isDefault);
            matching = guard.booleanValue(formatter);
            formatter.removeGuardParameters(mixinParameters);
        }
        return new MixinMatch(this, mixinParameters, matching, formatter.wasDefaultFunction());
    }
}
public class PBConstraint {
    /**
     * Normalizes this constraint s.t. it can be converted to CNF:
     * EQ is split into a &lt;= part and a negated &lt;= part, LT is rewritten as LE with
     * rhs - 1, and GT/GE are negated into LE form.
     *
     * @return the normalized constraint
     */
    public Formula normalize() {
        final LNGVector<Literal> normPs = new LNGVector<>(this.literals.length);
        final LNGIntVector normCs = new LNGIntVector(this.literals.length);
        int normRhs;
        switch (this.comparator) {
            case EQ:
                // sum == rhs  <=>  (sum <= rhs) AND (-sum <= -rhs)
                for (int i = 0; i < this.literals.length; i++) {
                    normPs.push(this.literals[i]);
                    normCs.push(this.coefficients[i]);
                }
                normRhs = this.rhs;
                final Formula f1 = this.normalize(normPs, normCs, normRhs);
                // Reuse the vectors for the negated half.
                normPs.clear();
                normCs.clear();
                for (int i = 0; i < this.literals.length; i++) {
                    normPs.push(this.literals[i]);
                    normCs.push(-this.coefficients[i]);
                }
                normRhs = -this.rhs;
                final Formula f2 = this.normalize(normPs, normCs, normRhs);
                return this.f.and(f1, f2);
            case LT:
            case LE:
                // sum < rhs  <=>  sum <= rhs - 1 (integer coefficients)
                for (int i = 0; i < this.literals.length; i++) {
                    normPs.push(this.literals[i]);
                    normCs.push(this.coefficients[i]);
                }
                normRhs = this.comparator == CType.LE ? this.rhs : this.rhs - 1;
                return this.normalize(normPs, normCs, normRhs);
            case GT:
            case GE:
                // sum >= rhs  <=>  -sum <= -rhs ; sum > rhs  <=>  -sum <= -rhs - 1
                for (int i = 0; i < this.literals.length; i++) {
                    normPs.push(this.literals[i]);
                    normCs.push(-this.coefficients[i]);
                }
                normRhs = this.comparator == CType.GE ? -this.rhs : -this.rhs - 1;
                return this.normalize(normPs, normCs, normRhs);
            default:
                throw new IllegalStateException("Unknown pseudo-Boolean comparator: " + this.comparator);
        }
    }
}
public class DefaultComponent {
    /**
     * Sets up the component: starts cluster coordination, opens the input and output
     * collectors simultaneously, signals readiness to the coordinator, and completes
     * {@code doneHandler} once the network has resumed this component (or on failure).
     *
     * @param doneHandler completed successfully once started, or with the failure cause
     */
    private void setup(final Handler<AsyncResult<Void>> doneHandler) {
        // Retrieve the component context from the coordinator (the current cluster).
        // If the context has changed due to a network configuration change, the
        // internal context and input/output connections will be automatically updated.
        log.debug(String.format("%s - Starting cluster coordination", DefaultComponent.this));
        coordinator.start(new Handler<AsyncResult<Void>>() {
            @Override
            public void handle(AsyncResult<Void> result) {
                if (result.failed()) {
                    new DefaultFutureResult<Void>(result.cause()).setHandler(doneHandler);
                } else {
                    // We have to make sure the input and output collectors are started
                    // simultaneously in order to support circular connections. If both
                    // input and output aren't started at the same time then circular
                    // connections will never open.
                    final CountingCompletionHandler<Void> ioHandler = new CountingCompletionHandler<Void>(2);
                    ioHandler.setHandler(new Handler<AsyncResult<Void>>() {
                        @Override
                        public void handle(AsyncResult<Void> result) {
                            if (result.failed()) {
                                new DefaultFutureResult<Void>(result.cause()).setHandler(doneHandler);
                            } else {
                                // Tell the coordinator we're ready for the network to start.
                                coordinator.resume();
                            }
                        }
                    });
                    output.open(new Handler<AsyncResult<Void>>() {
                        @Override
                        public void handle(AsyncResult<Void> result) {
                            if (result.failed()) {
                                log.error(String.format("%s - Failed to open component outputs", DefaultComponent.this), result.cause());
                                ioHandler.fail(result.cause());
                            } else {
                                ioHandler.succeed();
                            }
                        }
                    });
                    input.open(new Handler<AsyncResult<Void>>() {
                        @Override
                        public void handle(AsyncResult<Void> result) {
                            if (result.failed()) {
                                log.error(String.format("%s - Failed to open component inputs", DefaultComponent.this), result.cause());
                                ioHandler.fail(result.cause());
                            } else {
                                ioHandler.succeed();
                            }
                        }
                    });
                }
            }
        });
        // The resume handler will be called by the coordinator once the
        // network's manager has indicated that all the components in the
        // network have finished setting up their connections.
        coordinator.resumeHandler(new Handler<Void>() {
            @Override
            @SuppressWarnings("unchecked")
            public void handle(Void _) {
                // Guard against duplicate resume notifications: start exactly once.
                if (!started) {
                    started = true;
                    log.debug(String.format("%s - Started", DefaultComponent.this, context.component().name(), context.number()));
                    // Notify all registered component hooks of the start event.
                    List<ComponentHook> hooks = context.component().hooks();
                    for (ComponentHook hook : hooks) {
                        hook.handleStart(DefaultComponent.this);
                    }
                    new DefaultFutureResult<Void>((Void) null).setHandler(doneHandler);
                }
            }
        });
    }
}
public class Checker {
    /**
     * Checks whether the array is sorted. The direction (ascending or descending) is
     * inferred from the first and last elements.
     *
     * @param nums the array to check
     * @return {@code true} if the array is sorted; arrays with fewer than two elements
     *         (including null/empty) are trivially sorted
     */
    public static boolean isSorted(int[] nums) {
        // BUG FIX: the original indexed nums[0] unconditionally and threw
        // ArrayIndexOutOfBoundsException on an empty array.
        if (nums == null || nums.length < 2) {
            return true;
        }
        // BUG FIX: "nums[0] - nums[last] >= 0" overflows for extreme int values
        // (e.g. MIN_VALUE - MAX_VALUE == 1); compare directly instead.
        boolean desc = nums[0] >= nums[nums.length - 1];
        for (int i = 0; i < nums.length - 1; i++) {
            if (!desc && nums[i] > nums[i + 1]) {
                return false;
            }
            if (desc && nums[i] < nums[i + 1]) {
                return false;
            }
        }
        return true;
    }
}
public class ResourceIndexModule {
    /**
     * {@inheritDoc}
     */
    public void add(List<Triple> triples, boolean flush) throws IOException, TrippiException {
        // Pure delegation; buffering and flush semantics are defined by the
        // underlying ResourceIndex (_ri).
        _ri.add(triples, flush);
    }
}
public class SDValidation { /** * Validate that the operation is being applied on a boolean type SDVariable * @ param opName Operation name to print in the exception * @ param v Variable to validate datatype for ( input to operation ) */ protected static void validateBool ( String opName , SDVariable v ) { } }
if ( v == null ) return ; if ( v . dataType ( ) != DataType . BOOL ) throw new IllegalStateException ( "Cannot apply operation \"" + opName + "\" to variable \"" + v . getVarName ( ) + "\" with non-boolean point data type " + v . dataType ( ) ) ;
public class Environment { /** * Stores key / value to be used . * @ param key * @ param value */ public void setSymbol ( String key , String value ) { } }
if ( value == null ) { symbols . remove ( key ) ; } else { symbols . put ( key , value ) ; }
public class DStreamExecutionGraphBuilder { /** * Determines which unmap function to use . For cases when previous operation was ' classify ' * the unmap function is { @ link # unmapFunction } otherwise it will produce a pass thru function . */ private SerFunction < ? , ? > determineUnmapFunction ( String lastOperationName ) { } }
return Ops . classify . name ( ) . equals ( lastOperationName ) ? this . shuffleResultNormalizer : s -> s ;
public class AbstractFCKConnector { /** * Compile REST path of the given resource . * @ param workspace * @ param resource * , we assume that path starts with ' / ' * @ return */ protected String makeRESTPath ( String repoName , String workspace , String resource ) { } }
final StringBuilder sb = new StringBuilder ( 512 ) ; ExoContainer container = ExoContainerContext . getCurrentContainerIfPresent ( ) ; if ( container instanceof PortalContainer ) { PortalContainer pContainer = ( PortalContainer ) container ; sb . append ( '/' ) . append ( pContainer . getRestContextName ( ) ) . append ( '/' ) ; } else { sb . append ( '/' ) . append ( PortalContainer . DEFAULT_REST_CONTEXT_NAME ) . append ( '/' ) ; } return sb . append ( "jcr/" ) . append ( repoName ) . append ( '/' ) . append ( workspace ) . append ( resource ) . toString ( ) ;
public class KrakenImpl { /** * Closes the manager . */ public void close ( ) { } }
if ( _isClosed ) { return ; } _isClosed = true ; KelpManager backing = _kelpBacking ; // _ localBacking = null ; if ( backing != null ) { backing . close ( ) ; }
public class ApiOvhHostingweb { /** * Get statistics about this web hosting * REST : GET / hosting / web / { serviceName } / statistics * @ param period [ required ] * @ param type [ required ] * @ param serviceName [ required ] The internal name of your hosting */ public ArrayList < OvhChartSerie < OvhChartTimestampValue > > serviceName_statistics_GET ( String serviceName , OvhStatisticsPeriodEnum period , OvhStatisticsTypeEnum type ) throws IOException { } }
String qPath = "/hosting/web/{serviceName}/statistics" ; StringBuilder sb = path ( qPath , serviceName ) ; query ( sb , "period" , period ) ; query ( sb , "type" , type ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t6 ) ;
public class GeoJsonToAssembler { /** * Serializes all points of the input into a list of their coordinates * @ param input a geometry whose points are to be converted to a list of coordinates * @ return an array containing arrays with x , y and optionally z and m values . */ private double [ ] [ ] getPoints ( Geometry input ) { } }
double [ ] [ ] result = new double [ input . getNumPoints ( ) ] [ ] ; for ( int i = 0 ; i < input . getPoints ( ) . size ( ) ; i ++ ) { Point p = input . getPointN ( i ) ; if ( p . isMeasured ( ) && p . is3D ( ) ) { result [ i ] = new double [ ] { p . getX ( ) , p . getY ( ) , p . getZ ( ) , p . getM ( ) } ; } else if ( p . isMeasured ( ) ) { // ideally we ' d use something like Double . Nan , but JSON doesn ' t support that . result [ i ] = new double [ ] { p . getX ( ) , p . getY ( ) , 0 , p . getM ( ) } ; } else if ( p . is3D ( ) ) { result [ i ] = new double [ ] { p . getX ( ) , p . getY ( ) , p . getZ ( ) } ; } else { result [ i ] = new double [ ] { p . getX ( ) , p . getY ( ) } ; } } return result ;
public class ClientSessionManager { /** * Closes the session manager . * @ return A completable future to be completed once the session manager is closed . */ public CompletableFuture < Void > close ( ) { } }
if ( state . getState ( ) == Session . State . EXPIRED ) return CompletableFuture . completedFuture ( null ) ; CompletableFuture < Void > future = new CompletableFuture < > ( ) ; context . executor ( ) . execute ( ( ) -> { if ( keepAlive != null ) { keepAlive . cancel ( ) ; keepAlive = null ; } unregister ( future ) ; } ) ; return future ;
public class TCQueryMessageImpl {
    /**
     * Decodes a TCQueryMessage from the given ASN.1 input stream, populating
     * the transaction id, optional dialog portion and optional component portion.
     * (non-Javadoc)
     * @see org.restcomm.protocols.ss7.tcap.asn.Encodable#decode(org.mobicents.protocols.asn.AsnInputStream)
     */
    public void decode(AsnInputStream ais) throws ParseException {
        // Reset all decoded state before parsing.
        this.dialogTermitationPermission = false;
        this.originatingTransactionId = null;
        this.dp = null;
        this.component = null;
        try {
            // The outer tag distinguishes "query with permission" from "query without".
            if (ais.getTag() == TCQueryMessage._TAG_QUERY_WITH_PERM)
                dialogTermitationPermission = true;
            else
                dialogTermitationPermission = false;
            AsnInputStream localAis = ais.readSequenceStream();
            // transaction portion: exactly one transaction id must be present
            TransactionID tid = TcapFactory.readTransactionID(localAis);
            if (tid.getFirstElem() == null || tid.getSecondElem() != null) {
                throw new ParseException(PAbortCause.BadlyStructuredTransactionPortion, "Error decoding TCQueryMessage: transactionId must contain only one transactionId");
            }
            this.originatingTransactionId = tid.getFirstElem();
            // dialog portion: at least one of dialog/component portions must follow
            if (localAis.available() == 0) {
                throw new ParseException(PAbortCause.UnrecognizedDialoguePortionID, "Error decoding TCQueryMessage: neither dialog no component portion is found");
            }
            int tag = localAis.readTag();
            if (tag == DialogPortion._TAG_DIALOG_PORTION) {
                this.dp = TcapFactory.createDialogPortion(localAis);
                // A dialog portion with nothing after it is a complete message.
                if (localAis.available() == 0)
                    return;
                tag = localAis.readTag();
            }
            // component portion
            this.component = TcapFactory.readComponents(localAis);
        } catch (IOException e) {
            throw new ParseException(PAbortCause.BadlyStructuredDialoguePortion, "IOException while decoding TCQueryMessage: " + e.getMessage(), e);
        } catch (AsnException e) {
            throw new ParseException(PAbortCause.BadlyStructuredDialoguePortion, "AsnException while decoding TCQueryMessage: " + e.getMessage(), e);
        }
    }
}
public class CursorData {
    /**
     * Switches to the specified cursor, notifying observers of changes as necessary.
     * The previous cursor (if any) is closed; observers receive a "changed" event
     * for the overlapping row range, then a removal or insertion for the size delta.
     */
    private void changeCursor(@Nullable Cursor newCursor) {
        Cursor oldCursor = mCursor;
        if (mCursor != newCursor) {
            int oldSize = 0;
            // deltaSize ends up as newSize - oldSize.
            int deltaSize = 0;
            if (oldCursor != null) {
                int oldCount = oldCursor.getCount();
                oldSize = oldCount;
                deltaSize -= oldCount;
                // Close the old cursor before swapping in the new one.
                oldCursor.close();
            }
            mCursor = newCursor;
            int newSize = 0;
            if (newCursor != null) {
                int newCount = newCursor.getCount();
                deltaSize += newCount;
                newSize = newCount;
            }
            updateCursorObserver();
            // Rows present in both cursors are reported as changed...
            int changed = min(oldSize, newSize);
            if (changed > 0) {
                notifyItemRangeChanged(0, changed);
            }
            // ...and the size difference as a tail removal or insertion.
            if (deltaSize < 0) {
                notifyItemRangeRemoved(oldSize + deltaSize, abs(deltaSize));
            } else if (deltaSize > 0) {
                notifyItemRangeInserted(oldSize, abs(deltaSize));
            }
        }
    }
}
public class TemperatureConversion { /** * Convert a temperature value from another temperature scale into the Kelvin temperature scale . * @ param from TemperatureScale * @ param temperature value from other scale * @ return converted temperature value in Kelvin */ public static double convertToKelvin ( TemperatureScale from , double temperature ) { } }
switch ( from ) { case FARENHEIT : return convertFarenheitToKelvin ( temperature ) ; case CELSIUS : return convertCelsiusToKelvin ( temperature ) ; case KELVIN : return temperature ; case RANKINE : return convertRankineToKelvin ( temperature ) ; default : throw ( new RuntimeException ( "Invalid termpature conversion" ) ) ; }
public class AmazonAutoScalingClient { /** * Configures an Auto Scaling group to send notifications when specified events take place . Subscribers to the * specified topic can have messages delivered to an endpoint such as a web server or an email address . * This configuration overwrites any existing configuration . * For more information , see < a * href = " https : / / docs . aws . amazon . com / autoscaling / ec2 / userguide / ASGettingNotifications . html " > Getting Amazon SNS * Notifications When Your Auto Scaling Group Scales < / a > in the < i > Amazon EC2 Auto Scaling User Guide < / i > . * @ param putNotificationConfigurationRequest * @ return Result of the PutNotificationConfiguration operation returned by the service . * @ throws LimitExceededException * You have already reached a limit for your Amazon EC2 Auto Scaling resources ( for example , Auto Scaling * groups , launch configurations , or lifecycle hooks ) . For more information , see * < a > DescribeAccountLimits < / a > . * @ throws ResourceContentionException * You already have a pending update to an Amazon EC2 Auto Scaling resource ( for example , an Auto Scaling * group , instance , or load balancer ) . * @ throws ServiceLinkedRoleFailureException * The service - linked role is not yet ready for use . * @ sample AmazonAutoScaling . PutNotificationConfiguration * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / autoscaling - 2011-01-01 / PutNotificationConfiguration " * target = " _ top " > AWS API Documentation < / a > */ @ Override public PutNotificationConfigurationResult putNotificationConfiguration ( PutNotificationConfigurationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executePutNotificationConfiguration ( request ) ;
public class BaseImageCreative {
    /**
     * Sets the primaryImageAsset value for this BaseImageCreative.
     *
     * @param primaryImageAsset The primary image asset associated with this creative.
     *                          This attribute is required.
     */
    public void setPrimaryImageAsset(com.google.api.ads.admanager.axis.v201805.CreativeAsset primaryImageAsset) {
        // Plain bean setter; no validation is performed here.
        this.primaryImageAsset = primaryImageAsset;
    }
}
public class BDDKernel { /** * Returns a full model in all variables for the given BDD . * @ param r the BDD root node * @ return a full model of this BDD */ public int fullSatOne ( final int r ) { } }
if ( r == 0 ) return 0 ; initRef ( ) ; int res = fullSatOneRec ( r ) ; for ( int v = level ( r ) - 1 ; v >= 0 ; v -- ) res = pushRef ( makeNode ( v , res , 0 ) ) ; return res ;
public class EventDispatcher { protected int subscribe_user_event ( String attr_name , String [ ] filters , boolean stateless ) throws DevFailed { } }
return event_supplier . subscribe_event ( attr_name , USER_EVENT , this , filters , stateless ) ;
public class InternalPureXbaseParser {
    /**
     * $ANTLR start synpred37_InternalPureXbase
     * Generated syntactic-predicate fragment: matches a JvmTypeReference
     * followed by a ValidID. Machine-generated by ANTLR — do not edit by hand.
     */
    public final void synpred37_InternalPureXbase_fragment() throws RecognitionException {
        // InternalPureXbase.g:4542:5: ( ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) ) )
        // InternalPureXbase.g:4542:6: ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) )
        {
            // InternalPureXbase.g:4542:6: ( ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) ) )
            // InternalPureXbase.g:4543:6: ( ( ruleJvmTypeReference ) ) ( ( ruleValidID ) )
            {
                // InternalPureXbase.g:4543:6: ( ( ruleJvmTypeReference ) )
                // InternalPureXbase.g:4544:7: ( ruleJvmTypeReference )
                {
                    // InternalPureXbase.g:4544:7: ( ruleJvmTypeReference )
                    // InternalPureXbase.g:4545:8: ruleJvmTypeReference
                    {
                        pushFollow(FOLLOW_12);
                        ruleJvmTypeReference();
                        state._fsp--;
                        if (state.failed) return;
                    }
                }
                // InternalPureXbase.g:4548:6: ( ( ruleValidID ) )
                // InternalPureXbase.g:4549:7: ( ruleValidID )
                {
                    // InternalPureXbase.g:4549:7: ( ruleValidID )
                    // InternalPureXbase.g:4550:8: ruleValidID
                    {
                        pushFollow(FOLLOW_2);
                        ruleValidID();
                        state._fsp--;
                        if (state.failed) return;
                    }
                }
            }
        }
    }
}
public class Rule { /** * The < code > Predicates < / code > object contains one < code > Predicate < / code > element for each < a > ByteMatchSet < / a > , * < a > IPSet < / a > , or < a > SqlInjectionMatchSet < / a > object that you want to include in a < code > Rule < / code > . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setPredicates ( java . util . Collection ) } or { @ link # withPredicates ( java . util . Collection ) } if you want to * override the existing values . * @ param predicates * The < code > Predicates < / code > object contains one < code > Predicate < / code > element for each * < a > ByteMatchSet < / a > , < a > IPSet < / a > , or < a > SqlInjectionMatchSet < / a > object that you want to include in a * < code > Rule < / code > . * @ return Returns a reference to this object so that method calls can be chained together . */ public Rule withPredicates ( Predicate ... predicates ) { } }
if ( this . predicates == null ) { setPredicates ( new java . util . ArrayList < Predicate > ( predicates . length ) ) ; } for ( Predicate ele : predicates ) { this . predicates . add ( ele ) ; } return this ;
public class ExpressionUtil { /** * Substitutes dynamic values for expressions in the input string . * @ param input raw input string * @ param model object containing the values to substitute * @ return string with values substituted */ public static String substitute ( String input , Object model ) throws MdwException { } }
return substitute ( input , model , null , false ) ;
public class RangeSeekBar { /** * Overridden to save instance state when device orientation changes . This method is called automatically if you assign an id to the RangeSeekBar widget using the { @ link # setId ( int ) } method . Other members of this class than the normalized min and max values don ' t need to be saved . */ @ Override protected Parcelable onSaveInstanceState ( ) { } }
final Bundle bundle = new Bundle ( ) ; bundle . putParcelable ( "SUPER" , super . onSaveInstanceState ( ) ) ; bundle . putDouble ( "MIN" , normalizedMinValue ) ; bundle . putDouble ( "MAX" , normalizedMaxValue ) ; return bundle ;
public class YubiKeyAccountCouchDbRepository { /** * Find by username . * @ param uid username to search for * @ return yubikey account for username provided */ @ View ( name = "by_username" , map = "function(doc) { if(doc.publicId && doc.username) {emit(doc.username, doc)}}" ) public YubiKeyAccount findByUsername ( final String uid ) { } }
val view = createQuery ( "by_username" ) . key ( uid ) . limit ( 1 ) . includeDocs ( true ) ; return db . queryView ( view , CouchDbYubiKeyAccount . class ) . stream ( ) . findFirst ( ) . orElse ( null ) ;
public class WriteChannelConfiguration { /** * Creates a builder for a BigQuery Load Configuration given the destination table and format . */ public static Builder newBuilder ( TableId destinationTable , FormatOptions format ) { } }
return newBuilder ( destinationTable ) . setFormatOptions ( format ) ;
public class QueryParsers { /** * Remove from this engine the language with the given name . * @ param firstLanguage the name of the first language to remove , which must match the { @ link QueryParser # getLanguage ( ) * language } of the parser * @ param additionalLanguages the names of the additional languages to remove , which must match the * { @ link QueryParser # getLanguage ( ) language } of the parser * @ return the parser for the language , or null if the engine had no support for the named language * @ throws IllegalArgumentException if the language is null */ public Collection < QueryParser > removeLanguages ( String firstLanguage , String ... additionalLanguages ) { } }
Collection < QueryParser > removed = new HashSet < QueryParser > ( ) ; QueryParser parser = removeLanguage ( firstLanguage ) ; if ( parser != null ) removed . add ( parser ) ; for ( String language : additionalLanguages ) { parser = removeLanguage ( language ) ; if ( parser != null ) removed . add ( parser ) ; } return removed ;
public class XMLUtil { /** * Parse the XML data in the given input stream , using the * specified handler object as both the content and error handler . * @ param handler the SAX event handler * @ param in the input stream containing the XML to be parsed */ public static void parse ( DefaultHandler handler , InputStream in ) throws IOException , ParserConfigurationException , SAXException { } }
XMLReader xr = _pfactory . newSAXParser ( ) . getXMLReader ( ) ; xr . setContentHandler ( handler ) ; xr . setErrorHandler ( handler ) ; xr . parse ( new InputSource ( in ) ) ;
public class WordVectorsImpl {
    /**
     * Get the top n words most similar to the given word.
     *
     * @param word the word to compare
     * @param n the number of nearest words to return
     * @return the top n words
     */
    public Collection<String> wordsNearestSum(String word, int n) {
        // Delegates entirely to the configured model-utils implementation.
        return modelUtils.wordsNearestSum(word, n);
    }
}
public class ProductConfigGenerator {
    /**
     * Generate source code for the new class. The generated class implements
     * <code>ProductConfig</code>.
     *
     * @param logger Logger object
     * @param context Generator context
     */
    private void generateClass(TreeLogger logger, GeneratorContext context) throws Throwable {
        // get print writer that receives the source code
        PrintWriter printWriter = context.tryCreate(logger, packageName, className);
        // print writer if null, source code has ALREADY been generated, return
        if (printWriter == null) {
            return;
        }
        // init composer, set class properties, create source writer
        ClassSourceFileComposerFactory composerFactory = new ClassSourceFileComposerFactory(packageName, className);
        // Imports
        composerFactory.addImport("org.jboss.as.console.client.Console");
        composerFactory.addImport("org.jboss.as.console.client.ProductConfig");
        composerFactory.addImport("java.util.*");
        // Interfaces
        composerFactory.addImplementedInterface("org.jboss.as.console.client.ProductConfig");
        // SourceWriter
        SourceWriter sourceWriter = composerFactory.createSourceWriter(context, printWriter);
        // ctor
        generateConstructor(sourceWriter);
        // Methods
        generateMethods(logger, sourceWriter, context);
        // close generated class
        sourceWriter.outdent();
        sourceWriter.println("}");
        // commit generated class
        context.commit(logger, printWriter);
    }
}
public class BeaconParser {
    /**
     * Construct a Beacon from a Bluetooth LE packet collected by Android's Bluetooth APIs,
     * including the raw Bluetooth device info.
     *
     * @param scanData The actual packet bytes
     * @param rssi The measured signal strength of the packet
     * @param device The Bluetooth device that was detected
     * @return An instance of a <code>Beacon</code>
     */
    public Beacon fromScanData(byte[] scanData, int rssi, BluetoothDevice device) {
        // Delegates to the four-argument overload with a fresh Beacon to populate.
        return fromScanData(scanData, rssi, device, new Beacon());
    }
}
public class DomConfigurationFactory { /** * Parse codes from XML * @ param dc DOM document with configuration * @ return codes * @ throws TextProcessorFactoryException any problem */ private Map < String , Code > parseCodes ( Document dc , Map < String , Scope > scopes ) { } }
Map < String , Code > codes = new HashMap < String , Code > ( ) ; NodeList codeNodeList = dc . getDocumentElement ( ) . getElementsByTagNameNS ( SCHEMA_LOCATION , TAG_CODE ) ; for ( int i = 0 ; i < codeNodeList . getLength ( ) ; i ++ ) { Code code = parseCode ( ( Element ) codeNodeList . item ( i ) , scopes ) ; codes . put ( code . getName ( ) , code ) ; } return codes ;
public class FacesConfigFlowDefinitionFlowCallTypeImpl { /** * Returns all < code > outbound - parameter < / code > elements * @ return list of < code > outbound - parameter < / code > */ public List < FacesConfigFlowDefinitionFlowCallOutboundParameterType < FacesConfigFlowDefinitionFlowCallType < T > > > getAllOutboundParameter ( ) { } }
List < FacesConfigFlowDefinitionFlowCallOutboundParameterType < FacesConfigFlowDefinitionFlowCallType < T > > > list = new ArrayList < FacesConfigFlowDefinitionFlowCallOutboundParameterType < FacesConfigFlowDefinitionFlowCallType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "outbound-parameter" ) ; for ( Node node : nodeList ) { FacesConfigFlowDefinitionFlowCallOutboundParameterType < FacesConfigFlowDefinitionFlowCallType < T > > type = new FacesConfigFlowDefinitionFlowCallOutboundParameterTypeImpl < FacesConfigFlowDefinitionFlowCallType < T > > ( this , "outbound-parameter" , childNode , node ) ; list . add ( type ) ; } return list ;
public class JsonBuilder {
    /**
     * A method call on the JSON builder instance will create a root object with only one key
     * whose name is the name of the method being called.
     * This method takes as arguments:
     * <ul>
     * <li>a closure</li>
     * <li>a map (ie. named arguments)</li>
     * <li>a map and a closure</li>
     * <li>or no argument at all</li>
     * </ul>
     * Example with a classical builder-style:
     * <pre><code class="groovyTestCase">
     * def json = new groovy.json.JsonBuilder()
     * def result = json.person {
     *     name "Guillaume"
     *     age 33
     * }
     * assert result instanceof Map
     * assert json.toString() == '{"person":{"name":"Guillaume","age":33}}'
     * </code></pre>
     * Or alternatively with a method call taking named arguments:
     * <pre><code class="groovyTestCase">
     * def json = new groovy.json.JsonBuilder()
     * json.person name: "Guillaume", age: 33
     * assert json.toString() == '{"person":{"name":"Guillaume","age":33}}'
     * </code></pre>
     * If you use named arguments and a closure as last argument, the key/value pairs of the map
     * (as named arguments) and the key/value pairs represented in the closure will be merged
     * together &mdash; the closure properties overriding the map key/values in case the same key
     * is used.
     * <pre><code class="groovyTestCase">
     * def json = new groovy.json.JsonBuilder()
     * json.person(name: "Guillaume", age: 33) { town "Paris" }
     * assert json.toString() == '{"person":{"name":"Guillaume","age":33,"town":"Paris"}}'
     * </code></pre>
     * The empty args call will create a key whose value will be an empty JSON object:
     * <pre><code class="groovyTestCase">
     * def json = new groovy.json.JsonBuilder()
     * json.person()
     * assert json.toString() == '{"person":{}}'
     * </code></pre>
     *
     * @param name the single key
     * @param args the value associated with the key
     * @return a map with a single key
     */
    public Object invokeMethod(String name, Object args) {
        if (args != null && Object[].class.isAssignableFrom(args.getClass())) {
            Object[] arr = (Object[]) args;
            if (arr.length == 0) {
                // No arguments: the key maps to an empty JSON object.
                return setAndGetContent(name, new HashMap<String, Object>());
            } else if (arr.length == 1) {
                // Single argument: either a closure (nested object) or a map.
                if (arr[0] instanceof Closure) {
                    return setAndGetContent(name, JsonDelegate.cloneDelegateAndGetContent((Closure) arr[0]));
                } else if (arr[0] instanceof Map) {
                    return setAndGetContent(name, arr[0]);
                }
            } else if (arr.length == 2) {
                final Object first = arr[0];
                final Object second = arr[1];
                if (second instanceof Closure) {
                    final Closure closure = (Closure) second;
                    if (first instanceof Map) {
                        // Map + closure: merge them, closure entries win on key clash.
                        Map subMap = new LinkedHashMap();
                        subMap.putAll((Map) first);
                        subMap.putAll(JsonDelegate.cloneDelegateAndGetContent(closure));
                        return setAndGetContent(name, subMap);
                    } else if (first instanceof Iterable) {
                        // Iterable + closure: apply the closure to each element.
                        List<Map<String, Object>> list = collectContentForEachEntry((Iterable) first, closure);
                        return setAndGetContent(name, list);
                    } else if (first != null && first.getClass().isArray()) {
                        // Array + closure: treat the array as an iterable.
                        final Iterable coll = Arrays.asList((Object[]) first);
                        List<Map<String, Object>> list = collectContentForEachEntry(coll, closure);
                        return setAndGetContent(name, list);
                    }
                }
            }
            // Any other argument combination is unsupported.
            throw new JsonException("Expected no arguments, a single map, a single closure, or a map and closure as arguments.");
        } else {
            // Null (or non-array) args: same as the no-argument call.
            return setAndGetContent(name, new HashMap<String, Object>());
        }
    }
}
public class AggregatorImpl {
    /**
     * Implementation of eponymous console command. Provided to allow cache priming requests to be
     * issued via the server console by automation scripts.
     *
     * @param requestUrl the URL to process
     * @return the status code as a string
     * @throws IOException
     * @throws ServletException
     */
    public String processRequestUrl(String requestUrl) throws IOException, ServletException {
        ConsoleHttpServletRequest req = new ConsoleHttpServletRequest(getServletConfig().getServletContext(), requestUrl);
        // The response body is discarded; only the status code matters.
        OutputStream nulOutputStream = new OutputStream() {
            @Override
            public void write(int b) throws IOException {
            }
        };
        ConsoleHttpServletResponse resp = new ConsoleHttpServletResponse(nulOutputStream);
        doGet(req, resp);
        return Integer.toString(resp.getStatus());
    }
}
public class WReportPdf {
    /**
     * <p>Delegate request and servlet output stream to file-reporter handler.</p>
     * Resolves the handler bean named by the "nmHnd" parameter, applies i18n
     * handling, streams the PDF, and on failure logs the error and sends a 500.
     *
     * @param pReq HttpServletRequest
     * @param pResp HttpServletResponse
     * @throws ServletException ServletException
     * @throws IOException IOException
     */
    public final void doReportPdf(final HttpServletRequest pReq, final HttpServletResponse pResp) throws ServletException, IOException {
        pReq.setCharacterEncoding("UTF-8");
        try {
            HashMap<String, Object> reqVars = new HashMap<String, Object>();
            // Handler bean name is supplied by the client request.
            String nameHandler = pReq.getParameter("nmHnd");
            IHndlFileReportReq srvHandleRequest = (IHndlFileReportReq) this.factoryAppBeans.lazyGet(nameHandler);
            HttpRequestData requestData = new HttpRequestData(pReq, pResp);
            requestData.setAttribute("reqVars", reqVars);
            // Apply internationalization before producing the report.
            IHandlerRequest hndlI18nRequest = (IHandlerRequest) this.factoryAppBeans.lazyGet("hndlI18nRequest");
            hndlI18nRequest.handle(reqVars, requestData);
            pResp.setContentType("application/pdf");
            String fileName = pReq.getParameter("fileName");
            if (fileName != null && !"".equals(fileName)) {
                // Offer the PDF as a download with the requested file name.
                pResp.setHeader("Content-Disposition", "attachment; filename=" + fileName + ".pdf");
            }
            srvHandleRequest.handle(reqVars, requestData, pResp.getOutputStream());
        } catch (Exception e) {
            // Best-effort logging: fall back to stderr if the logger bean is unavailable.
            if (factoryAppBeans != null) {
                ILog logger = null;
                try {
                    logger = (ILog) factoryAppBeans.lazyGet("ILog");
                } catch (Exception e1) {
                    e1.printStackTrace();
                }
                if (logger != null) {
                    logger.error(null, getClass(), "WORK", e);
                } else {
                    e.printStackTrace();
                }
            } else {
                e.printStackTrace();
            }
            // Expose error details to the error page via request attributes.
            if (e instanceof ExceptionWithCode) {
                pReq.setAttribute("error_code", ((ExceptionWithCode) e).getCode());
                pReq.setAttribute("short_message", ((ExceptionWithCode) e).getShortMessage());
            } else {
                pReq.setAttribute("error_code", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            }
            pReq.setAttribute("javax.servlet.error.status_code", HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            pReq.setAttribute("javax.servlet.error.exception", e);
            pReq.setAttribute("javax.servlet.error.request_uri", pReq.getRequestURI());
            pReq.setAttribute("javax.servlet.error.servlet_name", this.getClass().getCanonicalName());
            pResp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
        }
    }
}
public class PowerAdapter { /** * Returns a new adapter that presents all items of this adapter , up until the specified limit . * @ param limit The item limit . * @ return A new adapter . */ @ CheckResult @ NonNull public final PowerAdapter limit ( int limit ) { } }
if ( limit == Integer . MAX_VALUE ) { return this ; } if ( limit <= 0 ) { return EMPTY ; } return new LimitAdapter ( this , limit ) ;
public class GeometryRendererImpl {
    /**
     * Handles a coordinate-snap attempt while a vertex is being inserted:
     * redraws the in-progress vertex at the (possibly snapped) target location,
     * using the snapped style when the snap succeeded.
     */
    public void onCoordinateSnapAttempt(CoordinateSnapEvent event) {
        // Only relevant while the editing service is inserting a new vertex.
        if (editingService.getEditingState() == GeometryEditState.INSERTING) {
            String identifier = baseName + "." + editingService.getIndexService().format(editingService.getInsertIndex());
            // Parent group is the ".vertices" group of the enclosing geometry.
            Object parentGroup = groups.get(identifier.substring(0, identifier.lastIndexOf('.')) + ".vertices");
            Coordinate temp = event.getTo();
            // Convert from world space to pan space before drawing.
            Coordinate coordinate = mapWidget.getMapModel().getMapView().getWorldViewTransformer().worldToPan(temp);
            addShapeToGraphicsContext(mapWidget.getVectorContext(), parentGroup, identifier, coordinate,
                    event.hasSnapped() ? styleService.getVertexSnappedStyle() : new ShapeStyle());
        }
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link String}{@code >}
     * for the XBEL "copyright" element.
     *
     * @param value the copyright text to wrap
     * @return the wrapped element
     */
    @XmlElementDecl(namespace = "http://belframework.org/schema/1.0/xbel", name = "copyright")
    public JAXBElement<String> createCopyright(String value) {
        // No enclosing scope class (third argument null).
        return new JAXBElement<String>(_Copyright_QNAME, String.class, null, value);
    }
}
public class DescribeNetworkAclsRequest { /** * One or more network ACL IDs . * Default : Describes all your network ACLs . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNetworkAclIds ( java . util . Collection ) } or { @ link # withNetworkAclIds ( java . util . Collection ) } if you want * to override the existing values . * @ param networkAclIds * One or more network ACL IDs . < / p > * Default : Describes all your network ACLs . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeNetworkAclsRequest withNetworkAclIds ( String ... networkAclIds ) { } }
if ( this . networkAclIds == null ) { setNetworkAclIds ( new com . amazonaws . internal . SdkInternalList < String > ( networkAclIds . length ) ) ; } for ( String ele : networkAclIds ) { this . networkAclIds . add ( ele ) ; } return this ;
public class GroupHierarchyConfig { /** * Set sub items * @ param subitems items , that specified for group * @ return current { @ link GroupHierarchyConfig } instance */ public GroupHierarchyConfig subitems ( InfrastructureItem ... subitems ) { } }
checkNotNull ( subitems , "List of server configs must be not a null" ) ; this . subitems . addAll ( asList ( subitems ) ) ; return this ;
public class LogFileHeader { /** * Determines if the supplied magic number is a valid log file header magic number * as stored in MAGIC _ NUMBER * @ param magicNumberBuffer The buffer containing the magic number tio compare * @ return boolean true if the headers match , otherwise false */ private boolean validMagicNumber ( byte [ ] magicNumberBuffer ) { } }
if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "validMagicNumber" , new java . lang . Object [ ] { RLSUtils . toHexString ( magicNumberBuffer , RLSUtils . MAX_DISPLAY_BYTES ) , this } ) ; boolean incorrectByteDetected = false ; int currentByte = 0 ; while ( ( ! incorrectByteDetected ) && ( currentByte < LogFileHeader . MAGIC_NUMBER . length ) ) { if ( magicNumberBuffer [ currentByte ] != LogFileHeader . MAGIC_NUMBER [ currentByte ] ) { incorrectByteDetected = true ; } currentByte ++ ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "validMagicNumber" , new Boolean ( ! incorrectByteDetected ) ) ; return ! incorrectByteDetected ;
public class Remove { /** * remove ( input , string ) * remove a substring */ @ Override public Object apply ( Object value , Object ... params ) { } }
String original = super . asString ( value ) ; Object needle = super . get ( 0 , params ) ; if ( needle == null ) { throw new RuntimeException ( "invalid pattern: " + needle ) ; } return original . replace ( String . valueOf ( needle ) , "" ) ;
public class GobblinMetrics {
    /**
     * Add a {@link Tag} to a {@link Properties} with key {@link #METRICS_STATE_CUSTOM_TAGS}.
     * Also see {@link #addCustomTagToState(State, Tag)}.
     * The {@link Properties} passed can be used to build a {@link State}.
     * {@link org.apache.gobblin.metrics.Tag}s under this key can later be parsed using the method
     * {@link #getCustomTagsFromState}.
     *
     * @param properties {@link Properties} to add the tag to.
     * @param tag {@link Tag} to add.
     */
    public static void addCustomTagToProperties(Properties properties, Tag<?> tag) {
        // Build a state wrapper to add custom tag to property.
        // The State wraps (not copies) the Properties, so the tag is written through.
        State state = new State(properties);
        addCustomTagToState(state, tag);
    }
}
public class DefaultInputResolver { /** * A very simple utility expansion method used generally when the * only way to resolve an entity is via passed resolver ; and where * failing to resolve it is not fatal . */ public static WstxInputSource resolveEntityUsing ( WstxInputSource refCtxt , String entityName , String publicId , String systemId , XMLResolver resolver , ReaderConfig cfg , int xmlVersion ) throws IOException , XMLStreamException { } }
URL ctxt = ( refCtxt == null ) ? null : refCtxt . getSource ( ) ; if ( ctxt == null ) { ctxt = URLUtil . urlFromCurrentDir ( ) ; } Object source = resolver . resolveEntity ( publicId , systemId , ctxt . toExternalForm ( ) , entityName ) ; return ( source == null ) ? null : sourceFrom ( refCtxt , cfg , entityName , xmlVersion , source ) ;
public class PersonDictionary {
    /**
     * Pattern matching for person-name recognition.
     *
     * @param nrList the confirmed NR role-label sequence
     * @param vertexList the original, unlabeled vertex sequence
     * @param wordNetOptimum the word graph being optimized (names are inserted here)
     * @param wordNetAll the full word graph
     */
    public static void parsePattern(List<NR> nrList, List<Vertex> vertexList, final WordNet wordNetOptimum, final WordNet wordNetAll) {
        // Split U and V labels: a U/V vertex covers two roles and must be cut in two.
        ListIterator<Vertex> listIterator = vertexList.listIterator();
        StringBuilder sbPattern = new StringBuilder(nrList.size());
        NR preNR = NR.A;
        // backUp: whether vertexList has been copied so it can be mutated safely.
        boolean backUp = false;
        int index = 0;
        for (NR nr : nrList) {
            ++index;
            Vertex current = listIterator.next();
            // logger.trace("{}/{}", current.realWord, nr);
            switch (nr) {
                case U:
                    // U = (surname suffix K) + (given-name start B): split the word.
                    if (!backUp) {
                        vertexList = new ArrayList<Vertex>(vertexList);
                        listIterator = vertexList.listIterator(index);
                        backUp = true;
                    }
                    sbPattern.append(NR.K.toString());
                    sbPattern.append(NR.B.toString());
                    preNR = B;
                    listIterator.previous();
                    // Last character becomes B; the rest stays as K.
                    String nowK = current.realWord.substring(0, current.realWord.length() - 1);
                    String nowB = current.realWord.substring(current.realWord.length() - 1);
                    listIterator.set(new Vertex(nowK));
                    listIterator.next();
                    listIterator.add(new Vertex(nowB));
                    continue;
                case V:
                    if (!backUp) {
                        vertexList = new ArrayList<Vertex>(vertexList);
                        listIterator = vertexList.listIterator(index);
                        backUp = true;
                    }
                    // V splits into (E or D, depending on the previous label) + L.
                    if (preNR == B) {
                        sbPattern.append(NR.E.toString()); // BE
                    } else {
                        sbPattern.append(NR.D.toString()); // CD
                    }
                    sbPattern.append(NR.L.toString());
                    // Apply the same split to the vertex sequence.
                    listIterator.previous();
                    String EorD = current.realWord.substring(0, 1);
                    String L = current.realWord.substring(1, current.realWord.length());
                    listIterator.set(new Vertex(EorD));
                    listIterator.next();
                    listIterator.add(new Vertex(L));
                    continue;
                default:
                    sbPattern.append(nr.toString());
                    break;
            }
            preNR = nr;
        }
        String pattern = sbPattern.toString();
        // logger.trace("pattern string: {}", pattern);
        // logger.trace("matching sequence: {}", vertexList);
        // if (pattern.length() != vertexList.size())
        //     logger.warn("person-name pattern has a bug", pattern, vertexList);
        // return;
        final Vertex[] wordArray = vertexList.toArray(new Vertex[0]);
        // offsetArray[i] = character offset of wordArray[i] in the sentence.
        final int[] offsetArray = new int[wordArray.length];
        offsetArray[0] = 0;
        for (int i = 1; i < wordArray.length; ++i) {
            offsetArray[i] = offsetArray[i - 1] + wordArray[i - 1].realWord.length();
        }
        // Scan the role pattern for known name patterns via the AC automaton.
        trie.parseText(pattern, new AhoCorasickDoubleArrayTrie.IHit<NRPattern>() {
            @Override
            public void hit(int begin, int end, NRPattern value) {
                // logger.trace("matched: {}", keyword);
                StringBuilder sbName = new StringBuilder();
                for (int i = begin; i < end; ++i) {
                    sbName.append(wordArray[i].realWord);
                }
                String name = sbName.toString();
                // logger.trace("recognized: {}", name);
                // Adjust for some known bad cases.
                switch (value) {
                    case BCD:
                        // The surname and the last given-name character cannot be equal.
                        if (name.charAt(0) == name.charAt(2)) return;
                        // String cd = name.substring(1);
                        // if (CoreDictionary.contains(cd))
                        // EnumItem<NR> item = PersonDictionary.dictionary.get(cd);
                        // if (item == null || !item.containsLabel(Z)) return; // three-character name whose last two characters are not in the dictionary is very likely a false hit
                        break;
                }
                if (isBadCase(name)) return;
                // Officially accept it as a person name.
                if (HanLP.Config.DEBUG) {
                    System.out.printf("识别出人名:%s %s\n", name, value);
                }
                int offset = offsetArray[begin];
                wordNetOptimum.insert(offset, new Vertex(Predefine.TAG_PEOPLE, name, ATTRIBUTE, WORD_ID), wordNetAll);
            }
        });
    }
}
public class SS { /** * Returns a < code > SetAction < / code > object used for building update * expression . If the attribute referred to by this path operand doesn ' t * exist , the returned object represents adding the specified value as an * attribute to an item . If the attribute referred to by this path operand * already exists , the returned object represents the value replacement of * the current attribute by the specified value . */ public final SetAction set ( String ... values ) { } }
return new SetAction ( this , new LiteralOperand ( new LinkedHashSet < String > ( Arrays . asList ( values ) ) ) ) ;
public class OrcParser {
    /**
     * Writes one column of an H2O frame from an ORC column vector holding
     * byte-based values (string, varchar, char, binary).
     *
     * @param col       ORC column vector containing the raw bytes plus per-row
     *                  start offsets and lengths
     * @param cIdx      destination column index in the output frame
     * @param rowNumber number of rows to write from this batch
     * @param dout      parse writer receiving the column values
     */
    private void writeStringcolumn(BytesColumnVector col, int cIdx, int rowNumber, ParseWriter dout) {
        // One reusable view over the column's backing byte arrays; set() only
        // repoints it, so no per-row allocation happens.
        BufferedString bs = new BufferedString();
        if (col.isRepeating) {
            // Every row holds the same value: materialize it once from slot 0,
            // then emit the same BufferedString for the remaining rows.
            assert col.length[0] >= 0 : getClass().getSimpleName() + ".writeStringcolumn/1: col.length[0]=" + col.length[0] + ", col.start[0]=" + col.start[0];
            dout.addStrCol(cIdx, bs.set(col.vector[0], col.start[0], col.length[0]));
            for (int rowIndex = 1; rowIndex < rowNumber; ++rowIndex)
                dout.addStrCol(cIdx, bs);
        } else if (col.noNulls) {
            // Dense case: no null bitmap to consult.
            for (int rowIndex = 0; rowIndex < rowNumber; rowIndex++) {
                int l = col.length[rowIndex];
                assert l >= 0 : getClass().getSimpleName() + ".writeStringcolumn/2: col.col.length[rowIndex]=" + l + ", rowIndex=" + rowIndex;
                dout.addStrCol(cIdx, bs.set(col.vector[rowIndex], col.start[rowIndex], l));
            }
        } else {
            // Sparse case: rows flagged in isNull become invalid (NA) entries.
            boolean[] isNull = col.isNull;
            for (int rowIndex = 0; rowIndex < rowNumber; rowIndex++) {
                if (isNull[rowIndex]) dout.addInvalidCol(cIdx);
                else {
                    int l = col.length[rowIndex];
                    assert l >= 0 : getClass().getSimpleName() + ".writeStringcolumn/3: col.col.length[rowIndex]=" + l + ", rowIndex=" + rowIndex;
                    dout.addStrCol(cIdx, bs.set(col.vector[rowIndex], col.start[rowIndex], col.length[rowIndex]));
                }
            }
        }
    }
}
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link MpaddedType } { @ code > } } */ @ XmlElementDecl ( namespace = "http://www.w3.org/1998/Math/MathML" , name = "mpadded" ) public JAXBElement < MpaddedType > createMpadded ( MpaddedType value ) { } }
return new JAXBElement < MpaddedType > ( _Mpadded_QNAME , MpaddedType . class , null , value ) ;
public class GraphService { /** * Removes the specified type from the frame . */ public static < T extends WindupVertexFrame > WindupVertexFrame removeTypeFromModel ( GraphContext graphContext , WindupVertexFrame frame , Class < T > type ) { } }
Vertex vertex = frame . getElement ( ) ; graphContext . getGraphTypeManager ( ) . removeTypeFromElement ( type , vertex ) ; return graphContext . getFramed ( ) . frameElement ( vertex , WindupVertexFrame . class ) ;
public class DefaultLightblueDataResponse { /** * Returns a result metadata array where each element corresponds * to the metadata for the result at the same index in processed * array . */ @ Override public ResultMetadata [ ] getResultMetadata ( ) throws LightblueParseException { } }
JsonNode node = getJson ( ) . get ( "resultMetadata" ) ; if ( node instanceof ArrayNode ) { try { return getMapper ( ) . readValue ( node . traverse ( ) , ResultMetadata [ ] . class ) ; } catch ( IOException e ) { throw new LightblueParseException ( "Error parsing lightblue response:" + getText ( ) + "\n" , e ) ; } } else { return null ; }
public class StatisticsMatrix { /** * Wraps a StatisticsMatrix around ' m ' . Does NOT create a copy of ' m ' but saves a reference * to it . */ public static StatisticsMatrix wrap ( DMatrixRMaj m ) { } }
StatisticsMatrix ret = new StatisticsMatrix ( ) ; ret . setMatrix ( m ) ; return ret ;
public class HpelHelper { /** * Converts provided thread id into eight character hexadecimal string * @ param threadId id to convert * @ return String representation of the thread id */ public static String threadIdToString ( int threadId ) { } }
StringBuffer buffer = new StringBuffer ( 8 ) ; // pad the HexString ThreadId so that it is always 8 characters long . for ( int shift = 7 ; shift >= 0 ; shift -- ) { buffer . append ( hexChars [ ( threadId >> ( shift << 2 ) ) & 0xF ] ) ; } return buffer . toString ( ) ;
public class ObjectType { /** * Returns a lazy , dynamic { @ link Iterable } for the types forming the implicit prototype chain of * this type . * < p > The chain is iterated bottom to top ; from the nearest ancestor to the most distant . * Iteration stops when the next ancestor would be a { @ code null } reference . * < p > The created { @ link Iterator } s will not reflect changes to the prototype chain of elements it * has already iterated past , but will reflect those of upcoming elements . Neither the { @ link * Iterable } nor its { @ link Iterator } support mutation . */ public final Iterable < ObjectType > getImplicitPrototypeChain ( ) { } }
final ObjectType self = this ; return ( ) -> new AbstractIterator < ObjectType > ( ) { private ObjectType next = self ; // We increment past this type before first access . @ Override public ObjectType computeNext ( ) { next = next . getImplicitPrototype ( ) ; return ( next != null ) ? next : endOfData ( ) ; } } ;
public class IconicsDrawable { /** * Loads and draws given text * @ return The current IconicsDrawable for chaining . */ @ NonNull public IconicsDrawable iconText ( @ NonNull String icon , @ Nullable Typeface typeface ) { } }
mPlainIcon = icon ; mIcon = null ; mIconBrush . getPaint ( ) . setTypeface ( typeface == null ? Typeface . DEFAULT : typeface ) ; invalidateSelf ( ) ; return this ;
public class AvroSerializer {
    /**
     * Deep-copies the given value using Avro's {@code deepCopy} with the
     * runtime schema.
     *
     * <p>When {@code CONCURRENT_ACCESS_CHECK} is enabled, the call is bracketed
     * by enter/exit guards that detect concurrent use of this serializer —
     * presumably it is not thread-safe (TODO confirm against class docs).
     */
    @Override
    public T copy(T from) {
        if (CONCURRENT_ACCESS_CHECK) {
            enterExclusiveThread();
        }
        try {
            // Lazily initializes avroData/runtimeSchema before first use.
            checkAvroInitialized();
            return avroData.deepCopy(runtimeSchema, from);
        } finally {
            // Always release the exclusive-thread marker, even on failure.
            if (CONCURRENT_ACCESS_CHECK) {
                exitExclusiveThread();
            }
        }
    }
}
public class CProductPersistenceImpl {
    /**
     * Returns a range of all the c products.
     *
     * <p>Useful when paginating results. Returns a maximum of
     * <code>end - start</code> instances. <code>start</code> and
     * <code>end</code> are not primary keys, they are indexes in the result
     * set: <code>0</code> refers to the first result. Setting both to
     * {@link QueryUtil#ALL_POS} returns the full result set.
     *
     * @param start the lower bound of the range of c products
     * @param end the upper bound of the range of c products (not inclusive)
     * @return the range of c products
     */
    @Override
    public List<CProduct> findAll(int start, int end) {
        // Delegate to the three-argument overload with no ORDER BY comparator;
        // that overload falls back to primary-key ascending order.
        return findAll(start, end, null);
    }
}
public class EGRImpl {
    /**
     * EMF-generated reflective accessor: maps a feature id to the
     * corresponding getter, deferring unknown ids to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.EGR__GDO_NAME:
                return getGdoName();
            case AfplibPackage.EGR__TRIPLETS:
                return getTriplets();
        }
        // Unrecognized feature ids are handled by the generated superclass.
        return super.eGet(featureID, resolve, coreType);
    }
}
public class IdListProvider { /** * 保证 idList 不能为空 * @ param list * @ param errorMsg */ public static void notEmpty ( List < ? > list , String errorMsg ) { } }
if ( list == null || list . size ( ) == 0 ) { throw new MapperException ( errorMsg ) ; }
public class CmsXmlUtils { /** * Helper to unmarshal ( read ) xml contents from an input source into a document . < p > * Using this method ensures that the OpenCms XML entity resolver is used . < p > * Important : The encoding provided will NOT be used during unmarshalling , * the XML parser will do this on the base of the information in the source String . * The encoding is used for initializing the created instance of the document , * which means it will be used when marshalling the document again later . < p > * @ param source the XML input source to use * @ param resolver the XML entity resolver to use * @ param validate if the reader should try to validate the xml code * @ return the unmarshalled XML document * @ throws CmsXmlException if something goes wrong */ public static Document unmarshalHelper ( InputSource source , EntityResolver resolver , boolean validate ) throws CmsXmlException { } }
if ( null == source ) { throw new CmsXmlException ( Messages . get ( ) . container ( Messages . ERR_UNMARSHALLING_XML_DOC_1 , "source==null!" ) ) ; } try { SAXReader reader = new SAXReader ( ) ; if ( resolver != null ) { reader . setEntityResolver ( resolver ) ; } reader . setMergeAdjacentText ( true ) ; reader . setStripWhitespaceText ( true ) ; if ( ! validate ) { reader . setValidation ( false ) ; reader . setFeature ( "http://apache.org/xml/features/nonvalidating/load-external-dtd" , false ) ; } else { reader . setValidation ( true ) ; } return reader . read ( source ) ; } catch ( DocumentException e ) { String systemId = source != null ? source . getSystemId ( ) : "???" ; throw new CmsXmlException ( Messages . get ( ) . container ( Messages . ERR_UNMARSHALLING_XML_DOC_1 , "(systemId = " + systemId + ")" ) , e ) ; } catch ( SAXException e ) { String systemId = source != null ? source . getSystemId ( ) : "???" ; throw new CmsXmlException ( Messages . get ( ) . container ( Messages . ERR_UNMARSHALLING_XML_DOC_1 , "(systemId = " + systemId + ")" ) , e ) ; }
public class DebugAndFilterModule {
    /**
     * Just for the overflowing files: computes where an overflowing file
     * lands relative to the job's output directory.
     *
     * @param overflowingFile the overflowing file
     * @param job current job, supplies the input file and output directory
     * @return relative system path to out which ends in
     *         {@link java.io.File#separator File.separator}
     */
    private static String getRelativePathFromOut(final File overflowingFile, final Job job) {
        // Path of the overflowing file relative to the job input file.
        final URI relativePath = URLUtils.getRelativePath(job.getInputFile(), overflowingFile.toURI());
        final File outputDir = job.getOutputDir().getAbsoluteFile();
        // NOTE(review): "index.html" appears to serve only as an anchor file
        // inside outputDir for the relative-path computation below - confirm.
        final File outputPathName = new File(outputDir, "index.html");
        // Re-root the relative path under the output directory.
        final File finalOutFilePathName = resolve(outputDir, relativePath.getPath());
        final File finalRelativePathName = FileUtils.getRelativePath(finalOutFilePathName, outputPathName);
        File parentDir = finalRelativePathName.getParentFile();
        // A file directly in the output root has no parent - normalize to ".".
        if (parentDir == null || parentDir.getPath().isEmpty()) {
            parentDir = new File(".");
        }
        return parentDir.getPath() + File.separator;
    }
}
public class RequestServer { /** * Log the request ( unless it ' s an overly common one ) . * @ return flag whether the request parameters might be sensitive or not */ private static boolean maybeLogRequest ( RequestUri uri , Properties header , Properties parms ) { } }
LogFilterLevel level = LogFilterLevel . LOG ; for ( HttpLogFilter f : _filters ) level = level . reduce ( f . filter ( uri , header , parms ) ) ; switch ( level ) { case DO_NOT_LOG : return false ; // do not log the request by default but allow parameters to be logged on exceptional completion case URL_ONLY : Log . info ( uri , ", parms: <hidden>" ) ; return true ; // parameters are sensitive - never log them default : Log . info ( uri + ", parms: " + parms ) ; return false ; }
public class Bean { /** * Add a value to a property on this bean . * @ param propertyName name of the property as defined by the bean ' s schema . * @ param value final String representations of the property that conforms to * its type as defined by the bean ' s schema . */ public void addProperty ( final String propertyName , final String value ) { } }
Preconditions . checkNotNull ( propertyName ) ; Preconditions . checkNotNull ( value ) ; List < String > values = properties . get ( propertyName ) ; if ( values == null ) { values = new ArrayList < > ( ) ; values . add ( value ) ; properties . put ( propertyName , values ) ; } else { values . add ( value ) ; }
public class ConsumerDispatcherState {
    /**
     * Sets the ready flag.
     *
     * @param ready The ready to set
     */
    public void setReady(boolean ready) {
        // Standard SIB entry trace, gated so tracing costs nothing when disabled.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "setReady", Boolean.valueOf(ready));

        this.ready = ready;

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "setReady");
    }
}
public class Role { /** * Returns a new role given its string value . * < p > If the value contains no slash character ( { @ code ' / ' } ) , the prefix { @ code " roles / " " } is * prepended . This slightly simplifies usage for < a * href = " https : / / cloud . google . com / iam / docs / understanding - roles " > predefined roles < / a > . For < a * href = " https : / / cloud . google . com / iam / docs / creating - custom - roles " > custom roles < / a > , call this * method with the fully - qualified name , eg { @ code " projects / XXX / roles / YYY " } . * @ param value the string value for the role * @ see < a href = " https : / / cloud . google . com / iam / docs / viewing - grantable - roles " > Viewing the Grantable * Roles on Resources < / a > */ public static Role of ( String value ) { } }
checkNotNull ( value ) ; if ( ! value . contains ( "/" ) ) { value = ROLE_PREFIX + value ; } return new Role ( value ) ;
public class TargetSslProxyClient { /** * Returns the specified TargetSslProxy resource . Gets a list of available target SSL proxies by * making a list ( ) request . * < p > Sample code : * < pre > < code > * try ( TargetSslProxyClient targetSslProxyClient = TargetSslProxyClient . create ( ) ) { * ProjectGlobalTargetSslProxyName targetSslProxy = ProjectGlobalTargetSslProxyName . of ( " [ PROJECT ] " , " [ TARGET _ SSL _ PROXY ] " ) ; * TargetSslProxy response = targetSslProxyClient . getTargetSslProxy ( targetSslProxy ) ; * < / code > < / pre > * @ param targetSslProxy Name of the TargetSslProxy resource to return . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final TargetSslProxy getTargetSslProxy ( ProjectGlobalTargetSslProxyName targetSslProxy ) { } }
GetTargetSslProxyHttpRequest request = GetTargetSslProxyHttpRequest . newBuilder ( ) . setTargetSslProxy ( targetSslProxy == null ? null : targetSslProxy . toString ( ) ) . build ( ) ; return getTargetSslProxy ( request ) ;
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the IfcLabel EClass from the registered
     * Ifc4 package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcLabel() {
        if (ifcLabelEClass == null) {
            // 822 is the generated classifier index of IfcLabel within the
            // Ifc4 package's classifier list (generator-maintained constant).
            ifcLabelEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(822);
        }
        return ifcLabelEClass;
    }
}
public class TypeValMapper {
    /**
     * Returns the SqlType value associated with the typeVal argument, or
     * {@code SqlType.UNKNOWN} when no mapping matches. Can be slow-er.
     */
    public static SqlType getSqlTypeForTypeVal(int typeVal) {
        // Linear scan over the forward map: avoids maintaining an extra
        // reverse HashMap since this path is only hit for errors.
        for (Map.Entry<SqlType, int[]> entry : typeToValMap.entrySet()) {
            int[] candidateVals = entry.getValue();
            for (int i = 0; i < candidateVals.length; i++) {
                if (candidateVals[i] == typeVal) {
                    return entry.getKey();
                }
            }
        }
        return SqlType.UNKNOWN;
    }
}
public class CheckpointTupleForwarder { /** * Checks if check points have been received from all tasks across * all input streams to this component */ private boolean shouldProcessTransaction ( Action action , long txid ) { } }
TransactionRequest request = new TransactionRequest ( action , txid ) ; Integer count ; if ( ( count = transactionRequestCount . get ( request ) ) == null ) { transactionRequestCount . put ( request , 1 ) ; count = 1 ; } else { transactionRequestCount . put ( request , ++ count ) ; } if ( count == checkPointInputTaskCount ) { transactionRequestCount . remove ( request ) ; return true ; } return false ;
public class ManagedAuditLoggerImpl {
    /**
     * Records an audit-log update failure and disables the logger once the
     * failure threshold is reached.
     *
     * <p>Protected by config's audit lock (per the original note), so the
     * unsynchronized increment of {@code failureCount} is safe.
     */
    private void handleLoggingException(final Exception e) {
        ControllerLogger.MGMT_OP_LOGGER.failedToUpdateAuditLog(e);
        // Exact == comparison assumes this is no longer invoked once the
        // logger is DISABLED; otherwise the counter would pass the threshold
        // without re-triggering - TODO confirm callers stop after disable.
        if (++failureCount == MAX_FAILURE_COUNT) {
            // Continuous failure likely indicates some configuration problem
            setLoggerStatus(Status.DISABLED);
            ControllerLogger.MGMT_OP_LOGGER.disablingLoggingDueToFailures(failureCount);
        }
    }
}