signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class LanguageAlchemyEntity { /** * Set link to Ethnologue containing information on the detected language . * For more information on Ethnologue : @ see < a href = " http : / / www . ethnologue . com / info . asp " > http : / / www . ethnologue . com / info . asp < / a > * @ param ethnologue link to Ethnologue containing information on the detected language */ public void setEthnologue ( String ethnologue ) { } }
if ( ethnologue != null ) { ethnologue = ethnologue . trim ( ) ; } this . ethnologue = ethnologue ;
public class Types {
    /**
     * Returns the precedence of the specified operator token type. Higher values bind
     * more tightly (e.g. DOT at 80 vs. EQUAL at 5). Non-operators will receive -1 or
     * a GroovyBugError, depending on {@code throwIfInvalid}.
     *
     * @param type the token type constant to look up
     * @param throwIfInvalid if true, a GroovyBugError is thrown for non-operator types
     *        instead of returning -1
     * @return the precedence value (0..85), or -1 for a non-operator when
     *         {@code throwIfInvalid} is false
     */
    public static int getPrecedence(int type, boolean throwIfInvalid) {
        switch (type) {
            case LEFT_PARENTHESIS:
                return 0;
            // All assignment operators share the lowest operator precedence.
            case EQUAL:
            case PLUS_EQUAL:
            case MINUS_EQUAL:
            case MULTIPLY_EQUAL:
            case DIVIDE_EQUAL:
            case INTDIV_EQUAL:
            case MOD_EQUAL:
            case POWER_EQUAL:
            case LOGICAL_OR_EQUAL:
            case LOGICAL_AND_EQUAL:
            case LEFT_SHIFT_EQUAL:
            case RIGHT_SHIFT_EQUAL:
            case RIGHT_SHIFT_UNSIGNED_EQUAL:
            case BITWISE_OR_EQUAL:
            case BITWISE_AND_EQUAL:
            case BITWISE_XOR_EQUAL:
                return 5;
            case QUESTION:
                return 10;
            case LOGICAL_OR:
                return 15;
            case LOGICAL_AND:
                return 20;
            case BITWISE_OR:
            case BITWISE_AND:
            case BITWISE_XOR:
                return 22;
            case COMPARE_IDENTICAL:
            case COMPARE_NOT_IDENTICAL:
                return 24;
            // Comparison, regex-match and instanceof operators.
            case COMPARE_NOT_EQUAL:
            case COMPARE_EQUAL:
            case COMPARE_LESS_THAN:
            case COMPARE_LESS_THAN_EQUAL:
            case COMPARE_GREATER_THAN:
            case COMPARE_GREATER_THAN_EQUAL:
            case COMPARE_TO:
            case FIND_REGEX:
            case MATCH_REGEX:
            case KEYWORD_INSTANCEOF:
                return 25;
            case DOT_DOT:
            case DOT_DOT_DOT:
                return 30;
            case LEFT_SHIFT:
            case RIGHT_SHIFT:
            case RIGHT_SHIFT_UNSIGNED:
                return 35;
            case PLUS:
            case MINUS:
                return 40;
            case MULTIPLY:
            case DIVIDE:
            case INTDIV:
            case MOD:
                return 45;
            case NOT:
            case REGEX_PATTERN:
                return 50;
            case SYNTH_CAST:
                return 55;
            // Increment/decrement, both synthetic and positional variants.
            case PLUS_PLUS:
            case MINUS_MINUS:
            case PREFIX_PLUS_PLUS:
            case PREFIX_MINUS_MINUS:
            case POSTFIX_PLUS_PLUS:
            case POSTFIX_MINUS_MINUS:
                return 65;
            case PREFIX_PLUS:
            case PREFIX_MINUS:
                return 70;
            case POWER:
                return 72;
            case SYNTH_METHOD:
            case LEFT_SQUARE_BRACKET:
                return 75;
            case DOT:
            case NAVIGATE:
                return 80;
            case KEYWORD_NEW:
                return 85;
        }
        // Fell through the switch: not an operator token.
        if (throwIfInvalid) {
            throw new GroovyBugError("precedence requested for non-operator");
        }
        return -1;
    }
}
public class WebhooksInner { /** * Lists all the webhooks for the specified container registry . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; WebhookInner & gt ; object */ public Observable < Page < WebhookInner > > listNextAsync ( final String nextPageLink ) { } }
return listNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < WebhookInner > > , Page < WebhookInner > > ( ) { @ Override public Page < WebhookInner > call ( ServiceResponse < Page < WebhookInner > > response ) { return response . body ( ) ; } } ) ;
public class AbstractWALDAO { /** * This method may only be triggered with valid WAL filenames , as the passed * file is deleted ! */ final void _deleteWALFileAfterProcessing ( @ Nonnull @ Nonempty final String sWALFilename ) { } }
ValueEnforcer . notEmpty ( sWALFilename , "WALFilename" ) ; final File aWALFile = m_aIO . getFile ( sWALFilename ) ; if ( FileOperationManager . INSTANCE . deleteFile ( aWALFile ) . isFailure ( ) ) { if ( LOGGER . isErrorEnabled ( ) ) LOGGER . error ( "Failed to delete WAL file '" + aWALFile . getAbsolutePath ( ) + "'" ) ; } else { if ( ! isSilentMode ( ) ) if ( LOGGER . isInfoEnabled ( ) ) LOGGER . info ( "Deleted successfully imported WAL file '" + aWALFile . getAbsolutePath ( ) + "'" ) ; }
public class LargeBlockManager { /** * On startup , clear out the large query swap directory . * @ throws IOException */ private void startupInstance ( ) throws IOException { } }
assert ( m_blockPathMap . isEmpty ( ) ) ; try { clearSwapDir ( ) ; } catch ( Exception e ) { throw new IOException ( "Unable to clear large query swap directory: " + e . getMessage ( ) ) ; }
public class MonetizationApi { /** * Get devicetype & # 39 ; s pricing tiers . * Get devicetype & # 39 ; s pricing tiers . * @ param dtid DeviceType ID ( required ) * @ param version Version ( required ) * @ return ApiResponse & lt ; DeviceTypePricingTiersEnvelope & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < DeviceTypePricingTiersEnvelope > getThePricingTiersWithHttpInfo ( String dtid , Integer version ) throws ApiException { } }
com . squareup . okhttp . Call call = getThePricingTiersValidateBeforeCall ( dtid , version , null , null ) ; Type localVarReturnType = new TypeToken < DeviceTypePricingTiersEnvelope > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class DelayQueue {
    /**
     * Retrieves and removes the head of this queue, waiting if necessary
     * until an element with an expired delay is available on this queue.
     * Uses the leader-follower pattern: at most one thread (the leader)
     * performs a timed wait for the head element; all others wait untimed
     * until signalled.
     *
     * @return the head of this queue
     * @throws InterruptedException {@inheritDoc}
     */
    public E take() throws InterruptedException {
        final ReentrantLock lock = this.lock;
        lock.lockInterruptibly();
        try {
            for (;;) {
                E first = q.peek();
                if (first == null)
                    // Queue is empty: wait until an element is offered.
                    available.await();
                else {
                    long delay = first.getDelay(NANOSECONDS);
                    if (delay <= 0L)
                        // Head has expired: remove and return it.
                        return q.poll();
                    first = null; // don't retain ref while waiting
                    if (leader != null)
                        // Another thread is already timing the head; wait untimed.
                        available.await();
                    else {
                        // Become the leader and do the timed wait for the head's delay.
                        Thread thisThread = Thread.currentThread();
                        leader = thisThread;
                        try {
                            available.awaitNanos(delay);
                        } finally {
                            // Relinquish leadership so another thread can take over.
                            if (leader == thisThread)
                                leader = null;
                        }
                    }
                }
            }
        } finally {
            // Wake a successor to become leader if elements remain and no leader exists.
            if (leader == null && q.peek() != null)
                available.signal();
            lock.unlock();
        }
    }
}
public class AuthHandler { /** * Check if the authentication process succeeded or failed based on the response status . */ private void checkIsAuthed ( final ChannelHandlerContext ctx , final ResponseStatus status ) { } }
if ( status . isSuccess ( ) ) { LOGGER . debug ( "Successfully authenticated against node {}" , ctx . channel ( ) . remoteAddress ( ) ) ; ctx . pipeline ( ) . remove ( this ) ; originalPromise ( ) . setSuccess ( ) ; ctx . fireChannelActive ( ) ; } else if ( status == AUTH_ERROR ) { originalPromise ( ) . setFailure ( new AuthenticationException ( "SASL Authentication Failure" ) ) ; } else { originalPromise ( ) . setFailure ( new AuthenticationException ( "Unhandled SASL auth status: " + status ) ) ; }
public class AuthenticationAPIClient { /** * Requests new Credentials using a valid Refresh Token . The received token will have the same audience and scope as first requested . How the new Credentials are requested depends on the { @ link Auth0 # isOIDCConformant ( ) } flag . * - If the instance is OIDC Conformant the endpoint will be / oauth / token with ' refresh _ token ' grant , and the response will include an id _ token and an access _ token if ' openid ' scope was requested when the refresh _ token was obtained . * - If the instance is not OIDC Conformant the endpoint will be / delegation with ' urn : ietf : params : oauth : grant - type : jwt - bearer ' grant , and the response will include an id _ token . * Example usage : * < pre > * { @ code * client . renewAuth ( " { refresh _ token } " ) * . addParameter ( " scope " , " openid profile email " ) * . start ( new BaseCallback < Credentials > ( ) { * { @ literal } Override * public void onSuccess ( Credentials payload ) { } * { @ literal } @ Override * public void onFailure ( AuthenticationException error ) { } * < / pre > * @ param refreshToken used to fetch the new Credentials . * @ return a request to start */ @ SuppressWarnings ( "WeakerAccess" ) public ParameterizableRequest < Credentials , AuthenticationException > renewAuth ( @ NonNull String refreshToken ) { } }
final Map < String , Object > parameters = ParameterBuilder . newBuilder ( ) . setClientId ( getClientId ( ) ) . setRefreshToken ( refreshToken ) . setGrantType ( auth0 . isOIDCConformant ( ) ? ParameterBuilder . GRANT_TYPE_REFRESH_TOKEN : ParameterBuilder . GRANT_TYPE_JWT ) . asDictionary ( ) ; HttpUrl url ; if ( auth0 . isOIDCConformant ( ) ) { url = HttpUrl . parse ( auth0 . getDomainUrl ( ) ) . newBuilder ( ) . addPathSegment ( OAUTH_PATH ) . addPathSegment ( TOKEN_PATH ) . build ( ) ; } else { url = HttpUrl . parse ( auth0 . getDomainUrl ( ) ) . newBuilder ( ) . addPathSegment ( DELEGATION_PATH ) . build ( ) ; } return factory . POST ( url , client , gson , Credentials . class , authErrorBuilder ) . addParameters ( parameters ) ;
public class HBCICallbackIOStreams { /** * Schreiben von Logging - Ausgaben in einen < code > PrintStream < / code > . Diese Methode implementiert die * Logging - Schnittstelle * des { @ link org . kapott . hbci . callback . HBCICallback } - Interfaces < / a > . Die Log - Informationen , * die dieser Methode übergeben werden , werden formatiert auf dem jeweiligen < code > outStream < / code > ausgegeben . * In dem * ausgegebenen String sind in enthalten das Log - Level der Message , ein Zeitstempel im * Format " < code > yyyy . MM . dd HH : mm : ss . SSS < / code > " , die Namen der ThreadGroup und des Threads , aus dem * heraus die Log - Message erzeugt wurde , der Klassenname der Klasse , welche die Log - Ausgabe * erzeugt hat sowie die eigentliche Log - Message */ public void log ( String msg , int level , Date date , StackTraceElement trace ) { } }
String line = createDefaultLogLine ( msg , level , date , trace ) ; getOutStream ( ) . println ( line ) ;
public class WebJarDeployer { /** * An accepted file was deleted . We remove the contained libraries from the list and delete the directory in * which the library was contained . * We can ' t open it to find the contained web jars , * to we need to use the ' source ' we set when the { @ link org . wisdom . resources . FileWebJarLib } instances were created . * @ param file the file */ @ Override public synchronized void onFileDelete ( File file ) { } }
// The file is already deleted , so we can ' t open it . // So we use the source . Set < FileWebJarLib > copy = new LinkedHashSet < > ( libs ) ; List < FileWebJarLib > toRemove = new ArrayList < > ( ) ; for ( FileWebJarLib lib : copy ) { if ( lib . source != null && lib . source . equals ( file . getName ( ) ) ) { // Found , remove it . libs . remove ( lib ) ; toRemove . add ( lib ) ; // Delete the directory FileUtils . deleteQuietly ( lib . root ) ; } } controller . removeWebJarLibs ( toRemove ) ;
public class Ifc4PackageImpl {
    /**
     * Lazily resolves and caches the EClass for IfcConstructionEquipmentResource
     * from the registered Ifc4 EPackage (classifier index 128 in the generated
     * package metadata).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcConstructionEquipmentResource() {
        if (ifcConstructionEquipmentResourceEClass == null) {
            // Look up by the fixed classifier position assigned at code generation time.
            ifcConstructionEquipmentResourceEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(128);
        }
        return ifcConstructionEquipmentResourceEClass;
    }
}
public class PrcBeginningInventoryLineCopy { /** * < p > Process entity request . < / p > * @ param pAddParam additional param , e . g . return this line ' s * document in " nextEntity " for farther process * @ param pRequestData Request Data * @ param pEntity Entity to process * @ return Entity processed for farther process or null * @ throws Exception - an exception */ @ Override public final BeginningInventoryLine process ( final Map < String , Object > pAddParam , final BeginningInventoryLine pEntityPb , final IRequestData pRequestData ) throws Exception { } }
BeginningInventoryLine entity = this . prcAccEntityPbCopy . process ( pAddParam , pEntityPb , pRequestData ) ; entity . setItsQuantity ( BigDecimal . ZERO ) ; entity . setItsCost ( BigDecimal . ZERO ) ; entity . setItsTotal ( BigDecimal . ZERO ) ; return entity ;
public class Graph { /** * Performs Difference on the vertex and edge sets of the input graphs * removes common vertices and edges . If a source / target vertex is removed , * its corresponding edge will also be removed * @ param graph the graph to perform difference with * @ return a new graph where the common vertices and edges have been removed */ public Graph < K , VV , EV > difference ( Graph < K , VV , EV > graph ) { } }
DataSet < Vertex < K , VV > > removeVerticesData = graph . getVertices ( ) ; return this . removeVertices ( removeVerticesData ) ;
public class DatabaseDAODefaultImpl { private DbDatum get_obj_property ( Database database , String name , String type , String propname ) throws DevFailed { } }
if ( ! database . isAccess_checked ( ) ) checkAccess ( database ) ; // Format input parameters as string array String [ ] array ; array = new String [ 1 ] ; array [ 0 ] = propname ; DbDatum [ ] data = get_obj_property ( database , name , type , array ) ; return data [ 0 ] ;
public class AbstractCentralDogmaBuilder { /** * Adds the host name ( or IP address ) and the port number of the Central Dogma server . * @ param host the host name or IP address of the Central Dogma server * @ param port the port number of the Central Dogma server */ public final B host ( String host , int port ) { } }
requireNonNull ( host , "host" ) ; checkArgument ( ! host . startsWith ( "group:" ) , "host: %s (must not start with 'group:')" , host ) ; checkArgument ( port >= 1 && port < 65536 , "port: %s (expected: 1 .. 65535)" , port ) ; final InetSocketAddress addr = newEndpoint ( host , port ) ; checkState ( selectedProfile == null , "profile() and host() cannot be used together." ) ; hosts = ImmutableSet . < InetSocketAddress > builder ( ) . addAll ( hosts ) . add ( addr ) . build ( ) ; return self ( ) ;
public class Micronaut { /** * Run the application for the given arguments . * @ param classes The application classes * @ param args The arguments * @ return The { @ link ApplicationContext } */ public static ApplicationContext run ( Class [ ] classes , String ... args ) { } }
return new Micronaut ( ) . classes ( classes ) . args ( args ) . start ( ) ;
public class cmppolicylabel { /** * Use this API to fetch all the cmppolicylabel resources that are configured on netscaler . */ public static cmppolicylabel [ ] get ( nitro_service service ) throws Exception { } }
cmppolicylabel obj = new cmppolicylabel ( ) ; cmppolicylabel [ ] response = ( cmppolicylabel [ ] ) obj . get_resources ( service ) ; return response ;
public class GeoPackageValidate { /** * Check the GeoPackage for the minimum required tables * @ param geoPackage * GeoPackage * @ return true if has minimum tables */ public static boolean hasMinimumTables ( GeoPackageCore geoPackage ) { } }
boolean hasMinimum ; try { hasMinimum = geoPackage . getSpatialReferenceSystemDao ( ) . isTableExists ( ) && geoPackage . getContentsDao ( ) . isTableExists ( ) ; } catch ( SQLException e ) { throw new GeoPackageException ( "Failed to check for required minimum GeoPackage tables. GeoPackage Name: " + geoPackage . getName ( ) ) ; } return hasMinimum ;
public class DateUtils {
    /**
     * Checks whether the string input represents a valid month.
     *
     * @param monthString user input representing the month; may be null
     * @return {@code true} if the string is a number between "01" and "12" inclusive,
     *         {@code false} otherwise (including null and non-numeric input)
     */
    static boolean isValidMonth(String monthString) {
        if (monthString == null) {
            return false;
        }
        final int month;
        try {
            month = Integer.parseInt(monthString);
        } catch (NumberFormatException e) {
            // Non-numeric input is simply invalid.
            return false;
        }
        return month >= 1 && month <= 12;
    }
}
public class XPath { /** * Upon failure to find element , the default behaviour is to throw an exception . */ public OMElement getElementFrom ( OMNode element , String expression ) throws XmlException { } }
return getElementFrom ( element , namespaces , expression , /* accept failure ? */ false ) ;
public class OperationsApi {
    /**
     * Gets users asynchronously.
     * Fetches [CfgPerson](https://docs.genesys.com/Documentation/PSDK/latest/ConfigLayerRef/CfgPerson)
     * objects based on the specified filters by unpacking the search-parameter object
     * into the positional overload.
     *
     * @param searchParams the search parameters: limit, offset, order, sortBy, filterName,
     *        filterParameters, roles, skills, userEnabled, userValid
     * @param callback called when the results are returned asynchronously; takes one
     *        parameter: Map&lt;String, Object&gt; results
     * @throws ProvisioningApiException if the call is unsuccessful
     */
    public void getUsersAsync(UserSearchParams searchParams, AsyncCallback callback) throws ProvisioningApiException {
        // Argument order must match the positional overload exactly.
        getUsersAsync(searchParams.getLimit(), searchParams.getOffset(),
                searchParams.getOrder(), searchParams.getSortBy(),
                searchParams.getFilterName(), searchParams.getFilterParameters(),
                searchParams.getRoles(), searchParams.getSkills(),
                searchParams.getUserEnabled(), searchParams.getUserValid(), callback);
    }
}
public class JSON { /** * Read a reconfiguration plan from a file . * A file ending with ' . gz ' is uncompressed first * @ param f the file to parse * @ return the resulting plan * @ throws IllegalArgumentException if an error occurred while reading the file */ public static ReconfigurationPlan readReconfigurationPlan ( File f ) { } }
try ( Reader in = makeIn ( f ) ) { return readReconfigurationPlan ( in ) ; } catch ( IOException e ) { throw new IllegalArgumentException ( e ) ; }
public class IntentsClient { /** * Returns the list of all intents in the specified agent . * < p > Sample code : * < pre > < code > * try ( IntentsClient intentsClient = IntentsClient . create ( ) ) { * ProjectAgentName parent = ProjectAgentName . of ( " [ PROJECT ] " ) ; * String languageCode = " " ; * for ( Intent element : intentsClient . listIntents ( parent , languageCode ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param parent Required . The agent to list all intents from . Format : ` projects / & lt ; Project * ID & gt ; / agent ` . * @ param languageCode Optional . The language to list training phrases , parameters and rich * messages for . If not specified , the agent ' s default language is used . [ Many * languages ] ( https : / / cloud . google . com / dialogflow - enterprise / docs / reference / language ) are * supported . Note : languages must be enabled in the agent before they can be used . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListIntentsPagedResponse listIntents ( ProjectAgentName parent , String languageCode ) { } }
ListIntentsRequest request = ListIntentsRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . setLanguageCode ( languageCode ) . build ( ) ; return listIntents ( request ) ;
public class AssetVersion {
    /**
     * Creates an AssetVersionCreator to execute create.
     *
     * @param pathServiceSid the service_sid
     * @param pathAssetSid the asset_sid
     * @param path the path
     * @param visibility the visibility
     * @return AssetVersionCreator capable of executing the create
     */
    public static AssetVersionCreator creator(final String pathServiceSid,
                                              final String pathAssetSid,
                                              final String path,
                                              final AssetVersion.Visibility visibility) {
        return new AssetVersionCreator(pathServiceSid, pathAssetSid, path, visibility);
    }
}
public class TransitNetworkSelectionImpl { /** * This method is used in encode . Encodes digits part . This is because * @ param bos - where digits will be encoded * @ return - number of bytes encoded */ public int encodeDigits ( ByteArrayOutputStream bos ) { } }
boolean isOdd = this . oddFlag == _FLAG_ODD ; byte b = 0 ; int count = ( ! isOdd ) ? address . length ( ) : address . length ( ) - 1 ; int bytesCount = 0 ; for ( int i = 0 ; i < count - 1 ; i += 2 ) { String ds1 = address . substring ( i , i + 1 ) ; String ds2 = address . substring ( i + 1 , i + 2 ) ; int d1 = Integer . parseInt ( ds1 , 16 ) ; int d2 = Integer . parseInt ( ds2 , 16 ) ; b = ( byte ) ( d2 << 4 | d1 ) ; bos . write ( b ) ; bytesCount ++ ; } if ( isOdd ) { String ds1 = address . substring ( count , count + 1 ) ; int d = Integer . parseInt ( ds1 ) ; b = ( byte ) ( d & 0x0f ) ; bos . write ( b ) ; bytesCount ++ ; } return bytesCount ;
public class Database {
    /**
     * Deletes the given properties of the specified device from the database.
     * Delegates to the configured database DAO.
     *
     * @param name the device name whose properties are to be deleted
     * @param properties the properties to delete
     * @throws DevFailed if the underlying database call fails
     */
    public void delete_device_property(String name, DbDatum[] properties) throws DevFailed {
        databaseDAO.delete_device_property(this, name, properties);
    }
}
public class LoggingConfiguration { /** * Reconfigure log4j at run - time . * @ param name * - The name of the property that changed * @ param value * - The new value of the property * @ throws FileNotFoundException * @ throws ConfigurationException */ private void reconfigure ( ) throws ConfigurationException , FileNotFoundException { } }
Properties consolidatedProps = getConsolidatedProperties ( ) ; logger . info ( "The root category for log4j.rootCategory now is {}" , consolidatedProps . getProperty ( LOG4J_ROOT_CATEGORY ) ) ; logger . info ( "The root category for log4j.rootLogger now is {}" , consolidatedProps . getProperty ( LOG4J_ROOT_LOGGER ) ) ; // Pause the async appenders so that the appenders are not accessed for ( String originalAppenderName : originalAsyncAppenderNameMap . keySet ( ) ) { MessageBatcher asyncBatcher = BatcherFactory . getBatcher ( AsyncAppender . class . getName ( ) + "." + originalAppenderName ) ; if ( asyncBatcher == null ) { continue ; } asyncBatcher . pause ( ) ; } // Configure log4j using the new set of properties configureLog4j ( consolidatedProps ) ; // Resume all the batchers to continue logging for ( String originalAppenderName : originalAsyncAppenderNameMap . keySet ( ) ) { MessageBatcher asyncBatcher = BatcherFactory . getBatcher ( AsyncAppender . class . getName ( ) + "." + originalAppenderName ) ; if ( asyncBatcher == null ) { continue ; } asyncBatcher . resume ( ) ; }
public class EntityMention {
    /**
     * Getter for the "head" feature of this annotation (generated UIMA accessor):
     * verifies the feature exists in the type system, then dereferences the
     * feature-structure reference stored at this annotation's address.
     *
     * @generated
     * @return value of the head feature
     */
    public Head getHead() {
        // Feature-existence check is only active when the type-system test flag is set.
        if (EntityMention_Type.featOkTst && ((EntityMention_Type) jcasType).casFeat_head == null)
            jcasType.jcas.throwFeatMissing("head", "de.julielab.jules.types.ace.EntityMention");
        return (Head) (jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr,
                ((EntityMention_Type) jcasType).casFeatCode_head)));
    }
}
public class Span { /** * Returns true if the bounds of the other text do not extend outside the bounds of this text . * @ param other The other text to check if this one encloses * @ return True if the two texts are in the same document and this text encloses the other , False otherwise */ public boolean encloses ( Span other ) { } }
return other != null && other . start ( ) >= this . start ( ) && other . end ( ) < this . end ( ) ;
public class BpmnParse { public void parseDiagramInterchangeElements ( ) { } }
// Multiple BPMNDiagram possible List < Element > diagrams = rootElement . elementsNS ( BPMN_DI_NS , "BPMNDiagram" ) ; if ( ! diagrams . isEmpty ( ) ) { for ( Element diagramElement : diagrams ) { parseBPMNDiagram ( diagramElement ) ; } }
public class LogNormalProcess {
    /**
     * Returns the realization of the process at a certain time index.
     *
     * @param timeIndex time index at which the process should be observed
     * @return a vector of process realizations (one per path)
     */
    public RandomVariableInterface[] getProcessValue(int timeIndex) {
        // Thread safe lazy initialization: compute the full discrete process once,
        // guarded by this object's monitor so concurrent callers don't duplicate work.
        synchronized (this) {
            if (discreteProcess == null || discreteProcess.length == 0) {
                doPrecalculateProcess();
            }
        }
        // Return value of process at the requested time index.
        return discreteProcess[timeIndex];
    }
}
public class StringUtils {
    /**
     * Strips a String of its ISO control characters.
     * Entity references are first resolved via {@code replaceEntities}; then leading and
     * trailing control characters are removed, and each interior run of control
     * characters is collapsed into a single space.
     *
     * @param rawValue the String that should be stripped; may be null
     * @return a new String with control characters removed/collapsed, the entity-resolved
     *         String unmodified if it contains no ISO control characters, or null if
     *         {@code rawValue} is null
     */
    public static String stripControlCharacters(String rawValue) {
        if (rawValue == null) {
            return null;
        }
        // Resolve entity references first (sibling helper defined elsewhere in this class).
        String value = replaceEntities(rawValue);
        // Fast path: scan for any control character; if none, return unchanged.
        boolean hasControlChars = false;
        for (int i = value.length() - 1; i >= 0; i--) {
            if (Character.isISOControl(value.charAt(i))) {
                hasControlChars = true;
                break;
            }
        }
        if (!hasControlChars) {
            return value;
        }
        StringBuilder buf = new StringBuilder(value.length());
        int i = 0;
        // Skip initial control characters (i.e. left trim)
        for (; i < value.length(); i++) {
            if (!Character.isISOControl(value.charAt(i))) {
                break;
            }
        }
        // Copy non control characters and substitute control characters with
        // a space. The last control characters are trimmed: a pending run of
        // control characters only becomes a space when a normal character follows,
        // so a trailing run is dropped entirely (right trim).
        boolean suppressingControlChars = false;
        for (; i < value.length(); i++) {
            if (Character.isISOControl(value.charAt(i))) {
                suppressingControlChars = true;
                continue;
            } else {
                if (suppressingControlChars) {
                    suppressingControlChars = false;
                    buf.append(' ');
                }
                buf.append(value.charAt(i));
            }
        }
        return buf.toString();
    }
}
public class PropertyLoader {
    /**
     * Registers a custom converter for the given type.
     *
     * @param converter the converter to use for values of {@code type}
     * @param type the target type the converter produces
     * @param <T> the converted type
     * @return this loader, for method chaining
     */
    public <T> PropertyLoader register(Converter<T> converter, Class<T> type) {
        manager.register(type, converter);
        return this;
    }
}
public class Clock {
    /**
     * Creates the default skin for this clock based on the configured skin type.
     * Unknown types (and CLOCK) fall back to the standard ClockSkin.
     * ****** Style related ******
     */
    @Override
    protected Skin createDefaultSkin() {
        switch (skinType) {
            case YOTA2:      return new ClockSkin(Clock.this);
            case LCD:        return new LcdClockSkin(Clock.this);
            case PEAR:       return new PearClockSkin(Clock.this);
            case PLAIN:      return new PlainClockSkin(Clock.this);
            case DB:         return new DBClockSkin(Clock.this);
            case FAT:        return new FatClockSkin(Clock.this);
            case ROUND_LCD:  return new RoundLcdClockSkin(Clock.this);
            case SLIM:       return new SlimClockSkin(Clock.this);
            case MINIMAL:    return new MinimalClockSkin(Clock.this);
            case DIGITAL:    return new DigitalClockSkin(Clock.this);
            case TEXT:       return new TextClockSkin(Clock.this);
            case DESIGN:     return new DesignClockSkin(Clock.this);
            case INDUSTRIAL: return new IndustrialClockSkin(Clock.this);
            case TILE:       return new TileClockSkin(Clock.this);
            // Note: DIGI intentionally shares DigitalClockSkin with DIGITAL.
            case DIGI:       return new DigitalClockSkin(Clock.this);
            case MORPHING:   return new MorphingClockSkin(Clock.this);
            case CLOCK:
            default:         return new ClockSkin(Clock.this);
        }
    }
}
public class DockerRule { /** * Ensures that data - image exist on docker . * Rebuilds data image if content ( plugin hashes ) doesn ' t match . * @ param forceUpdate rebuild data image . */ public String getDataImage ( boolean forceUpdate , final Map < String , File > pluginFiles , String imageName ) throws IOException , SettingsBuildingException , InterruptedException { } }
String existedDataImage = null ; final List < Image > images = getDockerCli ( ) . listImagesCmd ( ) . withShowAll ( true ) . exec ( ) ; OUTER : for ( Image image : images ) { final String [ ] repoTags = image . getRepoTags ( ) ; if ( nonNull ( repoTags ) ) { for ( String repoTag : repoTags ) { if ( repoTag . equals ( imageName ) ) { existedDataImage = image . getId ( ) ; break OUTER ; } } } } if ( nonNull ( existedDataImage ) && ( forceUpdate || ! isActualDataImage ( pluginFiles , existedDataImage ) ) ) { LOG . info ( "Removing data-image." ) ; // TODO https : / / github . com / docker - java / docker - java / issues / 398 getDockerCli ( ) . removeImageCmd ( existedDataImage ) . withForce ( true ) . exec ( ) ; existedDataImage = null ; } if ( isNull ( existedDataImage ) ) { LOG . debug ( "Preparing plugin files for" ) ; // final Set < Artifact > artifactResults = resolvePluginsFor ( THIS _ PLUGIN ) ; // LOG . debug ( " Resolved plugins : { } " , artifactResults ) ; // / / System . out . println ( artifactResults ) ; // artifactResults . stream ( ) . forEach ( artifact - > // pluginFiles . put ( // / / { @ link hudson . PluginManager . copyBundledPlugin ( ) } // / / artifact . getArtifactId ( ) + " . " + artifact . getExtension ( ) , // artifact . getArtifactId ( ) + " . jpi " , // artifact . getFile ( ) existedDataImage = buildImage ( pluginFiles ) ; } return existedDataImage ;
public class QrUpdate_DDRM {
    /**
     * Adjusts the values of the Q and R matrices to take into account the effects of
     * inserting a row into the 'A' matrix at the specified location. This operation
     * requires about 6mn + O(n) flops. If Q and/or R does not have enough data elements
     * to grow then an exception is thrown.
     * The adjustment is done by computing a series of planar Givens rotations that make
     * the adjusted R matrix upper triangular again; these are then used to modify Q.
     *
     * @param Q The Q matrix which is to be modified, must be big enough to grow. Must be n by n. Is modified.
     * @param R The R matrix which is to be modified, must be big enough to grow. Must be m by n. Is modified.
     * @param row The row being inserted. Not modified.
     * @param rowIndex Which row index it is to be inserted at.
     * @param resizeR Should the number of rows in R be changed? The additional rows are all zero.
     */
    public void addRow(DMatrixRMaj Q, DMatrixRMaj R, double[] row, int rowIndex, boolean resizeR) {
        // memory management and check preconditions
        setQR(Q, R, 1);
        m_m = m + 1;
        if (Q.data.length < m_m * m_m)
            throw new IllegalArgumentException("Q matrix does not have enough data to grow");
        if (resizeR && R.data.length < m_m * n)
            throw new IllegalArgumentException("R matrix does not have enough data to grow");
        if (resizeR)
            R.reshape(m_m, n, false);
        U_tran.reshape(m_m, m_m, false);
        // apply givens rotation to the first two rows of the augmented R matrix
        applyFirstGivens(row);
        applyLaterGivens();
        // compute new Q matrix
        updateInsertQ(rowIndex);
        // discard the references since they are no longer needed
        this.Q = this.R = null;
    }
}
public class StreamAPI { /** * Returns an object ( item or status ) as a stream object . This is useful * when a new status has been posted and should be rendered directly in the * stream without reloading the entire stream . * @ param reference * The reference to the item * @ return The stream object */ public StreamObject getStreamObject ( Reference reference ) { } }
return getResourceFactory ( ) . getApiResource ( "/stream/" + reference . toURLFragment ( false ) ) . get ( StreamObject . class ) ;
public class EpollDatagramChannelConfig { /** * Set the SO _ REUSEPORT option on the underlying Channel . This will allow to bind multiple * { @ link EpollSocketChannel } s to the same port and so accept connections with multiple threads . * Be aware this method needs be called before { @ link EpollDatagramChannel # bind ( java . net . SocketAddress ) } to have * any affect . */ public EpollDatagramChannelConfig setReusePort ( boolean reusePort ) { } }
try { ( ( EpollDatagramChannel ) channel ) . socket . setReusePort ( reusePort ) ; return this ; } catch ( IOException e ) { throw new ChannelException ( e ) ; }
public class Journal {
    /**
     * Upgrades the local image storage with the given namespace.
     *
     * @param nsInfo namespace info; must have a non-zero (initialized) namespace ID
     * @throws IOException if the underlying image storage upgrade fails
     */
    private void doUpgradeImage(NamespaceInfo nsInfo) throws IOException {
        // Refuse to upgrade with an uninitialized (zero) namespace ID.
        Preconditions.checkState(nsInfo.getNamespaceID() != 0,
                "can't upgrade with uninitialized namespace info: %s",
                nsInfo.toColonSeparatedString());
        LOG.info("Upgrading image " + this.getJournalId() + " with namespace info: ("
                + nsInfo.toColonSeparatedString() + ")");
        // clear the digest for the most recent image, it might change during upgrade
        checkpointImageDigests.remove(mostRecentCheckpointTxid);
        imageStorage.doUpgrade(nsInfo);
    }
}
public class MySQLHelper { /** * Build the final connection string from the base JDBC URL and an optional * set of connection properties . * @ param sJdbcURL * The base JDBC URL . May neither be < code > null < / code > nor empty and * must started with { @ link CJDBC _ MySQL # CONNECTION _ PREFIX } * @ param aConnectionProperties * An map with all connection properties . May be < code > null < / code > . * @ return The final JDBC connection string to be used . Never * < code > null < / code > or empty */ @ Nonnull @ Nonempty public static String buildJDBCString ( @ Nonnull @ Nonempty final String sJdbcURL , @ Nullable final Map < EMySQLConnectionProperty , String > aConnectionProperties ) { } }
ValueEnforcer . notEmpty ( sJdbcURL , "JDBC URL" ) ; ValueEnforcer . isTrue ( sJdbcURL . startsWith ( CJDBC_MySQL . CONNECTION_PREFIX ) , "The JDBC URL '" + sJdbcURL + "' does not seem to be a MySQL connection string!" ) ; // Add the connection properties to the JDBC string final SimpleURL aURL = new SimpleURL ( sJdbcURL ) ; if ( aConnectionProperties != null ) for ( final Map . Entry < EMySQLConnectionProperty , String > aEntry : aConnectionProperties . entrySet ( ) ) aURL . add ( aEntry . getKey ( ) . getName ( ) , aEntry . getValue ( ) ) ; return aURL . getAsStringWithoutEncodedParameters ( ) ;
public class ViewServer { /** * Starts the server . * @ return True if the server was successfully created , or false if it already exists . * @ throws IOException If the server cannot be created . * @ see # stop ( ) * @ see # isRunning ( ) * @ see WindowManagerService # startViewServer ( int ) */ public boolean start ( ) throws IOException { } }
if ( mThread != null ) { return false ; } mThread = new Thread ( this , "Local View Server [port=" + mPort + "]" ) ; mThreadPool = Executors . newFixedThreadPool ( VIEW_SERVER_MAX_CONNECTIONS ) ; mThread . start ( ) ; return true ;
public class AuthleteApiImpl {
    /**
     * Calls an API with HTTP DELETE method and Service credentials.
     *
     * @param path the relative path of the API to call
     * @param queryParams query parameters to append to the request
     * @throws AuthleteApiException if the API call fails
     */
    private <TResponse> void callServiceDeleteApi(String path, Map<String, String> queryParams) throws AuthleteApiException {
        // Delegate to the generic DELETE helper using service-level credentials.
        callDeleteApi(mServiceCredentials, path, queryParams);
    }
}
public class DomainsInner {
    /**
     * Get domain name recommendations based on keywords.
     *
     * @param nextPageLink the NextLink from the previous successful call to the List operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;NameIdentifierInner&gt; object
     */
    public Observable<Page<NameIdentifierInner>> listRecommendationsNextAsync(final String nextPageLink) {
        // Unwrap the ServiceResponse wrapper and emit only the page body.
        return listRecommendationsNextWithServiceResponseAsync(nextPageLink).map(new Func1<ServiceResponse<Page<NameIdentifierInner>>, Page<NameIdentifierInner>>() {
            @Override
            public Page<NameIdentifierInner> call(ServiceResponse<Page<NameIdentifierInner>> response) {
                return response.body();
            }
        });
    }
}
public class PatchVal {
    /**
     * Works together with patches to resolve() on LocalDeclaration itself to
     * just-in-time replace the 'val' vartype with the right one.
     *
     * @param expr the initializer expression whose type is needed
     * @param scope the scope to resolve the expression in
     * @return the resolved type, or null if ecj failed internally
     */
    public static TypeBinding skipResolveInitializerIfAlreadyCalled(Expression expr, BlockScope scope) {
        // If ecj already resolved this expression, don't resolve it a second time.
        if (expr.resolvedType != null) return expr.resolvedType;
        try {
            return expr.resolveType(scope);
        } catch (NullPointerException e) {
            // Deliberately swallowed: ecj can NPE internally on broken sources.
            return null;
        } catch (ArrayIndexOutOfBoundsException e) {
            // This will occur internally due to for example 'val x = mth("X");', where mth takes 2 arguments.
            return null;
        }
    }
}
public class AnimateableDialogDecorator {
    /**
     * Creates and returns an animation listener, which allows to hide the
     * animated view once the animation is finished.
     *
     * @param animatedView the animated view as an instance of the class
     *        {@link View}; may not be null
     * @param listener the listener to notify as an instance of the type
     *        {@link AnimatorListener}, or null if no listener should be notified
     * @return the animation listener, which has been created
     */
    private AnimatorListener createHideAnimationListener(@NonNull final View animatedView, @Nullable final AnimatorListener listener) {
        return new AnimatorListener() {
            @Override
            public void onAnimationStart(final Animator animation) {
                if (listener != null) {
                    listener.onAnimationStart(animation);
                }
            }

            @Override
            public void onAnimationEnd(final Animator animation) {
                // Hide the view once the animation completes, then forward the event.
                animatedView.setVisibility(View.GONE);
                if (listener != null) {
                    listener.onAnimationEnd(animation);
                }
            }

            @Override
            public void onAnimationCancel(final Animator animation) {
                if (listener != null) {
                    listener.onAnimationCancel(animation);
                }
            }

            @Override
            public void onAnimationRepeat(final Animator animation) {
                if (listener != null) {
                    listener.onAnimationRepeat(animation);
                }
            }
        };
    }
}
public class ObjectInputStreamWithLoader { /** * Use the given ClassLoader rather than using the system class */ protected Class resolveClass ( ObjectStreamClass classDesc ) throws IOException , ClassNotFoundException { } }
String cname = classDesc . getName ( ) ; if ( cname . startsWith ( "[" ) ) { // An array Class component ; // component class int dcount ; // dimension for ( dcount = 1 ; cname . charAt ( dcount ) == '[' ; dcount ++ ) { ; } if ( cname . charAt ( dcount ) == 'L' ) { component = loader . loadClass ( cname . substring ( dcount + 1 , cname . length ( ) - 1 ) ) ; } else { if ( cname . length ( ) != dcount + 1 ) { throw new ClassNotFoundException ( cname ) ; // malformed } component = primitiveType ( cname . charAt ( dcount ) ) ; } int dim [ ] = new int [ dcount ] ; for ( int i = 0 ; i < dcount ; i ++ ) { dim [ i ] = 0 ; } return Array . newInstance ( component , dim ) . getClass ( ) ; } else { return loader . loadClass ( cname ) ; }
public class Expressions {
    /**
     * Create a new NumberExpression.
     *
     * @param expr expression of type Number
     * @return new NumberExpression
     */
    public static <T extends Number & Comparable<?>> NumberExpression<T> asNumber(Expression<T> expr) {
        Expression<T> underlyingMixin = ExpressionUtils.extract(expr);
        // Wrap the extracted expression in the matching Number* specialization.
        if (underlyingMixin instanceof PathImpl) {
            return new NumberPath<T>((PathImpl<T>) underlyingMixin);
        } else if (underlyingMixin instanceof OperationImpl) {
            return new NumberOperation<T>((OperationImpl<T>) underlyingMixin);
        } else if (underlyingMixin instanceof TemplateExpressionImpl) {
            return new NumberTemplate<T>((TemplateExpressionImpl<T>) underlyingMixin);
        } else {
            // Fallback: anonymous adapter that delegates visitation to the mixin.
            return new NumberExpression<T>(underlyingMixin) {
                private static final long serialVersionUID = -8712299418891960222L;

                @Override
                public <R, C> R accept(Visitor<R, C> v, C context) {
                    return this.mixin.accept(v, context);
                }
            };
        }
    }
}
public class CollectionHelp {
    /**
     * Set format string and locale for the calling thread. List items are
     * formatted using these. It is good practice to call removeFormat after use.
     *
     * @param format the format string applied to each item
     * @param locale the locale used when formatting
     * @see #removeFormat()
     * @see java.lang.String#format(java.util.Locale, java.lang.String, java.lang.Object...)
     */
    public static final void setFormat(String format, Locale locale) {
        // Stored in thread-locals, so they only affect the calling thread.
        threadFormat.set(format);
        threadLocale.set(locale);
    }
}
public class Dispatching { /** * Adapts a function to an consumer . * @ param < T > the function parameter type * @ param < R > the function return type * @ param function the function to be adapted * @ return the adapted consumer */ public static < T , R > Consumer < T > consumer ( Function < T , R > function ) { } }
dbc . precondition ( function != null , "cannot adapt a null function" ) ; return function :: apply ;
public class JmsBytesMessageImpl {
    /**
     * Write a byte array to the stream message.
     *
     * @param value the byte array to be written
     * @exception MessageNotWriteableException if message is in read-only mode
     * @exception JMSException if JMS fails to write the message due to some
     *            internal JMS error
     */
    @Override
    public void writeBytes(byte[] value) throws JMSException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "writeBytes", value);
        try {
            // Check that we are in write mode
            checkBodyWriteable("writeBytes");
            // This method has different behaviours for storing the byte array, based on whether the producer
            // has promised not to mess with data after it's been written...
            if (producerWontModifyPayloadAfterSet) {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Producer has promised not to modify the payload after setting it in the message - check if they've violated that promise");
                // The producer has promised not to modify the payload after it's been set, so check
                // the flag to see whether this is the first, or a subsequent call to writeBytes.
                if (!writeByteArrayCalled) {
                    if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                        SibTr.debug(this, tc, "This is the first call to writeBytes(byte[] value) - storing the byte array reference directly in the underlying MFP object");
                    writeByteArrayCalled = true;
                    // Set the data buffer & MFP data references to the value param & reset
                    // the dataStart attribute
                    jsBytesMsg.setBytes(value);
                    dataBuffer = value;
                    dataStart = 0;
                } else {
                    // The producer has promised not to modify the payload after it's been set, but the producer has
                    // been naughty by calling this method more than once! Throw exception to admonish the producer.
                    throw (JMSException) JmsErrorUtils.newThrowable(
                            IllegalStateException.class, // JMS illegal state exception
                            "PROMISE_BROKEN_EXCEPTION_CWSIA0511", // promise broken
                            null, // No inserts
                            null, // no cause - original exception
                            "JmsBytesMessageImpl.writeBytes#3", // Probe ID
                            this, // Caller (?)
                            tc); // Trace component
                }
            } else {
                // Producer makes no promises relating to accessing the message payload, so
                // make a copy of the byte array at this point to ensure the message is transmitted safely.
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                    SibTr.debug(this, tc, "Producer 'payload modification' promise is not in place - make a copy of the byte array");
                // Write the byte array to the output stream
                // We don't check for value == null as JDK1.1.6 DataOutputStream doesn't
                writeStream.write(value, 0, value.length);
                // Ensure that the new data gets exported when the time comes.
                bodySetInJsMsg = false;
            }
            // Invalidate the cached toString object, because the message payload has changed.
            cachedBytesToString = null;
        } catch (IOException ex) {
            // No FFDC code needed
            // (exception repro'ed from 3-param writeBytes method)
            throw (JMSException) JmsErrorUtils.newThrowable(ResourceAllocationException.class, "WRITE_PROBLEM_CWSIA0186", null, ex, "JmsBytesMessageImpl.writeBytes#4", this, tc);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "writeBytes");
    }
}
public class ModulePlaybackImpl {
    /**
     * {@inheritDoc}
     *
     * <p>Factory method: builds a SetSubtitleVodNonDvdOperation bound to this
     * module's operation factory.
     */
    public SetSubtitleVodNonDvdOperation buildSetSubtitleVodNonDvdOperation(String track, String color, int fontSize, int position, String encoding, String timeOffset) {
        return new SetSubtitleVodNonDvdOperation(getOperationFactory(), track, color, fontSize, position, encoding, timeOffset);
    }
}
public class EncryptionConfigParser { /** * Get the type of keystore to instantiate */ public static String getKeystoreType ( Map < String , Object > parameters ) { } }
String type = ( String ) parameters . get ( ENCRYPTION_KEYSTORE_TYPE_KEY ) ; if ( type == null ) { type = ENCRYPTION_KEYSTORE_TYPE_KEY_DEFAULT ; } return type ;
public class KeyExchange {
    /**
     * Returns a lazily-instantiated, cached {@link Cipher} for this instance's
     * cipher name and initialises it in the requested mode with the given key.
     *
     * @param mode one of {@link Cipher#ENCRYPT_MODE} or {@link Cipher#DECRYPT_MODE}
     * @param key either a {@link PublicKey} or a {@link PrivateKey} to be used
     *        with the {@link Cipher}
     * @return the initialised, cached {@link Cipher} instance
     */
    private Cipher getCipher(int mode, Key key) {
        if (cipher == null) {
            try {
                // Get a Cipher instance:
                cipher = Cipher.getInstance(cipherName);
            } catch (NoSuchAlgorithmException e) {
                // If a security provider can still be registered, retry once recursively.
                if (SecurityProvider.addProvider()) {
                    cipher = getCipher(mode, key);
                } else {
                    throw new IllegalStateException("Algorithm unavailable: " + cipherName, e);
                }
            } catch (NoSuchPaddingException e) {
                throw new IllegalStateException("Padding method unavailable: " + cipherName, e);
            }
        }
        // Initialise the Cipher
        try {
            cipher.init(mode, key);
        } catch (InvalidKeyException e) {
            throw new IllegalArgumentException("Invalid key used to initialise cipher.", e);
        }
        return cipher;
    }
}
public class JobsInner {
    /**
     * Gets a single page of Jobs within the specified Experiment.
     *
     * @param resourceGroupName name of the resource group to which the resource belongs
     * @param workspaceName the name of the workspace (1-64 alphanumeric, dash or underscore characters)
     * @param experimentName the name of the experiment (1-64 alphanumeric, dash or underscore characters)
     * @param jobsListByExperimentOptions additional parameters for the operation
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the PagedList&lt;JobInner&gt; object wrapped in {@link ServiceResponse} if successful
     */
    public Observable<ServiceResponse<Page<JobInner>>> listByExperimentSinglePageAsync(final String resourceGroupName, final String workspaceName, final String experimentName, final JobsListByExperimentOptions jobsListByExperimentOptions) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (workspaceName == null) {
            throw new IllegalArgumentException("Parameter workspaceName is required and cannot be null.");
        }
        if (experimentName == null) {
            throw new IllegalArgumentException("Parameter experimentName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(jobsListByExperimentOptions);
        // Optional page-size limit comes from the options object, if supplied.
        Integer maxResults = null;
        if (jobsListByExperimentOptions != null) {
            maxResults = jobsListByExperimentOptions.maxResults();
        }
        return service.listByExperiment(resourceGroupName, workspaceName, experimentName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), maxResults, this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<JobInner>>>>() {
            @Override
            public Observable<ServiceResponse<Page<JobInner>>> call(Response<ResponseBody> response) {
                try {
                    ServiceResponse<PageImpl<JobInner>> result = listByExperimentDelegate(response);
                    return Observable.just(new ServiceResponse<Page<JobInner>>(result.body(), result.response()));
                } catch (Throwable t) {
                    return Observable.error(t);
                }
            }
        });
    }
}
public class GroupApi { /** * Get a list of groups ( As user : my groups , as admin : all groups ) and in the specified page range . * < pre > < code > GitLab Endpoint : GET / groups < / code > < / pre > * @ param page the page to get * @ param perPage the number of Group instances per page * @ return the list of groups viewable by the authenticated userin the specified page range * @ throws GitLabApiException if any exception occurs */ public List < Group > getGroups ( int page , int perPage ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , getPageQueryParams ( page , perPage ) , "groups" ) ; return ( response . readEntity ( new GenericType < List < Group > > ( ) { } ) ) ;
public class EthereumUtil { /** * Decodes an RLPList from the given ByteBuffer . This list may contain further RLPList and RLPElements that are decoded as well * @ param bb Bytebuffer containing an RLPList * @ return RLPList in case the byte stream represents a valid RLPList , null if not */ private static RLPList decodeRLPList ( ByteBuffer bb ) { } }
byte firstByte = bb . get ( ) ; int firstByteUnsigned = firstByte & 0xFF ; long payloadSize = - 1 ; if ( ( firstByteUnsigned >= 0xc0 ) && ( firstByteUnsigned <= 0xf7 ) ) { // length of the list in bytes int offsetSmallList = 0xc0 & 0xff ; payloadSize = ( long ) ( firstByteUnsigned ) - offsetSmallList ; } else if ( ( firstByteUnsigned >= 0xf8 ) && ( firstByteUnsigned <= 0xff ) ) { // read size of indicator ( size of the size ) int noOfBytesSize = firstByteUnsigned - 0xf7 ; byte [ ] indicator = new byte [ noOfBytesSize + 1 ] ; indicator [ 0 ] = firstByte ; bb . get ( indicator , 1 , noOfBytesSize ) ; // read the size of the data payloadSize = convertIndicatorToRLPSize ( indicator ) ; } else { LOG . error ( "Invalid RLP encoded list detected" ) ; } ArrayList < RLPObject > payloadList = new ArrayList < > ( ) ; if ( payloadSize > 0 ) { byte [ ] payload = new byte [ ( int ) payloadSize ] ; bb . get ( payload ) ; ByteBuffer payloadBB = ByteBuffer . wrap ( payload ) ; while ( payloadBB . remaining ( ) > 0 ) { switch ( EthereumUtil . detectRLPObjectType ( payloadBB ) ) { case EthereumUtil . RLP_OBJECTTYPE_ELEMENT : payloadList . add ( EthereumUtil . decodeRLPElement ( payloadBB ) ) ; break ; case EthereumUtil . RLP_OBJECTTYPE_LIST : payloadList . add ( EthereumUtil . decodeRLPList ( payloadBB ) ) ; break ; default : LOG . error ( "Unknown object type" ) ; } } } return new RLPList ( payloadList ) ;
public class RecordTypeBuilder { /** * Creates a record . Fails if any duplicate property names were added . * @ return The record type . */ public JSType build ( ) { } }
// If we have an empty record , simply return the object type . if ( isEmpty ) { return registry . getNativeObjectType ( JSTypeNative . OBJECT_TYPE ) ; } ImmutableSortedMap . Builder < String , RecordProperty > m = ImmutableSortedMap . naturalOrder ( ) ; m . putAll ( this . properties ) ; return new RecordType ( registry , m . build ( ) , isDeclared ) ;
public class BaseConvertToMessage { /** * Add the error properties from this message . * @ param properties * @ param errorsType */ public void addErrorsProperties ( Map < String , Object > properties , Object errorsType ) { } }
try { Method method = errorsType . getClass ( ) . getMethod ( "getErrors" , EMPTY_PARAMS ) ; if ( method != null ) { Object value = method . invoke ( errorsType , EMPTY_DATA ) ; if ( value instanceof List < ? > ) { for ( Object errorType : ( List < ? > ) value ) { method = errorType . getClass ( ) . getMethod ( "getShortText" , EMPTY_PARAMS ) ; if ( method != null ) { value = method . invoke ( errorType , EMPTY_DATA ) ; if ( value instanceof String ) properties . put ( "Error" , value ) ; } } } } } catch ( SecurityException e ) { e . printStackTrace ( ) ; } catch ( IllegalArgumentException e ) { e . printStackTrace ( ) ; } catch ( NoSuchMethodException e ) { e . printStackTrace ( ) ; } catch ( IllegalAccessException e ) { e . printStackTrace ( ) ; } catch ( InvocationTargetException e ) { e . printStackTrace ( ) ; }
public class SipServletMessageImpl { /** * This method tries to resolve header name - meaning if it is compact - it * returns full name , if its not , it returns passed value . * @ param headerName * @ return */ protected static String getFullHeaderName ( String headerName ) { } }
String fullName = null ; if ( JainSipUtils . HEADER_COMPACT_2_FULL_NAMES_MAPPINGS . containsKey ( headerName ) ) { fullName = JainSipUtils . HEADER_COMPACT_2_FULL_NAMES_MAPPINGS . get ( headerName ) ; } else { fullName = headerName ; } if ( logger . isDebugEnabled ( ) ) logger . debug ( "Fetching full header name for [" + headerName + "] returning [" + fullName + "]" ) ; return fullName ;
public class ClientFactory {
    /**
     * Uses the specified Shiro {@link CacheManager} instance as the Stormpath
     * SDK Client's CacheManager, allowing both Shiro and the Stormpath SDK to
     * share the same cache mechanism.
     *
     * <p>If for some reason you don't want to share the same cache mechanism,
     * you can explicitly set a Stormpath SDK-only
     * {@link com.stormpath.sdk.cache.CacheManager CacheManager} instance via
     * the {@link #setStormpathCacheManager(com.stormpath.sdk.cache.CacheManager) setStormpathCacheManager} method.
     *
     * @param cacheManager the Shiro CacheManager to use for the Stormpath SDK Client's caching needs
     * @since 0.4.0
     * @see #setStormpathCacheManager(com.stormpath.sdk.cache.CacheManager)
     */
    public void setCacheManager(CacheManager cacheManager) {
        // Adapt the Shiro cache manager to the Stormpath SDK interface and install it.
        com.stormpath.sdk.cache.CacheManager stormpathCacheManager = new ShiroCacheManager(cacheManager);
        this.clientBuilder.setCacheManager(stormpathCacheManager);
    }
}
public class CommerceShipmentItemModelImpl { /** * Converts the soap model instances into normal model instances . * @ param soapModels the soap model instances to convert * @ return the normal model instances */ public static List < CommerceShipmentItem > toModels ( CommerceShipmentItemSoap [ ] soapModels ) { } }
if ( soapModels == null ) { return null ; } List < CommerceShipmentItem > models = new ArrayList < CommerceShipmentItem > ( soapModels . length ) ; for ( CommerceShipmentItemSoap soapModel : soapModels ) { models . add ( toModel ( soapModel ) ) ; } return models ;
public class IORDumpUtil { public static String getIor ( String filename ) throws FileNotFoundException , SecurityException , IOException { } }
FileInputStream fid = new FileInputStream ( filename ) ; int nb = fid . available ( ) ; byte [ ] inStr = new byte [ nb ] ; int nbread = fid . read ( inStr ) ; fid . close ( ) ; String str = "" ; if ( nbread > 0 ) str = new String ( inStr ) ; System . out . println ( str ) ; return str . trim ( ) ;
public class CmsResultItemWidget { /** * Returns the scale parameter for big thumbnail images . < p > * @ return the scale parameter */ private String getBigImageScaleParam ( ) { } }
return IMAGE_SCALE_PARAM + ",w:" + I_CmsLayoutBundle . INSTANCE . galleryResultItemCss ( ) . bigImageWidth ( ) + ",h:" + I_CmsLayoutBundle . INSTANCE . galleryResultItemCss ( ) . bigImageHeight ( ) ;
public class GerritQueryHandler {
    /**
     * Runs the query and returns the result as a list of Java JSONObjects.
     *
     * @param queryString the query
     * @param getPatchSets if all patch-sets of the projects found should be included in the result,
     *        i.e. if --patch-sets should be appended to the command call
     * @param getCurrentPatchSet if the current patch-set for the projects found should be included in the result,
     *        i.e. if --current-patch-set should be appended to the command call
     * @param getFiles if the files of the patch sets should be included in the result,
     *        i.e. if --files should be appended to the command call
     * @param getCommitMessage if the full commit message should be included in the result,
     *        i.e. if --commit-message should be appended to the command call
     * @param getComments if patchset comments should be included in the results,
     *        i.e. if --comments should be appended to the command call
     * @return the query result as a List of JSONObjects
     * @throws GerritQueryException if Gerrit reports an error with the query
     * @throws SshException if there is an error in the SSH connection
     * @throws IOException for some other IO problem
     */
    public List<JSONObject> queryJava(String queryString, boolean getPatchSets, boolean getCurrentPatchSet, boolean getFiles, boolean getCommitMessage, boolean getComments) throws SshException, IOException, GerritQueryException {
        final List<JSONObject> list = new LinkedList<JSONObject>();
        runQuery(queryString, getPatchSets, getCurrentPatchSet, getFiles, getCommitMessage, getComments, new LineVisitor() {
            @Override
            public void visit(String line) throws GerritQueryException {
                JSONObject json = (JSONObject) JSONSerializer.toJSON(line.trim());
                // A line of type "error" aborts the whole query with an exception.
                if (json.has("type") && "error".equalsIgnoreCase(json.getString("type"))) {
                    throw new GerritQueryException(json.getString("message"));
                }
                list.add(json);
            }
        });
        return list;
    }
}
public class SnapshotTaskClientImpl { /** * { @ inheritDoc } */ @ Override public GetRestoreTaskResult getRestore ( String restoreId ) throws ContentStoreException { } }
GetRestoreTaskParameters taskParams = new GetRestoreTaskParameters ( ) ; taskParams . setRestoreId ( restoreId ) ; String taskResult = contentStore . performTask ( SnapshotConstants . GET_RESTORE_TASK_NAME , taskParams . serialize ( ) ) ; return GetRestoreTaskResult . deserialize ( taskResult ) ;
public class ListUserProfilesRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol representation.
     *
     * @param listUserProfilesRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the bound fields
     */
    public void marshall(ListUserProfilesRequest listUserProfilesRequest, ProtocolMarshaller protocolMarshaller) {
        if (listUserProfilesRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(listUserProfilesRequest.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(listUserProfilesRequest.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class UtilDateTime { /** * Makes a java . sql . Date from separate Strings for month , day , year * @ param monthStr * The month String * @ param dayStr * The day String * @ param yearStr * The year String * @ return A java . sql . Date made from separate Strings for month , day , year */ public static java . sql . Date toSqlDate ( String monthStr , String dayStr , String yearStr ) { } }
java . util . Date newDate = toDate ( monthStr , dayStr , yearStr , "0" , "0" , "0" ) ; if ( newDate != null ) return new java . sql . Date ( newDate . getTime ( ) ) ; else return null ;
public class Client {
    /**
     * Stop all threads related to this client. No further calls may be made
     * using this client.
     */
    public void stop() {
        if (LOG.isDebugEnabled()) {
            LOG.debug("Stopping client");
        }
        // Flip the running flag exactly once; a second stop() is a no-op.
        if (!running.compareAndSet(true, false)) {
            return;
        }
        synchronized (connections) {
            // wake up all connections
            for (Connection conn : connections.values()) {
                conn.interrupt();
            }
            // wait until all connections are closed
            while (!connections.isEmpty()) {
                try {
                    connections.wait();
                } catch (InterruptedException e) {
                    // pass - keep waiting until every connection has closed
                }
            }
        }
    }
}
public class EventsImpl {
    /**
     * Execute OData query. Executes an OData query for events.
     *
     * @param appId ID of the application. This is Application ID from the API Access settings blade in the Azure portal.
     * @param eventType The type of events to query; either a standard event type (`traces`, `customEvents`, `pageViews`, `requests`, `dependencies`, `exceptions`, `availabilityResults`) or `$all` to query across all event types. Possible values include: '$all', 'traces', 'customEvents', 'pageViews', 'browserTimings', 'requests', 'dependencies', 'exceptions', 'availabilityResults', 'performanceCounters', 'customMetrics'
     * @param timespan Optional. The timespan over which to retrieve events. This is an ISO8601 time period value. This timespan is applied in addition to any that are specified in the Odata expression.
     * @param filter An expression used to filter the returned events
     * @param search A free-text search expression to match for whether a particular event should be returned
     * @param orderby A comma-separated list of properties with "asc" (the default) or "desc" to control the order of returned events
     * @param select Limits the properties to just those requested on each returned event
     * @param skip The number of items to skip over before returning events
     * @param top The number of events to return
     * @param format Format for the returned events
     * @param count Request a count of matching items included with the returned events
     * @param apply An expression used for aggregation over returned events
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws ErrorResponseException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the EventsResults object if successful
     */
    public EventsResults getByType(String appId, EventType eventType, String timespan, String filter, String search, String orderby, String select, Integer skip, Integer top, String format, Boolean count, String apply) {
        // Synchronous wrapper: block on the async call and unwrap the body.
        return getByTypeWithServiceResponseAsync(appId, eventType, timespan, filter, search, orderby, select, skip, top, format, count, apply).toBlocking().single().body();
    }
}
public class TypeReflector { /** * Checks if value has primitive type . * Primitive types are : numbers , strings , booleans , date and time . Complex * ( non - primitive types are ) : objects , maps and arrays * @ param value a value to check * @ return true if the value has primitive type and false if value type is * complex . * @ see TypeConverter # toTypeCode ( Object ) * @ see TypeCode */ public static boolean isPrimitive ( Object value ) { } }
TypeCode typeCode = TypeConverter . toTypeCode ( value ) ; return typeCode == TypeCode . String || typeCode == TypeCode . Enum || typeCode == TypeCode . Boolean || typeCode == TypeCode . Integer || typeCode == TypeCode . Long || typeCode == TypeCode . Float || typeCode == TypeCode . Double || typeCode == TypeCode . DateTime || typeCode == TypeCode . Duration ;
public class DefaultBeanContext {
    /**
     * Fall back method to attempt to find a candidate for the given definitions.
     * This default implementation cannot disambiguate between multiple
     * candidates, so it always reports them as non-unique.
     *
     * @param beanType the bean type
     * @param qualifier the qualifier
     * @param candidates the candidates
     * @param <T> the generic type
     * @return the concrete bean definition
     */
    protected @Nonnull <T> BeanDefinition<T> findConcreteCandidate(@Nonnull Class<T> beanType, @Nullable Qualifier<T> qualifier, @Nonnull Collection<BeanDefinition<T>> candidates) {
        throw new NonUniqueBeanException(beanType, candidates.iterator());
    }
}
public class JSONUtils {
    /**
     * Gets a Map of attributes from a json object given a path to traverse.
     *
     * @param record a JSONObject to traverse
     * @param path the json path to follow
     * @return the attributes as a {@link HashMap}, or null if it was not found
     */
    public static HashMap<String, String> getMapFromJSONPath(JSONObject record, String path) {
        // Delegates to the generic path-traversal helper.
        return getObjectFromJSONPath(record, path);
    }
}
public class FunctionCallback { /** * Takes the payload from the response and checks if it was successful . Calls { @ link # onSuccess ( ModelNode ) } and * { @ link # proceed ( ) } if no error occurred , { @ link # onFailedOutcome ( ModelNode ) } and { @ link # abort ( ) } otherwise . */ @ Override public void onSuccess ( final DMRResponse response ) { } }
ModelNode result = response . get ( ) ; if ( ! result . hasDefined ( OUTCOME ) || result . isFailure ( ) ) { onFailedOutcome ( result ) ; abort ( ) ; } else { onSuccess ( result ) ; proceed ( ) ; }
public class HtmlValidationResponseFilter { /** * Called when the validated markup does not contain any errors . * @ param responseBuffer * the validated response markup * @ param report * the validation report */ protected void onValidMarkup ( AppendingStringBuffer responseBuffer , ValidationReport report ) { } }
IRequestablePage responsePage = getResponsePage ( ) ; DocType doctype = getDocType ( responseBuffer ) ; log . info ( "Markup for {} is valid {}" , responsePage != null ? responsePage . getClass ( ) . getName ( ) : "<unable to determine page class>" , doctype . name ( ) ) ; String head = report . getHeadMarkup ( ) ; String body = report . getBodyMarkup ( ) ; int indexOfHeadClose = responseBuffer . lastIndexOf ( "</head>" ) ; responseBuffer . insert ( indexOfHeadClose , head ) ; int indexOfBodyClose = responseBuffer . lastIndexOf ( "</body>" ) ; responseBuffer . insert ( indexOfBodyClose , body ) ;
public class FragmentBuilder { /** * This method pops a node of the defined class and optional uri from the stack . * If the uri is not defined , then the latest node of the approach class will * be chosen . * @ param stack The stack * @ param cls The node type * @ param uri The optional uri to match * @ return The node , or null if no suitable candidate is found */ protected Node popNode ( Stack < Node > stack , Class < ? extends Node > cls , String uri ) { } }
Node top = stack . isEmpty ( ) ? null : stack . peek ( ) ; if ( top != null ) { if ( nodeMatches ( top , cls , uri ) ) { Node node = stack . pop ( ) ; poppedNodes . push ( node ) ; return node ; } else { // Scan for potential match , from - 2 so don ' t repeat // check of top node for ( int i = stack . size ( ) - 2 ; i >= 0 ; i -- ) { if ( nodeMatches ( stack . get ( i ) , cls , uri ) ) { Node node = stack . remove ( i ) ; poppedNodes . push ( node ) ; return node ; } } } } return null ;
public class CmsEditor { /** * Returns the edit state for the given resource structure id . < p > * @ param cms the cms context * @ param resourceType the resource type to create * @ param contextPath the context path * @ param modelFilePath the model file path * @ param plainText if plain text / source editing is required * @ param backLink the back link location * @ return the state */ public static String getEditStateForNew ( CmsObject cms , I_CmsResourceType resourceType , String contextPath , String modelFilePath , boolean plainText , String backLink ) { } }
String state = "" ; state = A_CmsWorkplaceApp . addParamToState ( state , CmsXmlContentEditor . PARAM_NEWLINK , CmsJspTagEdit . getNewLink ( cms , resourceType , contextPath ) ) ; if ( CmsStringUtil . isNotEmptyOrWhitespaceOnly ( modelFilePath ) ) { state = A_CmsWorkplaceApp . addParamToState ( state , CmsWorkplace . PARAM_MODELFILE , modelFilePath ) ; state = A_CmsWorkplaceApp . addParamToState ( state , CmsEditorConstants . PARAM_MODE , CmsEditorConstants . MODE_COPY ) ; } state = A_CmsWorkplaceApp . addParamToState ( state , CmsEditor . RESOURCE_PATH_PREFIX , contextPath ) ; state = A_CmsWorkplaceApp . addParamToState ( state , CmsEditor . PLAIN_TEXT_PREFIX , String . valueOf ( plainText ) ) ; try { backLink = URLEncoder . encode ( backLink , CmsEncoder . ENCODING_UTF_8 ) ; } catch ( UnsupportedEncodingException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } state = A_CmsWorkplaceApp . addParamToState ( state , CmsEditor . BACK_LINK_PREFIX , backLink ) ; return state ;
public class Main {
    /**
     * Counts how many characters of {@code inputStr} appear in {@code vowels}.
     *
     * Examples:
     *   countVowels("corner", "AaEeIiOoUu") == 2
     *   countVowels("valid",  "AaEeIiOoUu") == 2
     *   countVowels("true",   "AaEeIiOoUu") == 2
     *
     * @param inputStr the string in which to count vowels
     * @param vowels   the characters considered vowels
     * @return the number of vowel characters present in {@code inputStr}
     */
    public static int countVowels(String inputStr, String vowels) {
        int total = 0;
        for (char ch : inputStr.toCharArray()) {
            // indexOf >= 0 means the character is one of the vowels.
            if (vowels.indexOf(ch) >= 0) {
                total++;
            }
        }
        return total;
    }

    public static void main(String[] args) {
        System.out.println(countVowels("corner", "AaEeIiOoUu")); // Output: 2
        System.out.println(countVowels("valid", "AaEeIiOoUu"));  // Output: 2
        System.out.println(countVowels("true", "AaEeIiOoUu"));   // Output: 2
    }
}
public class CommerceCurrencyPersistenceImpl { /** * Returns the commerce currency where uuid = & # 63 ; and groupId = & # 63 ; or throws a { @ link NoSuchCurrencyException } if it could not be found . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching commerce currency * @ throws NoSuchCurrencyException if a matching commerce currency could not be found */ @ Override public CommerceCurrency findByUUID_G ( String uuid , long groupId ) throws NoSuchCurrencyException { } }
CommerceCurrency commerceCurrency = fetchByUUID_G ( uuid , groupId ) ; if ( commerceCurrency == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", groupId=" ) ; msg . append ( groupId ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCurrencyException ( msg . toString ( ) ) ; } return commerceCurrency ;
public class TangoUtil { /** * Get the full device name * @ param deviceName * @ return * @ throws DevFailed */ public static String getfullNameForDevice ( final String deviceName ) throws DevFailed { } }
checkNullOrEmptyString ( deviceName ) ; final String [ ] fields = deviceName . split ( DEVICE_SEPARATOR ) ; if ( deviceName . contains ( DBASE_NO ) || fields . length != 1 ) { return deviceName ; } else { final Database db = ApiUtil . get_db_obj ( ) ; return db . get_device_from_alias ( fields [ 0 ] ) ; }
public class ConfigCustomResolverExample { /** * tag : : config - custom - resolver [ ] */ private Driver createDriver ( String virtualUri , String user , String password , ServerAddress ... addresses ) { } }
Config config = Config . builder ( ) . withResolver ( address -> new HashSet < > ( Arrays . asList ( addresses ) ) ) . build ( ) ; return GraphDatabase . driver ( virtualUri , AuthTokens . basic ( user , password ) , config ) ;
public class VisualizerParameterizer {
    /**
     * Shortens a class name by keeping only the part after the last occurrence
     * of the splitting character; the name is returned unchanged when the
     * character does not occur.
     *
     * @param nam class name
     * @param c   splitting character
     * @return shortened name
     */
    protected static String shortenClassname(String nam, char c) {
        final int sep = nam.lastIndexOf(c);
        return (sep < 0) ? nam : nam.substring(sep + 1);
    }
}
public class Session { /** * Returns the FORM parameters for the given URL based on the parser associated with the * first context found that includes the URL , or the default parser if it is not * in a context * @ param uri * @ param formData * @ return * @ throws URIException */ public Map < String , String > getFormParams ( URI uri , String formData ) throws URIException { } }
return this . getFormParamParser ( uri . toString ( ) ) . parse ( formData ) ;
public class FeatureTileTableCoreLinker { /** * Query for feature tile links by tile table * @ param tileTable * tile table * @ return links */ public List < FeatureTileLink > queryForTileTable ( String tileTable ) { } }
List < FeatureTileLink > links = null ; if ( featureTileLinksActive ( ) ) { links = featureTileLinkDao . queryForTileTableName ( tileTable ) ; } else { links = new ArrayList < FeatureTileLink > ( ) ; } return links ;
public class StitchAppRequestClientImpl { /** * Performs a request against a Stitch app server determined by the deployment model * of the underlying app . Throws a Stitch specific exception if the request fails . * @ param stitchReq the request to perform . * @ return a { @ link Response } to the request . */ @ Override public Response doRequest ( final StitchRequest stitchReq ) { } }
initAppMetadata ( clientAppId ) ; return super . doRequestUrl ( stitchReq , getHostname ( ) ) ;
public class BinaryJedis { /** * Synchronously save the DB on disk , then shutdown the server . * Stop all the clients , save the DB , then quit the server . This commands makes sure that the DB * is switched off without the lost of any data . This is not guaranteed if the client uses simply * { @ link # save ( ) SAVE } and then { @ link # quit ( ) QUIT } because other clients may alter the DB data * between the two commands . * @ return Status code reply on error . On success nothing is returned since the server quits and * the connection is closed . */ @ Override public String shutdown ( ) { } }
client . shutdown ( ) ; String status ; try { status = client . getStatusCodeReply ( ) ; } catch ( JedisException ex ) { status = null ; } return status ;
public class BinaryJedis { /** * Test for existence of a specified field in a hash . < b > Time complexity : < / b > O ( 1) * @ param key * @ param field * @ return Return true if the hash stored at key contains the specified field . Return false if the key is * not found or the field is not present . */ @ Override public Boolean hexists ( final byte [ ] key , final byte [ ] field ) { } }
checkIsInMultiOrPipeline ( ) ; client . hexists ( key , field ) ; return client . getIntegerReply ( ) == 1 ;
public class Logger { /** * < p > info < / p > * @ see Log # info * @ param message Format string . * @ param args Arguments for format string . */ public void info ( String message , Object ... args ) { } }
if ( log . isInfoEnabled ( ) ) { log . info ( String . format ( message , args ) ) ; }
public class MessageStatusHolder { /** * Count of unread messages . * @ param _ userId user id the count is wanted for * @ return count of unread messages */ public static int getUnReadCount ( final Long _userId ) { } }
int ret = 0 ; if ( MessageStatusHolder . CACHE . userID2UnRead . containsKey ( _userId ) ) { ret = MessageStatusHolder . CACHE . userID2UnRead . get ( _userId ) ; } return ret ;
public class ReflectionUtil { /** * Returns { @ link Annotation } s of the given type defined * on the given { @ link Method } . * @ param < T > the { @ link Annotation } type * @ param method the { @ link Method } * @ param type the type * @ return the { @ link Annotation } s */ public static < T extends Annotation > List < T > getAnnotations ( Method method , Class < T > type ) { } }
return filterAnnotations ( getAnnotations ( method ) , type ) ;
public class BTreePage {
    /**
     * Updates (in-place) the contents of this BTreePage with the given entries for those Keys that already exist. For
     * all the new or deleted Keys, collects them into a List and calculates the offset where they would have to be
     * inserted at or removed from.
     *
     * @param entries A List of PageEntries to update, in sorted order by {@link PageEntry#getKey()}.
     * @return A {@link ChangeInfo} object.
     * @throws IllegalDataFormatException If any of the entries do not conform to the Key/Value size constraints.
     * @throws IllegalArgumentException If the entries are not sorted by {@link PageEntry#getKey()}.
     */
    private ChangeInfo applyUpdates(List<PageEntry> entries) {
        // Keep track of new keys to be added along with the offset (in the original page) where they would have belonged.
        // An entry with a null value records a pending removal at that position.
        val changes = new ArrayList<Map.Entry<Integer, PageEntry>>();
        int removeCount = 0;
        // Process all the Entries, in order (by Key). lastPos lets each search resume
        // where the previous one ended (entries are sorted); lastKey enforces ordering.
        int lastPos = 0;
        ByteArraySegment lastKey = null;
        for (val e : entries) {
            // Enforce fixed key/value lengths configured for this page.
            if (e.getKey().getLength() != this.config.keyLength
                    || (e.hasValue() && e.getValue().getLength() != this.config.valueLength)) {
                throw new IllegalDataFormatException("Found an entry with unexpected Key or Value length.");
            }
            // Strictly increasing keys: rejects both unsorted input and duplicates.
            if (lastKey != null) {
                Preconditions.checkArgument(KEY_COMPARATOR.compare(lastKey, e.getKey()) < 0,
                        "Entries must be sorted by key and no duplicates are allowed.");
            }
            // Figure out if this entry exists already.
            val searchResult = search(e.getKey(), lastPos);
            if (searchResult.isExactMatch()) {
                if (e.hasValue()) {
                    // Key already exists: update in-place.
                    setValueAtPosition(searchResult.getPosition(), e.getValue());
                } else {
                    // Key exists but this is a removal. Record it for later.
                    changes.add(new AbstractMap.SimpleImmutableEntry<>(searchResult.getPosition(), null));
                    removeCount++;
                }
            } else if (e.hasValue()) {
                // This entry's key does not exist and we want to insert it (we don't care if we want to delete an inexistent
                // key). We need to remember it for later. Since this was not an exact match, binary search returned the
                // position where it should have been.
                changes.add(new AbstractMap.SimpleImmutableEntry<>(searchResult.getPosition(), e));
            }
            // Remember the last position so we may resume the next search from there.
            lastPos = searchResult.getPosition();
            lastKey = e.getKey();
        }
        // Insertions = total recorded changes minus the removals among them.
        return new ChangeInfo(changes, changes.size() - removeCount, removeCount);
    }
}
public class CmsFileExplorer {
    /**
     * Clears the given tree level by recursively removing all children of the
     * given parent item.<p>
     *
     * @param parentId the parent id
     */
    protected void clearTreeLevel(CmsUUID parentId) {
        // Snapshot the children into a new list to avoid concurrent
        // modification while removing items from the container.
        Collection<?> children = m_treeContainer.getChildren(parentId);
        // may be null when monkey clicking
        if (children != null) {
            // Reuse the already-fetched collection instead of querying the
            // container a second time.
            List<Object> childIds = new ArrayList<Object>(children);
            for (Object childId : childIds) {
                m_treeContainer.removeItemRecursively(childId);
            }
        }
    }
}
public class AccessPoint {
    /**
     * If capture {@code closest} is of a timestamp different from the one requested,
     * redirect to the exact Archival-URL for {@code closest}. A Memento Timegate
     * request is always redirected regardless of timestamp. Needs better method name.
     *
     * @param wbRequest the wayback request being served
     * @param httpResponse the servlet response (used only for the Content-Location header)
     * @param captureResults the capture search results; {@code closest} is recorded on them
     * @param closest the capture selected for replay
     * @throws BetterRequestException (as BetterReplayRequestException) to signal the redirect
     */
    protected void handleReplayRedirect(WaybackRequest wbRequest,
            HttpServletResponse httpResponse,
            CaptureSearchResults captureResults,
            CaptureSearchResult closest) throws BetterRequestException {
        // Requested timestamp already matches the capture (and this is not a
        // Timegate request): nothing to redirect.
        if (wbRequest.getReplayTimestamp().startsWith(closest.getCaptureTimestamp())
                && !wbRequest.isMementoTimegate()) {
            // Matching
            return;
        }
        captureResults.setClosest(closest);
        // TODO: better detection of non-redirect proxy mode?
        // For now, checking if the betterURI does not contain the timestamp, then we're not doing a redirect
        String datespec = ArchivalUrl.getDateSpec(wbRequest, closest.getCaptureTimestamp());
        String betterURI = getUriConverter().makeReplayURI(datespec, closest.getOriginalUrl());
        // if spare-redirect-for-embeds is on, render embedded resource in-place with Content-Location header pointing
        // exact replay URL (it is disabled for timegate requests)
        // XXX set Content-Location header somewhere else.
        if (fixedEmbeds && !wbRequest.isMementoTimegate()
                && isWaybackReferer(wbRequest, this.getReplayPrefix())) {
            httpResponse.setHeader("Content-Location", betterURI);
            return;
        }
        // In non-redirect proxy mode the replay URI carries no timestamp, so
        // no redirect exception is raised and the capture is served in place.
        boolean isNonRedirectProxy = !betterURI.contains(closest.getCaptureTimestamp());
        if (!isNonRedirectProxy) {
            throw new BetterReplayRequestException(closest, captureResults);
        }
    }
}
public class BaseOptimizationRunner {
    /**
     * Processes a returned task (either completed or failed): collects the
     * {@link OptimizationResult} from the future, updates the candidate status
     * map, tracks best-score bookkeeping, and records the result reference.
     */
    private void processReturnedTask(Future<OptimizationResult> future) {
        long currentTime = System.currentTimeMillis();
        OptimizationResult result;
        try {
            // Task is expected to be done already; the short timeout is a safety net.
            result = future.get(100, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            throw new RuntimeException("Unexpected InterruptedException thrown for task", e);
        } catch (ExecutionException e) {
            // Note that most of the time, an OptimizationResult is returned even for an exception
            // This is just to handle any that are missed there (or, by implementations that don't properly do this)
            log.warn("Task failed", e);
            numCandidatesFailed.getAndIncrement();
            return;
        } catch (TimeoutException e) {
            throw new RuntimeException(e); // TODO
        }
        // Update internal status: merge the completed run's data with the
        // previously recorded creation time and flat parameters.
        CandidateInfo status = currentStatus.get(result.getIndex());
        CandidateInfo newStatus = new CandidateInfo(result.getIndex(),
                result.getCandidateInfo().getCandidateStatus(), result.getScore(),
                status.getCreatedTime(), result.getCandidateInfo().getStartTime(),
                currentTime, status.getFlatParams(),
                result.getCandidateInfo().getExceptionStackTrace());
        currentStatus.put(result.getIndex(), newStatus);
        // Listeners (on complete, etc) should be executed in underlying task
        if (result.getCandidateInfo().getCandidateStatus() == CandidateStatus.Failed) {
            log.info("Task {} failed during execution: {}", result.getIndex(),
                    result.getCandidateInfo().getExceptionStackTrace());
            numCandidatesFailed.getAndIncrement();
        } else {
            // Report completion to candidate generator
            config.getCandidateGenerator().reportResults(result);
            Double score = result.getScore();
            log.info("Completed task {}, score = {}", result.getIndex(), result.getScore());
            // A null score never updates the best; direction depends on whether
            // the score function is being minimized or maximized.
            boolean minimize = config.getScoreFunction().minimize();
            if (score != null && (bestScore == null
                    || ((minimize && score < bestScore) || (!minimize && score > bestScore)))) {
                if (bestScore == null) {
                    log.info("New best score: {} (first completed model)", score);
                } else {
                    int idx = result.getIndex();
                    int lastBestIdx = bestScoreCandidateIndex.get();
                    log.info("New best score: {}, model {} (prev={}, model {})", score, idx,
                            bestScore, lastBestIdx);
                }
                bestScore = score;
                bestScoreTime = System.currentTimeMillis();
                bestScoreCandidateIndex.set(result.getIndex());
            }
            numCandidatesCompleted.getAndIncrement();
            // Model saving is done in the optimization tasks, to avoid CUDA threading issues
            ResultReference resultReference = result.getResultReference();
            if (resultReference != null)
                allResults.add(resultReference);
        }
    }
}
public class AuthorizationUtil { /** * Create a singleton context attribute set * @ param key context key * @ param value context value * @ return attribute set */ public static Set < Attribute > context ( String key , String value ) { } }
if ( null == key ) { throw new IllegalArgumentException ( "key cannot be null" ) ; } if ( null == value ) { throw new IllegalArgumentException ( "value cannot be null" ) ; } return Collections . singleton ( new Attribute ( URI . create ( EnvironmentalContext . URI_BASE + key ) , value ) ) ;
public class DisconfWebPathMgr { /** * @ return String * @ Description : 获取基本配置路径 的MAP * @ author liaoqiqi * @ date 2013-6-16 */ private static Map < String , String > getConfServerBasePathMap ( String app , String version , String env , String key ) { } }
Map < String , String > parameterMap = new LinkedHashMap < String , String > ( ) ; parameterMap . put ( Constants . VERSION , version ) ; parameterMap . put ( Constants . APP , app ) ; parameterMap . put ( Constants . ENV , env ) ; parameterMap . put ( Constants . KEY , key ) ; return parameterMap ;
public class WebUtils { /** * Check the attribute names * @ param attributeNames Atttribute names to check * @ return Null if all attribute names are invalid , non - empty if the check * failed , with an appropriate error message */ public static String validateAttributeNames ( Enumeration < String > attributeNames ) { } }
while ( attributeNames . hasMoreElements ( ) ) { String attribute = attributeNames . nextElement ( ) ; if ( ! attribute . equals ( "users" ) && ! attribute . equals ( "poolGroups" ) && ! attribute . equals ( "poolInfos" ) && ! attribute . equals ( "toKillSessionId" ) && ! attribute . equals ( "killSessionsToken" ) ) { return "Illegal parameter " + attribute + ", only 'users, " + "poolGroups, 'poolInfos', 'toKillSessionId' and 'killSessionsToken'" + "parameters allowed." ; } } return null ;
public class ServerVariableValidator { /** * { @ inheritDoc } */ @ Override public void validate ( ValidationHelper helper , Context context , String key , ServerVariable t ) { } }
ValidatorUtils . validateRequiredField ( t . getDefaultValue ( ) , context , "default" ) . ifPresent ( helper :: addValidationEvent ) ;
public class ARCWriter {
    /**
     * Write a record with the given metadata/content.
     *
     * @param uri URI for metadata-line
     * @param contentType MIME content-type for metadata-line
     * @param hostIP IP for metadata-line
     * @param fetchBeginTimeStamp timestamp for metadata-line
     * @param recordLength length for metadata-line; also may be enforced
     * @param in source InputStream for record content
     * @param enforceLength whether to enforce the declared length; should be true
     *            unless intentionally writing bad records for testing
     * @throws IOException on write failure, or when a ReplayInputStream was not
     *             fully consumed (length mismatch)
     */
    public void write(String uri, String contentType, String hostIP,
            long fetchBeginTimeStamp, long recordLength, InputStream in,
            boolean enforceLength) throws IOException {
        preWriteRecordTasks();
        try {
            // Metadata line first, then the record content copied from the stream.
            write(getMetaLine(uri, contentType, hostIP, fetchBeginTimeStamp,
                    recordLength).getBytes(UTF8));
            copyFrom(in, recordLength, enforceLength);
            if (in instanceof ReplayInputStream) {
                // check for consumption of entire recorded material
                long remaining = ((ReplayInputStream) in).remaining();
                // Should be zero at this stage. If not, something is
                // wrong.
                if (remaining != 0) {
                    String message = "Gap between expected and actual: " + remaining
                            + LINE_SEPARATOR + DevUtils.extraInfo() + " writing arc "
                            + this.getFile().getAbsolutePath();
                    DevUtils.warnHandle(new Throwable(message), message);
                    throw new IOException(message);
                }
            }
            // Record terminator.
            write(LINE_SEPARATOR);
        } finally {
            // Always balance preWriteRecordTasks, even when the write throws.
            postWriteRecordTasks();
        }
    }
}
public class DefaultWaitInteractable { /** * / * ( non - Javadoc ) * @ see minium . actions . InteractionPerformer # waitTime ( long , java . util . concurrent . TimeUnit ) */ @ Override public T waitTime ( long time , TimeUnit unit ) { } }
return perform ( new WaitTimeInteraction ( new Duration ( time , unit ) ) ) ;