signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class WorkerState { /** * Sets the maximum number of concurrent workers . * @ param numberOfCpus The number of CPUs to use ( zero to use all available * CPUs on the machine ) . */ @ CommandArgument public void setcpus ( int numberOfCpus ) { } }
if ( worker == null ) { System . err . println ( "Worker not running." ) ; return ; } int availableCpus = Runtime . getRuntime ( ) . availableProcessors ( ) ; if ( numberOfCpus <= 0 || numberOfCpus > availableCpus ) { numberOfCpus = availableCpus ; } System . out . printf ( "Setting number of CPUs to %d\n" , numberOfCpus ) ; worker . setMaxWorkers ( numberOfCpus ) ;
public class Operation {

    /**
     * See if a simple sequence can be used to extract the array. A simple extent is a continuous block from
     * a min to max index.
     *
     * @param var    the index variable: either an integer sequence or a scalar integer
     * @param e      extents object updated in place with the resolved lower/upper bounds
     * @param row    true to store the bounds as rows, false to store them as columns
     * @param length the length used to resolve relative sequence bounds (e.g. "end")
     * @return true if it is a simple range or false if not
     */
    private static boolean extractSimpleExtents( Variable var , Extents e , boolean row , int length ) {
        int lower;
        int upper;
        if ( var . getType ( ) == VariableType . INTEGER_SEQUENCE ) {
            IntegerSequence sequence = ( ( VariableIntegerSequence ) var ) . sequence ;
            // Only a FOR-style sequence with step 1 describes a continuous block.
            if ( sequence . getType ( ) == IntegerSequence . Type . FOR ) {
                IntegerSequence . For seqFor = ( IntegerSequence . For ) sequence ;
                seqFor . initialize ( length ) ;
                if ( seqFor . getStep ( ) == 1 ) {
                    lower = seqFor . getStart ( ) ;
                    upper = seqFor . getEnd ( ) ;
                } else {
                    return false ;
                }
            } else {
                return false ;
            }
        } else if ( var . getType ( ) == VariableType . SCALAR ) {
            // A scalar index is a degenerate range: lower == upper.
            lower = upper = ( ( VariableInteger ) var ) . value ;
        } else {
            throw new RuntimeException ( "How did a bad variable get put here?!?!" ) ;
        }
        // Store the resolved bounds either as a row range or a column range.
        if ( row ) {
            e . row0 = lower ;
            e . row1 = upper ;
        } else {
            e . col0 = lower ;
            e . col1 = upper ;
        }
        return true ;
    }
}
public class FilterReplaceDialog { /** * This method initializes btnOK * @ return javax . swing . JButton */ private JButton getBtnOK ( ) { } }
if ( btnOK == null ) { btnOK = new JButton ( ) ; btnOK . setText ( Constant . messages . getString ( "all.button.ok" ) ) ; btnOK . addActionListener ( new java . awt . event . ActionListener ( ) { @ Override public void actionPerformed ( java . awt . event . ActionEvent e ) { try { Pattern . compile ( getTxtPattern ( ) . getText ( ) ) ; } catch ( Exception e1 ) { // ZAP : i18n view . showWarningDialog ( Constant . messages . getString ( "filter.replacedialog.invalidpattern" ) ) ; getTxtPattern ( ) . grabFocus ( ) ; return ; } getTxtPattern ( ) . discardAllEdits ( ) ; FilterReplaceDialog . this . dispose ( ) ; exitCode = JOptionPane . OK_OPTION ; } } ) ; } return btnOK ;
public class Base64 { /** * Encodes a byte array into Base64 notation . * Example options : * < pre > * GZIP : gzip - compresses object before encoding it . * DO _ BREAK _ LINES : break lines at 76 characters * Note : Technically , this makes your encoding non - compliant . * < / pre > * Example : < code > encodeBytes ( myData , Base64 . GZIP ) < / code > or * Example : * < code > encodeBytes ( myData , Base64 . GZIP | Base64 . DO _ BREAK _ LINES ) < / code > * As of v 2.3 , if there is an error with the GZIP stream , the method will * throw an IOException . < b > This is new to v2.3 ! < / b > In earlier versions , it * just returned a null value , but in retrospect that ' s a pretty poor way to * handle it . * @ param source * The data to convert * @ param options * Specified options * @ return The Base64 - encoded data as a String * @ see Base64 # GZIP * @ see Base64 # DO _ BREAK _ LINES * @ throws IOException * if there is an error * @ throws NullPointerException * if source array is null * @ since 2.0 */ @ Nonnull public static String encodeBytes ( @ Nonnull final byte [ ] source , final int options ) throws IOException { } }
return encodeBytes ( source , 0 , source . length , options ) ;
public class AbstractConversionTable { /** * Restores the column widths from dialog settings . * @ param settings the settings to read . */ private void restoreColumnWidths ( IDialogSettings settings ) { } }
final int columnCount = this . table . getColumnCount ( ) ; for ( int i = 0 ; i < columnCount ; i ++ ) { int width = - 1 ; try { width = settings . getInt ( getPreferenceContainerID ( ) + getColumnWidthDialogSettingsKey ( ) + i ) ; } catch ( NumberFormatException exception ) { } if ( ( width <= 0 ) || ( i == this . table . getColumnCount ( ) - 1 ) ) { this . table . getColumn ( i ) . pack ( ) ; } else { this . table . getColumn ( i ) . setWidth ( width ) ; } }
public class CodeBuilder {

    /**
     * Creation style instructions: emits a new-object instruction for the given
     * type, delegating to the array form for array types.
     *
     * @param type the type to instantiate (array types create a one-dimensional array)
     */
    public void newObject ( TypeDesc type ) {
        if ( type . isArray ( ) ) {
            // Arrays are created through the dimensioned overload with one dimension.
            newObject ( type , 1 ) ;
        } else {
            // Qualified inner-class creation: NewObjectInstruction is an inner class
            // of the instruction list, so it is instantiated against mInstructions.
            // Constructing it appends the instruction as a side effect.
            mInstructions . new NewObjectInstruction ( mCp . addConstantClass ( type ) ) ;
        }
    }
}
public class AbstractCorsPolicyBuilder { /** * Specifies HTTP response headers that should be added to a CORS preflight response . * < p > An intermediary like a load balancer might require that a CORS preflight request * have certain headers set . This enables such headers to be added . * < p > Some values must be dynamically created when the HTTP response is created , for * example the { @ code " Date " } response header . This can be accomplished by using a { @ link Supplier } * which will have its { @ link Supplier # get ( ) } method invoked when the HTTP response is created . * @ param name the name of the HTTP header . * @ param valueSupplier a { @ link Supplier } which will be invoked at HTTP response creation . * @ return { @ code this } to support method chaining . */ public B preflightResponseHeader ( CharSequence name , Supplier < ? > valueSupplier ) { } }
requireNonNull ( name , "name" ) ; requireNonNull ( valueSupplier , "valueSupplier" ) ; preflightResponseHeaders . put ( HttpHeaderNames . of ( name ) , valueSupplier ) ; return self ( ) ;
public class FileUtils { /** * This method is able to determine whether a file is GZipped and return a { @ link BufferedWriter } in any case . * @ param path to be write * @ return BufferedWriter object * @ throws java . io . IOException IOException */ public static BufferedWriter newBufferedWriter ( Path path ) throws IOException { } }
FileUtils . checkDirectory ( path . getParent ( ) ) ; BufferedWriter bufferedWriter ; if ( path . toFile ( ) . getName ( ) . endsWith ( ".gz" ) ) { bufferedWriter = new BufferedWriter ( new OutputStreamWriter ( new GZIPOutputStream ( new FileOutputStream ( path . toFile ( ) ) ) ) ) ; } else { bufferedWriter = Files . newBufferedWriter ( path , Charset . defaultCharset ( ) ) ; } return bufferedWriter ;
public class AbstractHttpQuery { /** * Method to call after writing the HTTP response to the wire . The default * is to simply log the request info . Can be overridden by subclasses . */ public void done ( ) { } }
final int processing_time = processingTimeMillis ( ) ; final String url = request . getUri ( ) ; final String msg = String . format ( "HTTP %s done in %d ms" , url , processing_time ) ; if ( url . startsWith ( "/api/put" ) && LOG . isDebugEnabled ( ) ) { // NOTE : Suppresses too many log lines from / api / put . LOG . debug ( msg ) ; } else { logInfo ( msg ) ; } logInfo ( "HTTP " + request . getUri ( ) + " done in " + processing_time + "ms" ) ;
public class TileCache { /** * Calculate the best tile level to use for a certain view - bounds . * @ return best tile level for view bounds */ private int calculateTileLevel ( ) { } }
double baseX = layerBounds . getWidth ( ) ; double baseY = layerBounds . getHeight ( ) ; // choose the tile level so the area is between minimumTileSize and the next level ( minimumTileSize * 4) double baseArea = baseX * baseY ; double scale = layer . getMapModel ( ) . getMapView ( ) . getCurrentScale ( ) ; double osmArea = preferredTileSize / ( scale * scale ) ; int tileLevel = ( int ) Math . round ( Math . log ( baseArea / osmArea ) / Math . log ( 4.0 ) ) ; if ( tileLevel < 0 ) { tileLevel = 0 ; } return tileLevel ;
public class JvmExecutableImpl {

    /**
     * <!-- begin-user-doc -->
     * Reports whether the given structural feature is set to a non-default value.
     * EMF-generated; do not hand-edit.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet ( int featureID ) {
        switch ( featureID ) {
            case TypesPackage . JVM_EXECUTABLE__TYPE_PARAMETERS :
                return typeParameters != null && ! typeParameters . isEmpty ( ) ;
            case TypesPackage . JVM_EXECUTABLE__PARAMETERS :
                return parameters != null && ! parameters . isEmpty ( ) ;
            case TypesPackage . JVM_EXECUTABLE__EXCEPTIONS :
                return exceptions != null && ! exceptions . isEmpty ( ) ;
            case TypesPackage . JVM_EXECUTABLE__VAR_ARGS :
                return varArgs != VAR_ARGS_EDEFAULT ;
        }
        // Unknown features are delegated to the superclass.
        return super . eIsSet ( featureID ) ;
    }
}
public class Commands { /** * Returns a line , trimmed if the * { @ link BuiltInProperty # TRIM _ SCRIPTS options require trimming } . */ private String maybeTrim ( String line ) { } }
return sqlLine . getOpts ( ) . getTrimScripts ( ) ? line . trim ( ) : line ;
public class XMLParse {

    /**
     * Generates the XML message envelope for an encrypted message.
     *
     * @param encrypt   the encrypted message ciphertext
     * @param signature the security signature
     * @param timestamp the timestamp
     * @param nonce     the random nonce string
     * @return the generated XML string
     */
    public static String generate(String encrypt, String signature, String timestamp, String nonce) {
        final String template =
                "<xml>\n"
                        + "<Encrypt><![CDATA[%1$s]]></Encrypt>\n"
                        + "<MsgSignature><![CDATA[%2$s]]></MsgSignature>\n"
                        + "<TimeStamp>%3$s</TimeStamp>\n"
                        + "<Nonce><![CDATA[%4$s]]></Nonce>\n"
                        + "</xml>";
        return String.format(template, encrypt, signature, timestamp, nonce);
    }
}
public class JobAgentsInner { /** * Creates or updates a job agent . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param jobAgentName The name of the job agent to be created or updated . * @ param parameters The requested job agent resource state . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < JobAgentInner > createOrUpdateAsync ( String resourceGroupName , String serverName , String jobAgentName , JobAgentInner parameters ) { } }
return createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , jobAgentName , parameters ) . map ( new Func1 < ServiceResponse < JobAgentInner > , JobAgentInner > ( ) { @ Override public JobAgentInner call ( ServiceResponse < JobAgentInner > response ) { return response . body ( ) ; } } ) ;
public class EndpointActivationService { /** * Returns the JCA specification major and minor version with which the resource adapter declares support . * @ param bootstrapContext bootstrap context supplied to the resource adapter . * @ return the JCA specification version with which the resource adapter declares support . */ private static int [ ] getFullJCAVersion ( BootstrapContextImpl bootstrapContext ) { } }
int [ ] fullVersionIntArray = { 1 , 5 } ; // SIB and WMQ resource adapter " bundles " are compliant with JCA spec version 1.5 // TODO Set the correct JCA Version on the MessageEndpointFactory . Currently hardcoding to 1.5 ResourceAdapterMetaData raMetadata = bootstrapContext . getResourceAdapterMetaData ( ) ; if ( raMetadata != null ) { ConnectorModuleMetaData connectorMetadata = ( ConnectorModuleMetaData ) raMetadata . getModuleMetaData ( ) ; String fullVersionString = connectorMetadata . getSpecVersion ( ) ; String [ ] fullVersionStrArray = fullVersionString . split ( "\\." ) ; fullVersionIntArray [ 0 ] = Integer . valueOf ( fullVersionStrArray [ 0 ] ) ; fullVersionIntArray [ 1 ] = Integer . valueOf ( fullVersionStrArray [ 1 ] ) ; } return fullVersionIntArray ;
public class FailurePolicy { /** * Specifies that a failure has occurred if the { @ code resultPredicate } matches the execution result . * @ throws NullPointerException if { @ code resultPredicate } is null */ @ SuppressWarnings ( "unchecked" ) public S handleIf ( BiPredicate < R , ? extends Throwable > resultPredicate ) { } }
Assert . notNull ( resultPredicate , "resultPredicate" ) ; failuresChecked = true ; failureConditions . add ( ( BiPredicate < R , Throwable > ) resultPredicate ) ; return ( S ) this ;
public class route6 { /** * Use this API to clear route6. */ public static base_response clear ( nitro_service client , route6 resource ) throws Exception { } }
route6 clearresource = new route6 ( ) ; clearresource . routetype = resource . routetype ; return clearresource . perform_operation ( client , "clear" ) ;
public class ExecutionGraphUtil { /** * Takes in a collection of executions belonging to the same process instance . Orders the executions in a list , first elements are the leaf , last element is the root elements . */ public static List < ExecutionEntity > orderFromRootToLeaf ( Collection < ExecutionEntity > executions ) { } }
List < ExecutionEntity > orderedList = new ArrayList < ExecutionEntity > ( executions . size ( ) ) ; // Root elements HashSet < String > previousIds = new HashSet < String > ( ) ; for ( ExecutionEntity execution : executions ) { if ( execution . getParentId ( ) == null ) { orderedList . add ( execution ) ; previousIds . add ( execution . getId ( ) ) ; } } // Non - root elements while ( orderedList . size ( ) < executions . size ( ) ) { for ( ExecutionEntity execution : executions ) { if ( ! previousIds . contains ( execution . getId ( ) ) && previousIds . contains ( execution . getParentId ( ) ) ) { orderedList . add ( execution ) ; previousIds . add ( execution . getId ( ) ) ; } } } return orderedList ;
public class NGCommunicator { /** * Get nail command context from the header and start reading for stdin and heartbeats */ CommandContext readCommandContext ( ) throws IOException { } }
// client info - command line arguments and environment List < String > remoteArgs = new ArrayList ( ) ; Properties remoteEnv = new Properties ( ) ; String cwd = null ; // working directory String command = null ; // alias or class name // read everything from the client up to and including the command while ( command == null ) { int bytesToRead = in . readInt ( ) ; byte chunkType = in . readByte ( ) ; byte [ ] b = new byte [ bytesToRead ] ; in . readFully ( b ) ; String line = new String ( b , "UTF-8" ) ; switch ( chunkType ) { case NGConstants . CHUNKTYPE_ARGUMENT : // command line argument remoteArgs . add ( line ) ; break ; case NGConstants . CHUNKTYPE_ENVIRONMENT : // parse environment into property int equalsIndex = line . indexOf ( '=' ) ; if ( equalsIndex > 0 ) { remoteEnv . setProperty ( line . substring ( 0 , equalsIndex ) , line . substring ( equalsIndex + 1 ) ) ; } break ; case NGConstants . CHUNKTYPE_COMMAND : // command ( alias or classname ) command = line ; break ; case NGConstants . CHUNKTYPE_WORKINGDIRECTORY : // client working directory cwd = line ; break ; default : // freakout ? } } // Command and environment is read . Move other communication with client , which is heartbeats // and // stdin , to background thread startBackgroundReceive ( ) ; return new CommandContext ( command , cwd , remoteEnv , remoteArgs ) ;
public class TwitterImpl { /** * / * Search Resources */ @ Override public QueryResult search ( Query query ) throws TwitterException { } }
if ( query . nextPage ( ) != null ) { return factory . createQueryResult ( get ( conf . getRestBaseURL ( ) + "search/tweets.json" + query . nextPage ( ) ) , query ) ; } else { return factory . createQueryResult ( get ( conf . getRestBaseURL ( ) + "search/tweets.json" , query . asHttpParameterArray ( ) ) , query ) ; }
public class SimpleObfuscatorExtensions { /** * Disentangle the given obfuscated text with the given { @ link BiMap } rules * @ param rules * the rules * @ param obfuscated * the obfuscated text * @ return the string */ public static String disentangleBiMap ( final BiMap < Character , Character > rules , final String obfuscated ) { } }
return obfuscateBiMap ( rules . inverse ( ) , obfuscated ) ;
public class CXFMAPBuilder { /** * retrieve the inbound server message address properties attached to a message context * @ param ctx the server message context * @ return */ public MAP inboundMap ( Map < String , Object > ctx ) { } }
AddressingProperties implementation = ( AddressingProperties ) ctx . get ( CXFMAPConstants . SERVER_ADDRESSING_PROPERTIES_INBOUND ) ; return newMap ( implementation ) ;
public class ArchetypeBuilder {

    /**
     * This method: <ul>
     * <li>Copies POM from original project to archetype-resources</li>
     * <li>Generates <code>archetype-descriptor.xml</code></li>
     * <li>Generates Archetype's <code>pom.xml</code> if not present in target directory.</li>
     * </ul>
     *
     * @param projectPom POM file of original project
     * @param archetypeDir target directory of created Maven Archetype project
     * @param archetypePom created POM file for Maven Archetype project
     * @param metadataXmlOutFile generated archetype-metadata.xml file
     * @param replaceFn replace function
     * @throws IOException
     */
    private void createArchetypeDescriptors ( File projectPom , File archetypeDir , File archetypePom , File metadataXmlOutFile , Replacement replaceFn ) throws IOException {
        LOG . debug ( "Parsing " + projectPom ) ;
        String text = replaceFn . replace ( IOHelpers . readFully ( projectPom ) ) ;
        // lets update the XML
        Document doc = archetypeUtils . parseXml ( new InputSource ( new StringReader ( text ) ) ) ;
        Element root = doc . getDocumentElement ( ) ;
        // let's get some values from the original project
        String originalArtifactId , originalName , originalDescription ;
        Element artifactIdEl = ( Element ) findChild ( root , "artifactId" ) ;
        Element nameEl = ( Element ) findChild ( root , "name" ) ;
        Element descriptionEl = ( Element ) findChild ( root , "description" ) ;
        // Fall back: artifactId defaults to the directory name, name to the
        // artifactId, description to the name.
        if ( artifactIdEl != null && artifactIdEl . getTextContent ( ) != null && artifactIdEl . getTextContent ( ) . trim ( ) . length ( ) > 0 ) {
            originalArtifactId = artifactIdEl . getTextContent ( ) . trim ( ) ;
        } else {
            originalArtifactId = archetypeDir . getName ( ) ;
        }
        if ( nameEl != null && nameEl . getTextContent ( ) != null && nameEl . getTextContent ( ) . trim ( ) . length ( ) > 0 ) {
            originalName = nameEl . getTextContent ( ) . trim ( ) ;
        } else {
            originalName = originalArtifactId ;
        }
        if ( descriptionEl != null && descriptionEl . getTextContent ( ) != null && descriptionEl . getTextContent ( ) . trim ( ) . length ( ) > 0 ) {
            originalDescription = descriptionEl . getTextContent ( ) . trim ( ) ;
        } else {
            originalDescription = originalName ;
        }
        // Required properties collected from the POM: name -> default value (may be null).
        Map < String , String > propertyNameSet = new LinkedHashMap < > ( ) ;
        if ( root != null ) {
            // remove the parent element and the following text Node
            NodeList parents = root . getElementsByTagName ( "parent" ) ;
            if ( parents . getLength ( ) > 0 ) {
                boolean removeParentPom = true ;
                Element parentNode = ( Element ) parents . item ( 0 ) ;
                Element groupId = DomHelper . firstChild ( parentNode , "groupId" ) ;
                if ( groupId != null ) {
                    String textContent = groupId . getTextContent ( ) ;
                    if ( textContent != null ) {
                        textContent = textContent . trim ( ) ;
                        // The funktion starter parent is kept in the generated archetype.
                        if ( Objects . equal ( textContent , "io.fabric8.funktion.starter" ) ) {
                            removeParentPom = false ;
                        }
                    }
                }
                if ( removeParentPom ) {
                    // Also remove the whitespace text node following <parent> to keep formatting tidy.
                    if ( parentNode . getNextSibling ( ) . getNodeType ( ) == Node . TEXT_NODE ) {
                        root . removeChild ( parents . item ( 0 ) . getNextSibling ( ) ) ;
                    }
                    root . removeChild ( parents . item ( 0 ) ) ;
                }
            }
            // lets load all the properties defined in the <properties> element in the pom.
            Map < String , String > pomProperties = new LinkedHashMap < > ( ) ;
            NodeList propertyElements = root . getElementsByTagName ( "properties" ) ;
            if ( propertyElements . getLength ( ) > 0 ) {
                Element propertyElement = ( Element ) propertyElements . item ( 0 ) ;
                NodeList children = propertyElement . getChildNodes ( ) ;
                for ( int cn = 0 ; cn < children . getLength ( ) ; cn ++ ) {
                    Node e = children . item ( cn ) ;
                    if ( e instanceof Element ) {
                        pomProperties . put ( e . getNodeName ( ) , e . getTextContent ( ) ) ;
                    }
                }
            }
            if ( LOG . isDebugEnabled ( ) ) {
                for ( Map . Entry < String , String > entry : pomProperties . entrySet ( ) ) {
                    LOG . debug ( "pom property: {}={}" , entry . getKey ( ) , entry . getValue ( ) ) ;
                }
            }
            // lets find all the property names referenced as ${name} anywhere in the pom
            NodeList children = root . getElementsByTagName ( "*" ) ;
            for ( int cn = 0 ; cn < children . getLength ( ) ; cn ++ ) {
                Node e = children . item ( cn ) ;
                if ( e instanceof Element ) {
                    String cText = e . getTextContent ( ) ;
                    String prefix = "${" ;
                    if ( cText . startsWith ( prefix ) ) {
                        int offset = prefix . length ( ) ;
                        int idx = cText . indexOf ( "}" , offset + 1 ) ;
                        if ( idx > 0 ) {
                            String name = cText . substring ( offset , idx ) ;
                            if ( ! pomProperties . containsKey ( name ) && isValidRequiredPropertyName ( name ) ) {
                                // use default value if we have one, but favor value from this pom over the bom pom
                                String value = pomProperties . get ( name ) ;
                                if ( value == null ) {
                                    value = versionProperties . get ( name ) ;
                                }
                                // lets use dash instead of dot
                                name = name . replace ( '.' , '-' ) ;
                                propertyNameSet . put ( name , value ) ;
                            }
                        }
                    } else {
                        // pickup some special property names we want to be in the archetype as requiredProperty
                        String cName = e . getNodeName ( ) ;
                        if ( isValidRequiredPropertyName ( cName ) && isSpecialPropertyName ( cName ) ) {
                            String value = e . getTextContent ( ) ;
                            if ( value != null ) {
                                value = value . trim ( ) ;
                                // lets use dash instead of dot
                                cName = cName . replace ( '.' , '-' ) ;
                                propertyNameSet . put ( cName , value ) ;
                                // and use a placeholder token in its place, so we can allow to specify the version dynamically in the archetype
                                String token = "${" + cName + "}" ;
                                e . setTextContent ( token ) ;
                            }
                        }
                    }
                }
            }
            // now lets replace the contents of some elements (adding new elements if they are not present)
            List < String > beforeNames = Arrays . asList ( "artifactId" , "version" , "packaging" , "name" , "properties" ) ;
            replaceOrAddElementText ( doc , root , "version" , "${version}" , beforeNames ) ;
            replaceOrAddElementText ( doc , root , "artifactId" , "${artifactId}" , beforeNames ) ;
            replaceOrAddElementText ( doc , root , "groupId" , "${groupId}" , beforeNames ) ;
        }
        archetypePom . getParentFile ( ) . mkdirs ( ) ;
        // remove copyright header which is the first comment, as we do not want that in the archetypes
        removeCommentNodes ( doc ) ;
        archetypeUtils . writeXmlDocument ( doc , archetypePom ) ;
        // lets update the archetype-metadata.xml file; the template depends on the project language
        String archetypeXmlText ;
        if ( archetypeDir . getName ( ) . contains ( "groovy" ) ) {
            archetypeXmlText = groovyArchetypeXmlText ( ) ;
        } else if ( archetypeDir . getName ( ) . contains ( "kotlin" ) ) {
            archetypeXmlText = kotlinArchetypeXmlText ( ) ;
        } else {
            archetypeXmlText = defaultArchetypeXmlText ( ) ;
        }
        Document archDoc = archetypeUtils . parseXml ( new InputSource ( new StringReader ( archetypeXmlText ) ) ) ;
        Element archRoot = archDoc . getDocumentElement ( ) ;
        // replace @name attribute on root element
        archRoot . setAttribute ( "name" , archetypeDir . getName ( ) ) ;
        LOG . debug ( ( "Found property names: {}" ) , propertyNameSet ) ;
        // lets add all the properties
        Element requiredProperties = replaceOrAddElement ( archDoc , archRoot , "requiredProperties" , Arrays . asList ( "fileSets" ) ) ;
        // lets add the various properties in; text nodes are inserted by hand to keep the output indented
        for ( Map . Entry < String , String > entry : propertyNameSet . entrySet ( ) ) {
            requiredProperties . appendChild ( archDoc . createTextNode ( "\n" + indent + indent ) ) ;
            Element requiredProperty = archDoc . createElement ( "requiredProperty" ) ;
            requiredProperties . appendChild ( requiredProperty ) ;
            requiredProperty . setAttribute ( "key" , entry . getKey ( ) ) ;
            if ( entry . getValue ( ) != null ) {
                requiredProperty . appendChild ( archDoc . createTextNode ( "\n" + indent + indent + indent ) ) ;
                Element defaultValue = archDoc . createElement ( "defaultValue" ) ;
                requiredProperty . appendChild ( defaultValue ) ;
                defaultValue . appendChild ( archDoc . createTextNode ( entry . getValue ( ) ) ) ;
            }
            requiredProperty . appendChild ( archDoc . createTextNode ( "\n" + indent + indent ) ) ;
        }
        requiredProperties . appendChild ( archDoc . createTextNode ( "\n" + indent ) ) ;
        metadataXmlOutFile . getParentFile ( ) . mkdirs ( ) ;
        archetypeUtils . writeXmlDocument ( archDoc , metadataXmlOutFile ) ;
        generatePomIfRequired ( archetypeDir , originalName , originalDescription ) ;
    }
}
public class RTMPConnection { /** * { @ inheritDoc } */ public IClientBroadcastStream newBroadcastStream ( Number streamId ) { } }
if ( isValidStreamId ( streamId ) ) { // get ClientBroadcastStream defined as a prototype in red5 - common . xml ClientBroadcastStream cbs = ( ClientBroadcastStream ) scope . getContext ( ) . getBean ( "clientBroadcastStream" ) ; customizeStream ( streamId , cbs ) ; if ( ! registerStream ( cbs ) ) { cbs = null ; } return cbs ; } return null ;
public class SQLManager { /** * Builds SQL SELECT to retrieve chunk of rows from a table based on row offset and number of rows in a chunk . * Pagination in following Databases : * SQL Server , Oracle 12c : OFFSET x ROWS FETCH NEXT y ROWS ONLY * SQL Server , Vertica may need ORDER BY * MySQL , PostgreSQL , MariaDB : LIMIT y OFFSET x * Teradata ( and possibly older Oracle ) : * SELECT * FROM mytable * QUALIFY ROW _ NUMBER ( ) OVER ( ORDER BY column _ name ) BETWEEN x and x + y ; * @ param databaseType * @ param table * @ param start * @ param length * @ param columns * @ param columnNames array of column names retrieved and parsed from single row SELECT prior to this call * @ return String SQL SELECT statement */ static String buildSelectChunkSql ( String databaseType , String table , long start , int length , String columns , String [ ] columnNames ) { } }
String sqlText = "SELECT " + columns + " FROM " + table ; switch ( databaseType ) { case SQL_SERVER_DB_TYPE : // requires ORDER BY clause with OFFSET / FETCH NEXT clauses , syntax supported since SQLServer 2012 sqlText += " ORDER BY ROW_NUMBER() OVER (ORDER BY (SELECT 0))" ; sqlText += " OFFSET " + start + " ROWS FETCH NEXT " + length + " ROWS ONLY" ; break ; case ORACLE_DB_TYPE : sqlText += " OFFSET " + start + " ROWS FETCH NEXT " + length + " ROWS ONLY" ; break ; case TERADATA_DB_TYPE : sqlText += " QUALIFY ROW_NUMBER() OVER (ORDER BY " + columnNames [ 0 ] + ") BETWEEN " + ( start + 1 ) + " AND " + ( start + length ) ; break ; default : sqlText += " LIMIT " + length + " OFFSET " + start ; } return sqlText ;
public class AbcGrammar {

    /**
     * field-history ::= %x48.3A *WSP 1*(tex-text header-eol) <p>
     * <tt>H:</tt>
     * field contents may extend over many lines, which is deprecated (maybe not allowed any longer?)
     *
     * @return the parser rule matching an ABC "H:" (history) header field
     */
    Rule FieldHistory ( ) {
        // "H:" prefix, optional whitespace (suppressed from the parse tree),
        // then the field text up to the header end-of-line.
        return Sequence ( String ( "H:" ) , ZeroOrMore ( WSP ( ) ) . suppressNode ( ) , /* OneOrMoreS ( */ TexText ( ) , HeaderEol ( ) ) . label ( FieldHistory ) ;
    }
}
public class Node { /** * Adds a child to the list of children for this Node < T > . The addition of * the first child will create a new List < Node < T > > . * @ param child a Node < T > object to set . */ public Node < T > addChild ( Node < T > child ) { } }
if ( children == null ) { children = new ArrayList < Node < T > > ( ) ; } child . parent = this ; children . add ( child ) ; return child ;
public class AbstractSearch { /** * returns the options of the current setup . * @ returnthe current options */ @ Override public String [ ] getOptions ( ) { } }
List < String > result ; result = new ArrayList < String > ( ) ; if ( getDebug ( ) ) result . add ( "-D" ) ; return result . toArray ( new String [ result . size ( ) ] ) ;
public class StyleCounter { /** * Adds a new occurence to the counter . * @ param style The style to be added . */ public void add ( T style ) { } }
Integer cnt = styles . get ( style ) ; if ( cnt == null ) styles . put ( style , 1 ) ; else styles . put ( style , cnt + 1 ) ;
public class Collections {

    /**
     * Create a mutable list from values.
     *
     * @param values values to create a list from (the varargs array is only read,
     *        never stored, hence {@code @SafeVarargs}).
     * @return a new mutable {@link ArrayList} containing the values.
     */
    @SafeVarargs
    public static <T> List<T> list(T... values) {
        return new ArrayList<T>(Arrays.asList(values));
    }
}
public class ESDocumentField { /** * The first value of the hit . */ public < V > V getValue ( ) { } }
if ( values == null || values . isEmpty ( ) ) { return null ; } return ( V ) values . get ( 0 ) ;
public class CopySnapshotRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional * parameters to enable operation dry - run . */ @ Override public Request < CopySnapshotRequest > getDryRunRequest ( ) { } }
Request < CopySnapshotRequest > request = new CopySnapshotRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ;
public class TangoAttribute {

    /**
     * Read attribute with format SPECTRUM or IMAGE.
     *
     * @param <T>  the element type to extract
     * @param type the class of the element type
     * @return the attribute values as an array of the requested type
     * @throws DevFailed if the read or the extraction fails
     */
    public < T > T [ ] readSpecOrImage ( final Class < T > type ) throws DevFailed {
        // Refresh the attribute value from the device before extracting it.
        update ( ) ;
        return extractSpecOrImage ( type ) ;
    }
}
public class MessageFormat { /** * Creates a MessageFormat with the given pattern and uses it to * format the given arguments . The pattern must identifyarguments * by name instead of by number . * @ throws IllegalArgumentException if the pattern is invalid * @ throws IllegalArgumentException if a value in the * < code > arguments < / code > array is not of the type * expected by the corresponding argument or custom Format object . * @ see # format ( Map , StringBuffer , FieldPosition ) * @ see # format ( String , Object [ ] ) */ public static String format ( String pattern , Map < String , Object > arguments ) { } }
MessageFormat temp = new MessageFormat ( pattern ) ; return temp . format ( arguments ) ;
public class MD5DigestFileEntry {

    /**
     * Generates the digest: the MD5 of the backing entry's content, rendered as a
     * 32-character zero-padded lowercase hex string and returned as bytes.
     *
     * @return the digest.
     * @throws IOException if the backing entry could not be read.
     * @since 1.0
     */
    private byte [ ] getContent ( ) throws IOException {
        InputStream is = null ;
        try {
            MessageDigest digest = MessageDigest . getInstance ( "MD5" ) ;
            digest . reset ( ) ;
            byte [ ] buffer = new byte [ 8192 ] ;
            int read ;
            try {
                is = entry . getInputStream ( ) ;
                while ( ( read = is . read ( buffer ) ) > 0 ) {
                    digest . update ( buffer , 0 , read ) ;
                }
            } catch ( IOException e ) {
                // NOTE(review): when getInputStream() itself throws, is is still null and
                // the exception is swallowed, yielding the MD5 of empty content — this
                // looks deliberate (best-effort for stream-less entries) but confirm.
                if ( is != null ) {
                    throw e ;
                }
            }
            // Left-pad to 32 hex chars: BigInteger.toString drops leading zeros.
            final String md5 = StringUtils . leftPad ( new BigInteger ( 1 , digest . digest ( ) ) . toString ( 16 ) , 32 , "0" ) ;
            // Hex digits are ASCII, so the platform-default charset here is harmless.
            return md5 . getBytes ( ) ;
        } catch ( NoSuchAlgorithmException e ) {
            IOException ioe = new IOException ( "Unable to calculate hash" ) ;
            ioe . initCause ( e ) ;
            throw ioe ;
        } finally {
            IOUtils . closeQuietly ( is ) ;
        }
    }
}
public class SeaGlassTableUI {

    /**
     * Paint the drop lines, if any.
     *
     * <p>Draws the full-length drop line in the style's drop-line color and, for
     * drops between existing cells, a shorter emphasis segment in the short color.
     *
     * @param context the Synth context.
     * @param g       the Graphics context.
     */
    private void paintDropLines ( SeaGlassContext context , Graphics g ) {
        JTable . DropLocation loc = table . getDropLocation ( ) ;
        if ( loc == null ) {
            return ;
        }
        Color color = ( Color ) style . get ( context , "Table.dropLineColor" ) ;
        Color shortColor = ( Color ) style . get ( context , "Table.dropLineShortColor" ) ;
        if ( color == null && shortColor == null ) {
            // Nothing to paint without at least one configured color.
            return ;
        }
        Rectangle rect ;
        // Horizontal drop line (drop between rows).
        rect = getHDropLineRect ( loc ) ;
        if ( rect != null ) {
            // Remember the un-extended span for the short segment below.
            int x = rect . x ;
            int w = rect . width ;
            if ( color != null ) {
                extendRect ( rect , true ) ;
                g . setColor ( color ) ;
                g . fillRect ( rect . x , rect . y , rect . width , rect . height ) ;
            }
            if ( ! loc . isInsertColumn ( ) && shortColor != null ) {
                g . setColor ( shortColor ) ;
                g . fillRect ( x , rect . y , w , rect . height ) ;
            }
        }
        // Vertical drop line (drop between columns).
        rect = getVDropLineRect ( loc ) ;
        if ( rect != null ) {
            int y = rect . y ;
            int h = rect . height ;
            if ( color != null ) {
                extendRect ( rect , false ) ;
                g . setColor ( color ) ;
                g . fillRect ( rect . x , rect . y , rect . width , rect . height ) ;
            }
            if ( ! loc . isInsertRow ( ) && shortColor != null ) {
                g . setColor ( shortColor ) ;
                g . fillRect ( rect . x , y , rect . width , h ) ;
            }
        }
    }
}
public class CoreOptions { /** * Creates a composite option of { @ link ProvisionOption } s . * @ param urls provision urls ( cannot be null or containing null entries ) * @ return composite option of provision options * @ throws IllegalArgumentException - If urls array is null or contains null entries */ public static Option provision ( final String ... urls ) { } }
validateNotEmptyContent ( urls , true , "URLs" ) ; final List < ProvisionOption > options = new ArrayList < ProvisionOption > ( ) ; for ( String url : urls ) { options . add ( new UrlProvisionOption ( url ) ) ; } return provision ( options . toArray ( new ProvisionOption [ options . size ( ) ] ) ) ;
public class CmsDateBox {

    /**
     * Updates the auto-hide behavior of the popup and the parent widget.<p>
     *
     * While the user input is invalid, auto-hiding is suppressed so the widget
     * (and its parent) cannot be closed automatically with bad data still in
     * place. Each flag ({@code m_isValidTime}, {@code m_isValidDateBox}) is
     * only acted on when its validity state actually changes, so the
     * add/remove partner calls are never issued twice in a row.<p>
     */
    protected void updateCloseBehavior() {

        if (isEnabled()) {
            // Time field: toggle the popup's auto-hide on validity transitions.
            if (!m_isValidTime && isValidTime()) {
                m_isValidTime = true;
                m_popup.setAutoHideEnabled(true);
            } else if (m_isValidTime && !isValidTime()) {
                m_isValidTime = false;
                m_popup.setAutoHideEnabled(false);
            }

            // Date box: register/unregister the whole document element as an
            // auto-hide partner of the parent, so the parent stays open while
            // the date box content is invalid.
            if (!m_isValidDateBox && isValideDateBox()) {
                m_isValidDateBox = true;
                if (m_autoHideParent != null) {
                    m_autoHideParent.removeAutoHidePartner(RootPanel.getBodyElement().getParentElement());
                }
            } else if (m_isValidDateBox && !isValideDateBox()) {
                m_isValidDateBox = false;
                if (m_autoHideParent != null) {
                    m_autoHideParent.addAutoHidePartner(RootPanel.getBodyElement().getParentElement());
                }
            }
        }
    }
}
public class TrafficPlugin { /** * Add Layer to the map and track the id . * @ param layer the layer to be added to the map * @ param idAboveLayer the id of the layer above */ private void addTrafficLayersToMap ( Layer layerCase , Layer layer , String idAboveLayer ) { } }
style . addLayerBelow ( layerCase , idAboveLayer ) ; style . addLayerAbove ( layer , layerCase . getId ( ) ) ; layerIds . add ( layerCase . getId ( ) ) ; layerIds . add ( layer . getId ( ) ) ;
public class Context { /** * Tells whether or not there ' s at least one node from the sites tree in context . * The whole tree is traversed until is found one node in context . * @ param node the node to check , recursively * @ return { @ code true } if the context has at least one node from the sites tree in context , { @ code false } otherwise */ private boolean hasNodesInContext ( SiteNode node ) { } }
@ SuppressWarnings ( "unchecked" ) Enumeration < TreeNode > en = node . children ( ) ; while ( en . hasMoreElements ( ) ) { SiteNode sn = ( SiteNode ) en . nextElement ( ) ; if ( isInContext ( sn ) ) { return true ; } if ( hasNodesInContext ( sn ) ) { return true ; } } return false ;
public class BeanShellBSFEngine { /** * Note : the apply ( ) method may be supported directly in BeanShell in an * upcoming release and would not require special support here . */ public Object apply ( String source , int lineNo , int columnNo , Object funcBody , Vector namesVec , Vector argsVec ) throws BSFException { } }
if ( namesVec . size ( ) != argsVec . size ( ) ) throw new BSFException ( "number of params/names mismatch" ) ; if ( ! ( funcBody instanceof String ) ) throw new BSFException ( "apply: functino body must be a string" ) ; String [ ] names = new String [ namesVec . size ( ) ] ; namesVec . copyInto ( names ) ; Object [ ] args = new Object [ argsVec . size ( ) ] ; argsVec . copyInto ( args ) ; try { if ( ! installedApplyMethod ) { interpreter . eval ( bsfApplyMethod ) ; installedApplyMethod = true ; } bsh . This global = ( bsh . This ) interpreter . get ( "global" ) ; Object value = global . invokeMethod ( "_bsfApply" , new Object [ ] { names , args , ( String ) funcBody } ) ; return Primitive . unwrap ( value ) ; } catch ( InterpreterError e ) { throw new BSFException ( BSFException . REASON_UNKNOWN_LANGUAGE , "BeanShell interpreter internal error: " + e + sourceInfo ( source , lineNo , columnNo ) , e ) ; } catch ( TargetError e2 ) { throw new BSFException ( BSFException . REASON_EXECUTION_ERROR , "The application script threw an exception: " + e2 . getTarget ( ) + sourceInfo ( source , lineNo , columnNo ) , e2 ) ; } catch ( EvalError e3 ) { throw new BSFException ( BSFException . REASON_OTHER_ERROR , "BeanShell script error: " + e3 + sourceInfo ( source , lineNo , columnNo ) , e3 ) ; }
public class Glob {

    /**
     * Translates a glob PATTERN into a regular expression appended to
     * {@code result}, terminated with {@code '$'}.
     *
     * <p>{@code *} becomes {@code .*}, {@code ?} becomes {@code .}, and
     * {@code [...]} character classes are carried over (with {@code [!...]}
     * negation mapped to {@code [^...]}). Every other character is appended
     * via {@code escape(c)} and counted; the return value reports whether the
     * whole pattern consisted of such literal characters only.</p>
     *
     * @return {@code true} if the glob contained no wildcard constructs at all
     *         (every character was escaped as a literal), {@code false} otherwise
     */
    private static boolean translate(String glob, StringBuilder result) {
        int i;
        int max;
        char c;
        int j;
        String stuff;
        int escaped;

        escaped = 0;
        max = glob.length();
        for (i = 0; i < max; ) {
            c = glob.charAt(i++);
            if (c == '*') {
                result.append(".*");
            } else if (c == '?') {
                result.append('.');
            } else if (c == '[') {
                // Scan ahead for the closing ']' of the character class.
                j = i;
                if (j < max && glob.charAt(j) == '!') {
                    // Leading '!' is glob negation, part of the class.
                    j++;
                }
                if (j < max && glob.charAt(j) == ']') {
                    // A ']' immediately after the opener (or '!') is a literal
                    // member of the class, not the terminator.
                    j++;
                }
                while (j < max && glob.charAt(j) != ']') {
                    j++;
                }
                if (j >= max) {
                    // Unterminated class: treat '[' as a literal.
                    result.append("\\[");
                } else {
                    stuff = glob.substring(i, j);
                    // Double any backslashes inside the class body.
                    stuff = Strings.replace(stuff, "\\", "\\\\");
                    i = j + 1;
                    if (stuff.charAt(0) == '!') {
                        // Glob negation -> regex negation.
                        stuff = '^' + stuff.substring(1);
                    } else if (stuff.charAt(0) == '^') {
                        // A literal leading '^' must be escaped so it is not
                        // read as regex negation.
                        stuff = '\\' + stuff;
                    }
                    result.append('[');
                    result.append(stuff);
                    result.append(']');
                }
            } else {
                // Plain character: escape for regex use and count it.
                escaped++;
                result.append(escape(c));
            }
        }
        result.append('$');
        // True iff every character was appended through the literal branch.
        return escaped == max;
    }
}
public class EnumUtils {

    /**
     * Returns the names of all constants of the given enum class, in
     * declaration order.
     *
     * <p>Previously this round-tripped through
     * {@code Arrays.toString(...).replaceAll("^.|.$", "").split(", ")}, which
     * returns {@code [""]} (a one-element array holding the empty string) for
     * an enum with no constants instead of an empty array. Mapping each
     * constant through {@link Enum#name()} avoids that and any string-parsing
     * fragility.</p>
     *
     * @param e the enum class to inspect; assumed to be an actual enum type
     * @return the constant names in declaration order (empty array if there
     *         are none, or if {@code e} is not an enum type)
     */
    public static String[] getNames(Class<? extends Enum<?>> e) {
        Enum<?>[] constants = e.getEnumConstants();
        if (constants == null) {
            // Not an enum type (possible via raw-type abuse): no names.
            return new String[0];
        }
        String[] names = new String[constants.length];
        for (int i = 0; i < constants.length; i++) {
            names[i] = constants[i].name();
        }
        return names;
    }
}
public class SourceBuilderImpl { /** * { @ inheritDoc } */ @ Override public SourceLink createSourceLink ( final GedObject ged , final Source source ) { } }
if ( ged == null || source == null ) { return new SourceLink ( ) ; } final SourceLink sourceLink = new SourceLink ( ged , "Source" , new ObjectId ( source . getString ( ) ) ) ; ged . insert ( sourceLink ) ; return sourceLink ;
public class ResourceLoader { /** * Finds all resources with given name at the given search path . If the URL points to a directory , the name is the * file path relative to this directory . If the URL points to a JAR file , the name identifies an entry in that JAR * file . If the URL points to a JAR file , the resource is not found in that JAR file , and the JAR file has * Class - Path attribute , the JAR files identified in the Class - Path are also searched for the resource . * The search is lazy , that is , " find next resource " operation is triggered by calling * { @ link Enumeration # hasMoreElements } . * @ param sources the source URL path * @ param name the resource name * @ return enumeration of URLs of the resources */ public Enumeration < URL > findResources ( URL [ ] sources , String name ) { } }
return new ResourceEnumeration < > ( sources . clone ( ) , name , true ) ;
public class PDUByteBuffer { /** * Append bytes to specified offset and length . * @ param b is the bytes to append . * @ param offset is the offset where the bytes will be placed . * @ param length the length that will specified which part of the bytes will * be append . * @ return the latest length of the byte buffer . */ public int append ( byte [ ] b , int offset , int length ) { } }
int oldLength = bytesLength ; bytesLength += length ; int newCapacity = capacityPolicy . ensureCapacity ( bytesLength , bytes . length ) ; if ( newCapacity > bytes . length ) { byte [ ] newB = new byte [ newCapacity ] ; System . arraycopy ( bytes , 0 , newB , 0 , bytes . length ) ; // copy current bytes to new bytes bytes = newB ; } System . arraycopy ( b , offset , bytes , oldLength , length ) ; // assign value normalizeCommandLength ( ) ; return bytesLength ;
public class AbstractProcessInstanceMarshaller {

    /**
     * Serializes a workflow process instance to the context's output stream.
     *
     * <p>Wire layout (in order): header (id, process id, state, node instance
     * counter), swimlane actors, node instances (sorted by id, each preceded
     * by a {@code NODE_INSTANCE} marker and terminated by {@code END}),
     * exclusive group instances, then the non-null process variables. The
     * node-instance and key lists are sorted so the output is deterministic.</p>
     *
     * @param context         marshalling context holding the output stream and strategy store
     * @param processInstance the process instance to write; must be a
     *                        {@code WorkflowProcessInstanceImpl}
     * @return always {@code null}
     * @throws IOException if writing to the stream fails
     */
    public Object writeProcessInstance(MarshallerWriteContext context, ProcessInstance processInstance) throws IOException {
        WorkflowProcessInstanceImpl workFlow = (WorkflowProcessInstanceImpl) processInstance;
        ObjectOutputStream stream = context.stream;

        // Header.
        stream.writeLong(workFlow.getId());
        stream.writeUTF(workFlow.getProcessId());
        stream.writeInt(workFlow.getState());
        stream.writeLong(workFlow.getNodeInstanceCounter());

        // Swimlane actors: count followed by (swimlane, actor) UTF pairs;
        // zero when there is no swimlane context.
        SwimlaneContextInstance swimlaneContextInstance =
                (SwimlaneContextInstance) workFlow.getContextInstance(SwimlaneContext.SWIMLANE_SCOPE);
        if (swimlaneContextInstance != null) {
            Map<String, String> swimlaneActors = swimlaneContextInstance.getSwimlaneActors();
            stream.writeInt(swimlaneActors.size());
            for (Map.Entry<String, String> entry : swimlaneActors.entrySet()) {
                stream.writeUTF(entry.getKey());
                stream.writeUTF(entry.getValue());
            }
        } else {
            stream.writeInt(0);
        }

        // Node instances, sorted by id for a deterministic byte stream.
        List<NodeInstance> nodeInstances = new ArrayList<NodeInstance>(workFlow.getNodeInstances());
        Collections.sort(nodeInstances, new Comparator<NodeInstance>() {
            public int compare(NodeInstance o1, NodeInstance o2) {
                // NOTE(review): id difference cast to int — assumes ids stay
                // close enough not to overflow; confirm against id allocation.
                return (int) (o1.getId() - o2.getId());
            }
        });
        for (NodeInstance nodeInstance : nodeInstances) {
            stream.writeShort(PersisterEnums.NODE_INSTANCE);
            writeNodeInstance(context, nodeInstance);
        }
        stream.writeShort(PersisterEnums.END);

        // Exclusive group instances: count, then per group the member node ids.
        List<ContextInstance> exclusiveGroupInstances = workFlow.getContextInstances(ExclusiveGroup.EXCLUSIVE_GROUP);
        if (exclusiveGroupInstances == null) {
            stream.writeInt(0);
        } else {
            stream.writeInt(exclusiveGroupInstances.size());
            for (ContextInstance contextInstance : exclusiveGroupInstances) {
                ExclusiveGroupInstance exclusiveGroupInstance = (ExclusiveGroupInstance) contextInstance;
                Collection<NodeInstance> groupNodeInstances = exclusiveGroupInstance.getNodeInstances();
                stream.writeInt(groupNodeInstances.size());
                for (NodeInstance nodeInstance : groupNodeInstances) {
                    stream.writeLong(nodeInstance.getId());
                }
            }
        }

        // Process variables.
        VariableScopeInstance variableScopeInstance =
                (VariableScopeInstance) workFlow.getContextInstance(VariableScope.VARIABLE_SCOPE);
        Map<String, Object> variables = variableScopeInstance.getVariables();
        List<String> keys = new ArrayList<String>(variables.keySet());
        Collection<Object> values = variables.values();
        Collections.sort(keys, new Comparator<String>() {
            public int compare(String o1, String o2) {
                return o1.compareTo(o2);
            }
        });
        // Process Variables
        // - Number of non null Variables = nonnullvariables.size()
        // For Each Variable
        //     - Variable Key
        //     - Marshalling Strategy Index
        //     - Marshalled Object
        Collection<Object> notNullValues = new ArrayList<Object>();
        for (Object value : values) {
            if (value != null) {
                notNullValues.add(value);
            }
        }
        stream.writeInt(notNullValues.size());
        for (String key : keys) {
            Object object = variables.get(key);
            if (object != null) {
                stream.writeUTF(key);
                // New marshalling algorithm when using strategies
                int useNewMarshallingStrategyAlgorithm = -2;
                stream.writeInt(useNewMarshallingStrategyAlgorithm);
                // Choose first strategy that accepts the object (what was always done)
                ObjectMarshallingStrategy strategy = context.objectMarshallingStrategyStore.getStrategyObject(object);
                stream.writeUTF(strategy.getClass().getName());
                strategy.write(stream, object);
            }
        }
        return null;
    }
}
public class TimeBaseObject {

    /**
     * Requests that the <code>oddBase</code> field be set to the
     * specified value. The local value will be updated immediately and an
     * event will be propagated through the system to notify all listeners
     * that the attribute did change. Proxied copies of this object (on
     * clients) will apply the value change when they received the
     * attribute changed notification.
     */
    @Generated(value = { "com.threerings.presents.tools.GenDObjectTask" })
    public void setOddBase(long value) {
        // Capture the previous value before overwriting it, so the change
        // notification carries both old and new values.
        long ovalue = this.oddBase;
        requestAttributeChange(ODD_BASE, Long.valueOf(value), Long.valueOf(ovalue));
        // Apply locally right away; remote proxies update on notification.
        this.oddBase = value;
    }
}
public class UpdateAdGroup {

    /**
     * Runs the example: pauses the ad group with the given ID and, optionally,
     * sets a new CPC bid on it, then prints the updated ad group.
     *
     * @param adWordsServices the services factory.
     * @param session         the session.
     * @param adGroupId       the ID of the ad group to update.
     * @param bidMicroAmount  the optional bid amount in micros to use for the ad group bid.
     * @throws ApiException    if the API request failed with one or more service errors.
     * @throws RemoteException if the API request failed due to other errors.
     */
    public static void runExample(AdWordsServicesInterface adWordsServices, AdWordsSession session,
            Long adGroupId, @Nullable Long bidMicroAmount) throws RemoteException {
        // Get the AdGroupService.
        AdGroupServiceInterface adGroupService = adWordsServices.get(session, AdGroupServiceInterface.class);

        // Create an ad group with the specified ID. Only the fields set here
        // are modified by the SET operation below.
        AdGroup adGroup = new AdGroup();
        adGroup.setId(adGroupId);

        // Update the CPC bid if specified.
        if (bidMicroAmount != null) {
            BiddingStrategyConfiguration biddingStrategyConfiguration = new BiddingStrategyConfiguration();
            Money cpcBidMoney = new Money();
            cpcBidMoney.setMicroAmount(bidMicroAmount);
            CpcBid cpcBid = new CpcBid();
            cpcBid.setBid(cpcBidMoney);
            biddingStrategyConfiguration.setBids(new Bids[] { cpcBid });
            adGroup.setBiddingStrategyConfiguration(biddingStrategyConfiguration);
        }

        // Pause the ad group.
        adGroup.setStatus(AdGroupStatus.PAUSED);

        // Create operations.
        AdGroupOperation operation = new AdGroupOperation();
        operation.setOperand(adGroup);
        operation.setOperator(Operator.SET);
        AdGroupOperation[] operations = new AdGroupOperation[] { operation };

        // Update ad group.
        AdGroupReturnValue result = adGroupService.mutate(operations);

        // Display ad groups.
        for (AdGroup adGroupResult : result.getValue()) {
            BiddingStrategyConfiguration biddingStrategyConfiguration = adGroupResult.getBiddingStrategyConfiguration();
            // Find the CpcBid in the bidding strategy configuration's bids collection.
            Long cpcBidMicros = null;
            if (biddingStrategyConfiguration != null) {
                if (biddingStrategyConfiguration.getBids() != null) {
                    for (Bids bid : biddingStrategyConfiguration.getBids()) {
                        if (bid instanceof CpcBid) {
                            cpcBidMicros = ((CpcBid) bid).getBid().getMicroAmount();
                            break;
                        }
                    }
                }
            }
            System.out.printf("Ad group with ID %d and name '%s' updated to have status '%s' and CPC bid %d%n",
                    adGroupResult.getId(), adGroupResult.getName(), adGroupResult.getStatus(), cpcBidMicros);
        }
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcDamper ( ) { } }
if ( ifcDamperEClass == null ) { ifcDamperEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 169 ) ; } return ifcDamperEClass ;
public class Zab {

    /**
     * Removes a peer from the cluster. If you send a remove request in a
     * non-broadcasting phase, the operation will fail.
     *
     * @param peerId the id of the peer who will be removed from the cluster.
     * @param ctx    context to be provided to the callback.
     * @throws ZabException.InvalidPhase           if Zab is not in broadcasting phase.
     * @throws ZabException.TooManyPendingRequests if there is a pending snapshot
     *                                             request.
     */
    public void remove(String peerId, Object ctx) throws InvalidPhase, TooManyPendingRequests {
        // Delegate to the main thread, which owns all cluster-membership state.
        this.mainThread.remove(peerId, ctx);
    }
}
public class TypeValidator {

    /**
     * Expect the type to be anything but the null or void type. If the expectation is not met,
     * issue a warning at the provided node's source code position. Note that a union type that
     * includes the void type and at least one other type meets the expectation.
     *
     * @return Whether the expectation was met.
     */
    boolean expectNotNullOrUndefined(NodeTraversal t, Node n, JSType type, String msg, JSType expectedType) {
        // Only a known type that is entirely null/undefined (and not a
        // forward-declared, unresolved name) violates the expectation.
        if (!type.isNoType() && !type.isUnknownType()
                && type.isSubtypeOf(nullOrUndefined)
                && !containsForwardDeclaredUnresolvedName(type)) {
            // There's one edge case right now that we don't handle well, and
            // that we don't want to warn about.
            //   if (this.x == null) {
            //     this.initializeX();
            //     this.x.foo();
            //   }
            // In this case, we incorrectly type x because of how we
            // infer properties locally. See issue 109.
            // http://blickly.github.io/closure-compiler-issues/#109
            // We do not do this inference globally.
            if (n.isGetProp() && !t.inGlobalScope() && type.isNullType()) {
                return true;
            }
            mismatch(n, msg, type, expectedType);
            return false;
        }
        return true;
    }
}
public class EDBConverter { /** * Converts a list of EDBObjects to a list of models of the given model type . */ public < T > List < T > convertEDBObjectsToModelObjects ( Class < T > model , List < EDBObject > objects ) { } }
List < T > models = new ArrayList < > ( ) ; for ( EDBObject object : objects ) { T instance = convertEDBObjectToModel ( model , object ) ; if ( instance != null ) { models . add ( instance ) ; } } return models ;
public class CommandLineReader { /** * This is a degenerate implementation . * I don ' t know how to keep this from blocking if we try to read more * than one char . . . There is no available ( ) for Readers ? ? */ public int read ( char buff [ ] , int off , int len ) throws IOException { } }
int b = read ( ) ; if ( b == - 1 ) return - 1 ; // EOF , not zero read apparently else { buff [ off ] = ( char ) b ; return 1 ; }
public class RelationalOperationsMatrix {

    /**
     * Checks whether the scl string encodes the "crosses" relation for the
     * given geometry dimensions.
     *
     * <p>Two patterns are recognized: for mixed dimensions (area-line,
     * area-point, line-point) the first nine characters must match
     * {@code T*****T**}; for line-line they must match {@code 0********}.</p>
     */
    private static boolean crosses_(String scl, int dim_a, int dim_b) {
        if (dim_a > dim_b) {
            // Valid for area-Line, area-Point, Line-Point: T*****T**
            return scl.charAt(0) == 'T'
                    && scl.charAt(1) == '*'
                    && scl.charAt(2) == '*'
                    && scl.charAt(3) == '*'
                    && scl.charAt(4) == '*'
                    && scl.charAt(5) == '*'
                    && scl.charAt(6) == 'T'
                    && scl.charAt(7) == '*'
                    && scl.charAt(8) == '*';
        }
        if (dim_a == 1 && dim_b == 1) {
            // Valid for Line-Line: 0********
            return scl.charAt(0) == '0'
                    && scl.charAt(1) == '*'
                    && scl.charAt(2) == '*'
                    && scl.charAt(3) == '*'
                    && scl.charAt(4) == '*'
                    && scl.charAt(5) == '*'
                    && scl.charAt(6) == '*'
                    && scl.charAt(7) == '*'
                    && scl.charAt(8) == '*';
        }
        return false;
    }
}
public class UimaPOSTagger {

    /**
     * Builds typed spans from BIO-style chunk tags (this is from OpenNLP).
     *
     * <p>Tags starting with {@code B-} begin a new span, {@code I-} continues
     * the current span when its type matches, and anything else is treated as
     * outside ({@code O}). Each emitted span is [start, end) over token
     * indices, typed with the tag suffix after {@code B-}/{@code I-}.</p>
     *
     * @param toks the tokens (only its length is used, for presizing)
     * @param tags one BIO tag per token, parallel to {@code toks}
     * @return the typed spans found, in order of appearance
     */
    public static List<TypedSpan> createSpanList(String[] toks, String[] tags) {
        // initialize with the list maximum size
        List<TypedSpan> phrases = new ArrayList<TypedSpan>(toks.length);
        String startTag = "";
        int startIndex = 0;
        boolean foundPhrase = false;

        for (int ci = 0, cn = tags.length; ci < cn; ci++) {
            String pred = tags[ci];
            // Normalize any tag that is neither B- nor I- to "O" (outside).
            if (!tags[ci].startsWith("B-") && !tags[ci].startsWith("I-")) {
                pred = "O";
            }
            if (pred.startsWith("B-") || (!pred.equals("I-" + startTag) && !pred.equals("O"))) { // start
                // An I- tag with a mismatched type also starts a new span.
                if (foundPhrase) { // handle the last phrase
                    phrases.add(new TypedSpan(startIndex, ci, startTag));
                }
                startIndex = ci;
                startTag = pred.substring(2);
                foundPhrase = true;
            } else if (pred.equals("I-" + startTag)) { // middle
                // do nothing — still inside the current span
            } else if (foundPhrase) { // end
                phrases.add(new TypedSpan(startIndex, ci, startTag));
                foundPhrase = false;
                startTag = "";
            }
        }
        if (foundPhrase) { // leftover span running to the end of input
            phrases.add(new TypedSpan(startIndex, tags.length, startTag));
        }
        return phrases;
    }
}
public class BaseBigtableInstanceAdminClient { /** * Lists information about app profiles in an instance . * < p > Sample code : * < pre > < code > * try ( BaseBigtableInstanceAdminClient baseBigtableInstanceAdminClient = BaseBigtableInstanceAdminClient . create ( ) ) { * InstanceName parent = InstanceName . of ( " [ PROJECT ] " , " [ INSTANCE ] " ) ; * for ( AppProfile element : baseBigtableInstanceAdminClient . listAppProfiles ( parent ) . iterateAll ( ) ) { * / / doThingsWith ( element ) ; * < / code > < / pre > * @ param parent The unique name of the instance for which a list of app profiles is requested . * Values are of the form ` projects / & lt ; project & gt ; / instances / & lt ; instance & gt ; ` . Use * ` & lt ; instance & gt ; = ' - ' ` to list AppProfiles for all Instances in a project , e . g . , * ` projects / myproject / instances / - ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final ListAppProfilesPagedResponse listAppProfiles ( InstanceName parent ) { } }
ListAppProfilesRequest request = ListAppProfilesRequest . newBuilder ( ) . setParent ( parent == null ? null : parent . toString ( ) ) . build ( ) ; return listAppProfiles ( request ) ;
public class PolicyDelta {

    /**
     * <pre>
     * The delta for Bindings between two policies.
     * </pre>
     *
     * <code>repeated .google.iam.v1.BindingDelta binding_deltas = 1;</code>
     *
     * @return the internal list of binding deltas (generated protobuf accessor;
     *         callers must not modify the returned list)
     */
    public java.util.List<com.google.iam.v1.BindingDelta> getBindingDeltasList() {
        return bindingDeltas_;
    }
}
public class DouglasPeucker {

    /**
     * Simplifies a part of the <code>points</code>. The <code>fromIndex</code> and
     * <code>lastIndex</code> are guaranteed to be kept.
     *
     * <p>In approximate mode the range is cut into fixed-size segments of 500
     * points that are simplified independently (faster, slightly less exact);
     * otherwise the whole range is simplified in one pass.</p>
     *
     * @param points    The PointList to simplify
     * @param fromIndex Start index to simplify, should be &gt;= <code>lastIndex</code>
     * @param lastIndex Simplify up to this index
     * @return The number of removed points
     */
    public int simplify(PointList points, int fromIndex, int lastIndex) {
        int removed = 0;
        int size = lastIndex - fromIndex;
        if (approx) {
            int delta = 500;
            // +1 so a remainder shorter than delta still gets its own segment.
            int segments = size / delta + 1;
            int start = fromIndex;
            for (int i = 0; i < segments; i++) {
                // start of next is end of last segment, except for the last
                removed += subSimplify(points, start, Math.min(lastIndex, start + delta));
                start += delta;
            }
        } else {
            removed = subSimplify(points, fromIndex, lastIndex);
        }

        // Compact the list only if something was actually removed.
        if (removed > 0)
            compressNew(points, removed);
        return removed;
    }
}
public class ModelMojoReader {

    /**
     * Populates {@code _model} from the parsed MOJO key/value store
     * ({@code _lkv}) and then delegates to the model-specific reader.
     *
     * @param readModelDescriptor whether to also build and attach the model
     *                            descriptor after reading the model data
     * @throws IOException if reading the model data fails
     */
    private void readAll(final boolean readModelDescriptor) throws IOException {
        // Column names and per-column domains come first; the response column
        // (for supervised models) is by convention the last column.
        String[] columns = (String[]) _lkv.get("[columns]");
        String[][] domains = parseModelDomains(columns.length);
        boolean isSupervised = readkv("supervised");
        _model = makeModel(columns, domains, isSupervised ? columns[columns.length - 1] : null);

        // General model metadata.
        _model._uuid = readkv("uuid");
        _model._h2oVersion = readkv("h2o_version", "unknown");
        _model._category = hex.ModelCategory.valueOf((String) readkv("category"));
        _model._supervised = isSupervised;
        _model._nfeatures = readkv("n_features");
        _model._nclasses = readkv("n_classes");
        _model._balanceClasses = readkv("balance_classes");
        _model._defaultThreshold = readkv("default_threshold");
        _model._priorClassDistrib = readkv("prior_class_distrib");
        _model._modelClassDistrib = readkv("model_class_distrib");
        _model._offsetColumn = readkv("offset_column");
        _model._mojo_version = ((Number) readkv("mojo_version")).doubleValue();

        // Reject MOJOs newer than this reader understands before parsing the
        // algorithm-specific payload.
        checkMaxSupportedMojoVersion();
        readModelData();
        if (readModelDescriptor) {
            _model._modelDescriptor = readModelDescriptor();
        }
    }
}
public class RLSControllerFactory {

    /**
     * Returns the singleton RLSController to which calls to suspend and resume
     * are delegated.
     *
     * <p>The instance is created lazily by reflection. An
     * {@code UnsupportedOperationException} from the implementation is
     * deliberately swallowed (the controller is simply unavailable on this
     * platform); any other failure is recorded via FFDC and rethrown. May
     * return {@code null} if construction was skipped or failed.</p>
     */
    static RLSController getRLSController() throws Exception {
        if (tc.isEntryEnabled())
            Tr.entry(tc, "getRLSController");

        if (_instance == null) {
            try {
                // Reflective construction keeps this factory free of a
                // compile-time dependency on the implementation class.
                _instance = (RLSController) Class.forName("com.ibm.ws.recoverylog.spi.RLSControllerImpl").newInstance();
            } catch (UnsupportedOperationException uoe) {
                // No FFDC code needed.
                // Catch and swallow this exception
            } catch (Exception e) {
                FFDCFilter.processException(e, "com.ibm.ws.recoverylog.spi.RLSControllerFactory", "75");
                if (tc.isEventEnabled())
                    Tr.event(tc, "Rethrowing exception");
                if (tc.isEntryEnabled())
                    Tr.exit(tc, "getRLSController", e);
                throw e;
            }
        }

        if (tc.isEntryEnabled())
            Tr.exit(tc, "getRLSController", _instance);
        return _instance;
    }
}
public class SeleniumBrowser {

    /**
     * Creates a local web driver for the given browser type.
     *
     * <p>Firefox gets a customized profile whose download directory points at
     * the endpoint's temporary storage. HtmlUnit resolves an emulated browser
     * version from the endpoint configuration and honors its JavaScript flag.
     * All other supported types are instantiated with defaults.</p>
     *
     * @param browserType a Selenium {@code BrowserType} constant
     * @return the new driver instance
     * @throws CitrusRuntimeException if the browser type is not supported
     */
    private WebDriver createLocalWebDriver(String browserType) {
        switch (browserType) {
            case BrowserType.FIREFOX:
                FirefoxProfile firefoxProfile = getEndpointConfiguration().getFirefoxProfile();

                /* set custom download folder */
                firefoxProfile.setPreference("browser.download.dir", temporaryStorage.toFile().getAbsolutePath());

                DesiredCapabilities defaults = DesiredCapabilities.firefox();
                defaults.setCapability(FirefoxDriver.PROFILE, firefoxProfile);
                return new FirefoxDriver(defaults);

            case BrowserType.IE:
                return new InternetExplorerDriver();

            case BrowserType.EDGE:
                return new EdgeDriver();

            case BrowserType.SAFARI:
                return new SafariDriver();

            case BrowserType.CHROME:
                return new ChromeDriver();

            case BrowserType.GOOGLECHROME:
                return new ChromeDriver();

            case BrowserType.HTMLUNIT:
                // Map the configured version string onto an HtmlUnit emulation;
                // unknown values fall back to HtmlUnit's default.
                BrowserVersion browserVersion = null;
                if (getEndpointConfiguration().getVersion().equals("FIREFOX")) {
                    browserVersion = BrowserVersion.FIREFOX_45;
                } else if (getEndpointConfiguration().getVersion().equals("INTERNET_EXPLORER")) {
                    browserVersion = BrowserVersion.INTERNET_EXPLORER;
                } else if (getEndpointConfiguration().getVersion().equals("EDGE")) {
                    browserVersion = BrowserVersion.EDGE;
                } else if (getEndpointConfiguration().getVersion().equals("CHROME")) {
                    browserVersion = BrowserVersion.CHROME;
                }

                HtmlUnitDriver htmlUnitDriver;
                if (browserVersion != null) {
                    htmlUnitDriver = new HtmlUnitDriver(browserVersion);
                } else {
                    htmlUnitDriver = new HtmlUnitDriver();
                }
                htmlUnitDriver.setJavascriptEnabled(getEndpointConfiguration().isJavaScript());
                return htmlUnitDriver;

            default:
                throw new CitrusRuntimeException("Unsupported local browser type: " + browserType);
        }
    }
}
public class ConfigurationTree { /** * The same as { @ link # findAllRootPaths ( ) } , but returns only paths started in provided configuration . * @ param confType configuration type to get all properties from * @ return all root objects and direct root values ( only paths 1 level paths ) declared in * specified configuration class ( directly ) * @ see # findAllRootPaths ( ) */ public List < ConfigPath > findAllRootPathsFrom ( final Class < ? extends Configuration > confType ) { } }
return paths . stream ( ) . filter ( it -> ! it . getPath ( ) . contains ( DOT ) && it . getRootDeclarationClass ( ) == confType ) . collect ( Collectors . toList ( ) ) ;
public class TextObject {

    /**
     * Setter for objectId — sets the id of the object as found in the text.
     * (Generated UIMA feature accessor; writes through the low-level CAS.)
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setObjectId(String v) {
        // Guard against a CAS type system that lacks this feature.
        if (TextObject_Type.featOkTst && ((TextObject_Type) jcasType).casFeat_objectId == null)
            jcasType.jcas.throwFeatMissing("objectId", "de.julielab.jules.types.TextObject");
        jcasType.ll_cas.ll_setStringValue(addr, ((TextObject_Type) jcasType).casFeatCode_objectId, v);
    }
}
public class DetectFiducialSquareBinary { /** * Sees how many pixels were positive and negative in each square region . Then decides if they * should be 0 or 1 or unknown */ protected boolean thresholdBinaryNumber ( ) { } }
int lower = ( int ) ( N * ( ambiguityThreshold / 2.0 ) ) ; int upper = ( int ) ( N * ( 1 - ambiguityThreshold / 2.0 ) ) ; final int totalElements = getTotalGridElements ( ) ; for ( int i = 0 ; i < totalElements ; i ++ ) { if ( counts [ i ] < lower ) { classified [ i ] = 0 ; } else if ( counts [ i ] > upper ) { classified [ i ] = 1 ; } else { // it ' s ambiguous so just fail return true ; } } return false ;
public class FileSystemHeartbeatPResponse {

    /**
     * <code>optional .alluxio.grpc.file.FileSystemCommand command = 1;</code>
     *
     * @return the command message, or the default instance when unset
     *         (generated protobuf accessor — never returns {@code null})
     */
    public alluxio.grpc.FileSystemCommandOrBuilder getCommandOrBuilder() {
        return command_ == null ? alluxio.grpc.FileSystemCommand.getDefaultInstance() : command_;
    }
}
public class BaseUserService {

    /**
     * Deletes a token.
     *
     * <p>Note: If you do not plan to use the UsernamePassword provider just
     * provide an empty implementation.</p>
     *
     * <p>Delegates to the Java-side {@code doDeleteToken}, converts the result
     * to a Scala {@code MailToken} option, and adapts the future to Scala.</p>
     *
     * @param tokenId the token id
     */
    @Override
    public Future<scala.Option<MailToken>> deleteToken(String tokenId) {
        return toScala(doDeleteToken(tokenId).thenApply(this::toMailToken));
    }
}
public class AbstractAppender { /** * Resets the match index when a response fails . */ protected void resetMatchIndex ( RaftMemberContext member , AppendResponse response ) { } }
if ( response . lastLogIndex ( ) < member . getMatchIndex ( ) ) { member . setMatchIndex ( response . lastLogIndex ( ) ) ; log . trace ( "Reset match index for {} to {}" , member , member . getMatchIndex ( ) ) ; }
public class RQ {

    /**
     * Convenience method to compute an RQ decomposition.
     *
     * @param A Matrix to decompose. Not modified — the factorization works on
     *          a dense copy.
     * @return Newly allocated decomposition
     */
    public static RQ factorize(Matrix A) {
        return new RQ(A.numRows(), A.numColumns()).factor(new DenseMatrix(A));
    }
}
public class CategoryChart {

    /**
     * Add a series for a Category type chart using double arrays.
     *
     * @param seriesName the name of the series
     * @param xData the X-Axis data
     * @param yData the Y-Axis data
     * @return A Series object that you can set properties on
     */
    public CategorySeries addSeries(String seriesName, double[] xData, double[] yData) {
        // Delegate to the full overload with no error-bar data.
        return addSeries(seriesName, xData, yData, null);
    }
}
public class ExecutionContext {

    /**
     * Enables further validation on an existing accumulating OrFuture by passing validation
     * functions.
     *
     * @param <G>         the Good type of the argument OrFuture
     * @param <ERR>       the type of the error message contained in the accumulating failure
     * @param or          the accumulating OrFuture
     * @param validations the validation functions
     * @return the original result if it passed all validations, or a Bad with all failures
     */
    @SafeVarargs
    public final <G, ERR> OrFuture<G, Every<ERR>> when(
            OrFuture<? extends G, ? extends Every<? extends ERR>> or,
            Function<? super G, ? extends Validation<ERR>>... validations) {
        OrPromise<G, Every<ERR>> promise = promise();
        // When the source future completes, run the validations over its value
        // and complete the returned future with the (possibly failed) result.
        or.onComplete(o -> promise.complete(Accumulation.when(o, validations)));
        return promise.future();
    }
}
public class BloatedAssignmentScope {

    /**
     * Returns an existing scope block that has the same target as the one looked for.
     * Children are searched depth-first and win over the current block; the
     * current block is only returned if no matching child exists.
     *
     * @param sb     the scope block to start with
     * @param start  the current pc
     * @param target the target to look for
     * @return the scope block found or null
     */
    private ScopeBlock findScopeBlockWithTarget(ScopeBlock sb, int start, int target) {
        ScopeBlock parentBlock = null;
        int finishLocation = sb.getFinish();
        // This block is a candidate if it contains `start` and either ends at
        // or before the target, or is a non-loop goto block.
        if ((sb.getStart() < start) && (finishLocation >= start)
                && ((finishLocation <= target) || (sb.isGoto() && !sb.isLoop()))) {
            parentBlock = sb;
        }

        List<ScopeBlock> children = sb.getChildren();
        if (children != null) {
            for (ScopeBlock child : children) {
                ScopeBlock targetBlock = findScopeBlockWithTarget(child, start, target);
                if (targetBlock != null) {
                    // A more specific (deeper) match takes precedence.
                    return targetBlock;
                }
            }
        }

        return parentBlock;
    }
}
public class TileWriter { /** * Get the length of the bounding box projected using the transformation * @ param boundingBox * @ param toWebMercatorTransform * @ return length */ private static double getLength ( BoundingBox boundingBox , ProjectionTransform toWebMercatorTransform ) { } }
// Project the box into web mercator first, then delegate to the
// single-argument length overload.
BoundingBox transformedBoundingBox = boundingBox . transform ( toWebMercatorTransform ) ; return getLength ( transformedBoundingBox ) ;
public class JavaType { /** * JavaType生成 * Build a JavaType from a field, using its generic (parameterised) type * rather than the raw class so type arguments are preserved . * @ param implementClass implementation class info * @ param field the reflective Field * @ return JavaType */ public static JavaType of ( final ImplementClass implementClass , final Field field ) { } }
return of ( implementClass , field . getGenericType ( ) ) ;
public class StaticStringCharacteristic { /** * { @ inheritDoc } */ @ Override protected CompletableFuture < String > getValue ( ) { } }
return CompletableFuture . completedFuture ( value ) . thenApply ( s -> s != null ? s : "Unavailable" ) ;
public class Code { /** * Calls the non - private instance method { @ code method } of { @ code instance } * using { @ code args } and assigns the result to { @ code target } . * @ param method a non - private , non - static , method declared on a class . May * not be an interface method or a constructor . * @ param target the local to receive the method ' s return value , or { @ code * null } if the return type is { @ code void } or if its value not needed . */ public < D , R > void invokeVirtual ( MethodId < D , R > method , Local < ? super R > target , Local < ? extends D > instance , Local < ? > ... args ) { } }
// NOTE(review): prototype(true) presumably includes the receiver ("this") in
// the call prototype, as required for a virtual invoke - confirm in MethodId.
invoke ( Rops . opInvokeVirtual ( method . prototype ( true ) ) , method , target , instance , args ) ;
public class RestRequestValidator { /** * Retrieve and validate vector clock value from the REST request . * " X _ VOLD _ VECTOR _ CLOCK " is the vector clock header . * @ return true if present , false if missing */ protected boolean hasVectorClock ( boolean isVectorClockOptional ) { } }
/*
 * Parses the X_VOLD_VECTOR_CLOCK header (JSON) into this.parsedVectorClock.
 * Returns true when a valid clock is parsed, or when the header is absent but
 * optional. Writes a BAD_REQUEST error response and returns false when the
 * header is malformed, or missing while required.
 */
boolean result = false ; String vectorClockHeader = this . request . getHeader ( RestMessageHeaders . X_VOLD_VECTOR_CLOCK ) ; if ( vectorClockHeader != null ) { ObjectMapper mapper = new ObjectMapper ( ) ; try { VectorClockWrapper vcWrapper = mapper . readValue ( vectorClockHeader , VectorClockWrapper . class ) ; this . parsedVectorClock = new VectorClock ( vcWrapper . getVersions ( ) , vcWrapper . getTimestamp ( ) ) ; result = true ; } catch ( Exception e ) { logger . error ( "Exception while parsing and constructing vector clock" , e ) ; RestErrorHandler . writeErrorResponse ( this . messageEvent , HttpResponseStatus . BAD_REQUEST , "Invalid Vector Clock" ) ; } } else if ( ! isVectorClockOptional ) { logger . error ( "Error when validating request. Missing Vector Clock" ) ; RestErrorHandler . writeErrorResponse ( this . messageEvent , HttpResponseStatus . BAD_REQUEST , "Missing Vector Clock" ) ; } else { result = true ; } return result ;
public class EventLocalMap { /** * Put a key and value pair into the storage map . * @ param key * @ param value */ public void put ( K key , V value ) { } }
/*
 * Stores the pair in a two-dimensional value table addressed directly by the
 * raw hashCode: row = hash / SIZE_ROW, column = hash & (SIZE_ROW - 1).
 * NOTE(review): a negative hashCode would produce a negative row and a
 * negative keys[] index; presumably validateKey/validateTable reject
 * out-of-range hashes before the writes - confirm.
 */
final int hash = key . hashCode ( ) ; final int row = hash / SIZE_ROW ; final int column = hash & ( SIZE_ROW - 1 ) ; // DON ' T use the % operator as we // need the result to be // non - negative ( - 1%16 is - 1 for // example ) validateKey ( hash ) ; validateTable ( row ) ; this . values [ row ] [ column ] = value ; this . keys [ hash ] = key ;
public class ManagedInstanceKeysInner { /** * Creates or updates a managed instance key . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param managedInstanceName The name of the managed instance . * @ param keyName The name of the managed instance key to be operated on ( updated or created ) . * @ param parameters The requested managed instance key resource state . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < ManagedInstanceKeyInner > beginCreateOrUpdateAsync ( String resourceGroupName , String managedInstanceName , String keyName , ManagedInstanceKeyInner parameters , final ServiceCallback < ManagedInstanceKeyInner > serviceCallback ) { } }
// Wrap the service-response observable in a ServiceFuture and attach the
// user-supplied callback.
return ServiceFuture . fromResponse ( beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , managedInstanceName , keyName , parameters ) , serviceCallback ) ;
public class ApiOvhMe { /** * Return main data about the object the processing of the order generated * REST : GET / me / order / { orderId } / paymentMeans * @ param orderId [ required ] */ public OvhPaymentMeans order_orderId_paymentMeans_GET ( Long orderId ) throws IOException { } }
String qPath = "/me/order/{orderId}/paymentMeans" ; StringBuilder sb = path ( qPath , orderId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhPaymentMeans . class ) ;
public class ResultHandlerContext { /** * Adds the specified result handler to the validator under construction . * @ param resultHandler Result handler to be added . * @ return Context allowing further construction of the validator using the DSL . */ public ResultHandlerContext < DPO , RI , RO , RHI > handleWith ( ResultHandler < RHI > resultHandler ) { } }
if ( resultHandler != null ) { addedResultHandlers . add ( resultHandler ) ; builtValidator . addResultHandler ( resultHandler ) ; } // Stay in the same context and re - use the same instance because no type has changed return this ;
public class DirectedMultigraph { /** * { @ inheritDoc } */ public Set < DirectedTypedEdge < T > > inEdges ( int vertex ) { } }
SparseDirectedTypedEdgeSet < T > edges = vertexToEdges . get ( vertex ) ; return ( edges == null ) ? Collections . < DirectedTypedEdge < T > > emptySet ( ) : new EdgeListWrapper ( edges . incoming ( ) ) ;
public class MergePath { /** * Returns the first matching path . */ public PathImpl getWritePath ( ) { } }
ArrayList < PathImpl > pathList = getPathList ( ) ; if ( pathList . size ( ) == 0 ) return new NotFoundPath ( getSchemeMap ( ) , _pathname ) ; else { return pathList . get ( 0 ) ; }
public class ExpressionUtil { /** * Collect / dedup all terminal / leaf nodes of an expression tree * @ param expr source expression tree * @ return dedup - ed terminal expressions */ public static Set < AbstractExpression > collectTerminals ( AbstractExpression expr ) { } }
final Set < AbstractExpression > result = new HashSet < > ( ) ; collectTerminals ( expr , result ) ; return result ;
public class Firmata { /** * Disconnect from the SerialPort object that we are communicating with over the Firmata protocol . * @ return True if the SerialPort was closed . False if the port failed to close . */ private Boolean removeSerialPort ( ) { } }
Boolean ret = true ; if ( serialPort != null ) { ret = serialPort . disconnect ( ) ; serialPort = null ; } return ret ;
public class SharedPreferencesUtils { /** * Retrieve a double value from the preferences . * @ param preferences the preferences * @ param key the key * @ param defaultValue default value if the key is not present * @ return the double value . */ public static double getDouble ( SharedPreferences preferences , String key , double defaultValue ) { } }
String stored = preferences . getString ( key , null ) ; if ( TextUtils . isEmpty ( stored ) ) { return defaultValue ; } return Double . parseDouble ( stored ) ;
public class DocumentRootImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } }
// Generated EMF reflective accessor: dispatch on the feature ID. For the
// mixed/map features, coreType selects the raw FeatureMap/EMap representation
// instead of the wrapped view. Unknown IDs fall through to the superclass.
switch ( featureID ) { case DroolsPackage . DOCUMENT_ROOT__MIXED : if ( coreType ) return getMixed ( ) ; return ( ( FeatureMap . Internal ) getMixed ( ) ) . getWrapper ( ) ; case DroolsPackage . DOCUMENT_ROOT__XMLNS_PREFIX_MAP : if ( coreType ) return getXMLNSPrefixMap ( ) ; else return getXMLNSPrefixMap ( ) . map ( ) ; case DroolsPackage . DOCUMENT_ROOT__XSI_SCHEMA_LOCATION : if ( coreType ) return getXSISchemaLocation ( ) ; else return getXSISchemaLocation ( ) . map ( ) ; case DroolsPackage . DOCUMENT_ROOT__GLOBAL : return getGlobal ( ) ; case DroolsPackage . DOCUMENT_ROOT__IMPORT : return getImport ( ) ; case DroolsPackage . DOCUMENT_ROOT__META_DATA : return getMetaData ( ) ; case DroolsPackage . DOCUMENT_ROOT__ON_ENTRY_SCRIPT : return getOnEntryScript ( ) ; case DroolsPackage . DOCUMENT_ROOT__ON_EXIT_SCRIPT : return getOnExitScript ( ) ; case DroolsPackage . DOCUMENT_ROOT__PACKAGE_NAME : return getPackageName ( ) ; case DroolsPackage . DOCUMENT_ROOT__PRIORITY : return getPriority ( ) ; case DroolsPackage . DOCUMENT_ROOT__RULE_FLOW_GROUP : return getRuleFlowGroup ( ) ; case DroolsPackage . DOCUMENT_ROOT__TASK_NAME : return getTaskName ( ) ; case DroolsPackage . DOCUMENT_ROOT__VERSION : return getVersion ( ) ; } return super . eGet ( featureID , resolve , coreType ) ;
public class I18N { /** * Returns a { @ link MessageSource } for the given types . * @ param types one or more classes whose resource bundles back the source ; * must be non - null and non - empty */ public static MessageSource of ( final Class ... types ) { } }
// Fail fast on null/empty input, then build a bundle-backed message source
// over all given types.
checkNotNull ( types ) ; checkArgument ( types . length > 0 ) ; return new ResourceBundleMessageSource ( ) . add ( false , types ) ;
public class GenericDao { /** * Query for the list of entities matching all given criteria . * @ param matches query criteria ( zero or more ) * @ return matching entities */ public List < ENTITY > find ( Match ... matches ) { } }
// Convenience overload: delegate with no explicit ordering. A mutable empty
// order list is passed in case the callee appends defaults to it.
return find ( Arrays . asList ( matches ) , new ArrayList < Order > ( 0 ) ) ;
public class AmazonApiGatewayV2Client { /** * The API mapping . * @ param getApiMappingRequest * @ return Result of the GetApiMapping operation returned by the service . * @ throws NotFoundException * The resource specified in the request was not found . * @ throws TooManyRequestsException * The client is sending more than the allowed number of requests per unit of time . * @ throws BadRequestException * One of the parameters in the request is invalid . * @ sample AmazonApiGatewayV2 . GetApiMapping */ @ Override public GetApiMappingResult getApiMapping ( GetApiMappingRequest request ) { } }
// Generated SDK entry point: run pre-execution request handlers, then execute.
request = beforeClientExecution ( request ) ; return executeGetApiMapping ( request ) ;
public class LPC {
    /**
     * Apply a parabolic window function to sample data.
     *
     * @param samples         samples to apply the window to; values in this
     *                        array are left unaltered
     * @param count           number of samples to use
     * @param start           index of the samples array to start at
     * @param increment       number of indices to step between valid samples
     *                        (for interleaved arrays)
     * @param windowedSamples receives the windowed values, packed contiguously
     *                        (increment of one)
     */
    public static void window(int[] samples, int count, int start, int increment, int[] windowedSamples) {
        final float half = count / 2.0f;
        final float halfSquared = half * half;
        // Signed distance from the window centre, advanced one sample per step.
        float offset = -half;
        final int end = count * increment + start;
        int out = 0;
        for (int idx = start; idx < end; idx += increment) {
            // Weight: 1 - (offset/half)^2, computed in float as before.
            float weight = 1.0f - ((offset * offset) / halfSquared);
            offset++;
            double scaled = ((double) samples[idx]) * weight;
            // Round half away from zero before truncating to int.
            scaled = (scaled > 0) ? scaled + 0.5 : scaled - 0.5;
            windowedSamples[out++] = (int) scaled;
        }
    }
}
public class IndexClause { /** * Returns true if field corresponding to fieldID is set ( has been assigned a value ) and false otherwise */ public boolean isSet ( _Fields field ) { } }
// Thrift-generated presence check: every _Fields constant is handled, so
// falling out of the switch can only mean an unknown/corrupt enum value.
if ( field == null ) { throw new IllegalArgumentException ( ) ; } switch ( field ) { case EXPRESSIONS : return isSetExpressions ( ) ; case START_KEY : return isSetStart_key ( ) ; case COUNT : return isSetCount ( ) ; } throw new IllegalStateException ( ) ;
public class Assert { /** * Asserts that two longs are equal . If they are not * an AssertionFailedError is thrown with the given message . */ static public void assertEquals ( String message , long expected , long actual ) { } }
// Box both values and delegate to the object-equality overload so failure
// formatting stays uniform across the assertEquals family.
assertEquals ( message , Long . valueOf ( expected ) , Long . valueOf ( actual ) ) ;
public class AnnotationDetector { /** * Utility method to scan the given package and handler for the annotation of * the given class . Its uses the Spring annotation detector * @ param clas the clas * @ param basePackage the base package * @ param handler the handler */ public static void scan ( final Class < ? extends Annotation > clas , final String [ ] basePackage , final AnnotationHandler handler ) { } }
final ClassPathScanningCandidateComponentProvider scanner = new ClassPathScanningCandidateComponentProvider ( false ) ; scanner . setResourceLoader ( new PathMatchingResourcePatternResolver ( Thread . currentThread ( ) . getContextClassLoader ( ) ) ) ; scanner . addIncludeFilter ( new AnnotationTypeFilter ( clas ) ) ; for ( final String pck : basePackage ) { for ( final BeanDefinition bd : scanner . findCandidateComponents ( pck ) ) { handler . handleAnnotationFound ( bd . getBeanClassName ( ) ) ; } }
public class DetourCommon {
    /**
     * Point-in-polygon test (pnpoly ray casting). All points are projected onto
     * the xz-plane, so the y-values are ignored.
     *
     * @param pt     the point, packed as [x, y, z]
     * @param verts  polygon vertices, packed as consecutive [x, y, z] triples
     * @param nverts number of polygon vertices
     * @return true when the point lies inside the polygon
     */
    static boolean pointInPolygon(float[] pt, float[] verts, int nverts) {
        // TODO: Replace pnpoly with triArea2D tests?
        boolean inside = false;
        // Walk each edge (prev -> cur); toggle whenever a horizontal ray from
        // the point crosses the edge in the xz-plane.
        for (int cur = 0, prev = nverts - 1; cur < nverts; prev = cur, cur++) {
            int a = cur * 3;
            int b = prev * 3;
            // Only edges straddling the point's z can be crossed; this guard
            // also keeps the division below safe from a zero denominator.
            if ((verts[a + 2] > pt[2]) != (verts[b + 2] > pt[2])) {
                float crossX = (verts[b] - verts[a]) * (pt[2] - verts[a + 2])
                        / (verts[b + 2] - verts[a + 2]) + verts[a];
                if (pt[0] < crossX) {
                    inside = !inside;
                }
            }
        }
        return inside;
    }
}
public class SystemClock { /** * / * [ deutsch ] * < p > Liefert die aktuelle seit [ 1970-01-01T00:00:00,000Z ] verstrichene * Zeit in Millisekunden . < / p > * ( Returns the current time elapsed since [ 1970-01-01T00:00:00,000Z ] in milliseconds . ) * @ return count of milliseconds since UNIX epoch without leap seconds * @ see # currentTimeInMicros ( ) */ public long currentTimeInMillis ( ) { } }
// Monotonic path: take UTC nanoseconds, strip leap seconds to get POSIX
// seconds, then reattach the sub-second part as milliseconds.
// NOTE(review): floorMod(nanos, MIO) suggests MIO scales nanos to millis
// within the second - confirm the MIO/MRD constants' meaning.
if ( this . monotonic || MONOTON_MODE ) { long nanos = this . utcNanos ( ) ; long secs = LeapSeconds . getInstance ( ) . strip ( Math . floorDiv ( nanos , MRD ) ) ; return Math . multiplyExact ( secs , 1000 ) + Math . floorMod ( nanos , MIO ) ; } else { return System . currentTimeMillis ( ) ; }
public class EndpointUtil { /** * Get the list of EC2 URLs given the zone name . * @ param dnsName The dns name of the zone - specific CNAME * @ param type CNAME or EIP that needs to be retrieved * @ return The list of EC2 URLs associated with the dns name */ private static Set < String > getEC2DiscoveryUrlsFromZone ( String dnsName , DiscoveryUrlType type ) { } }
Set < String > eipsForZone ; try { dnsName = "txt." + dnsName ; LOG . debug ( "The zone url to be looked up is {} :" , dnsName ) ; Set < String > ec2UrlsForZone = DnsResolver . getCNamesFromTxtRecord ( dnsName ) ; for ( String ec2Url : ec2UrlsForZone ) { LOG . debug ( "The eureka url for the dns name {} is {}" , dnsName , ec2Url ) ; ec2UrlsForZone . add ( ec2Url ) ; } if ( DiscoveryUrlType . CNAME . equals ( type ) ) { return ec2UrlsForZone ; } eipsForZone = new TreeSet < > ( ) ; for ( String cname : ec2UrlsForZone ) { String [ ] tokens = cname . split ( "\\." ) ; String ec2HostName = tokens [ 0 ] ; String [ ] ips = ec2HostName . split ( "-" ) ; StringBuilder eipBuffer = new StringBuilder ( ) ; for ( int ipCtr = 1 ; ipCtr < 5 ; ipCtr ++ ) { eipBuffer . append ( ips [ ipCtr ] ) ; if ( ipCtr < 4 ) { eipBuffer . append ( "." ) ; } } eipsForZone . add ( eipBuffer . toString ( ) ) ; } LOG . debug ( "The EIPS for {} is {} :" , dnsName , eipsForZone ) ; } catch ( Throwable e ) { throw new RuntimeException ( "Cannot get cnames bound to the region:" + dnsName , e ) ; } return eipsForZone ;
public class ExpressionResolverImpl { /** * Resolve the given string using any plugin and the DMR resolve method . * Tries pluggable ( e . g . vault ) resolution first ; only if that makes no * progress does it fall back to standard expression resolution . Returns the * input unchanged when neither mechanism resolves anything . */ private String resolveExpressionString ( final String unresolvedString ) throws OperationFailedException { } }
// parseAndResolve should only be providing expressions with no leading or trailing chars
assert unresolvedString . startsWith ( "${" ) && unresolvedString . endsWith ( "}" ) ; // Default result is no change from input String result = unresolvedString ; ModelNode resolveNode = new ModelNode ( new ValueExpression ( unresolvedString ) ) ; // Try plug - in resolution ; i . e . vault resolvePluggableExpression ( resolveNode ) ; if ( resolveNode . getType ( ) == ModelType . EXPRESSION ) { // resolvePluggableExpression did nothing . Try standard resolution String resolvedString = resolveStandardExpression ( resolveNode ) ; if ( ! unresolvedString . equals ( resolvedString ) ) { // resolveStandardExpression made progress result = resolvedString ; } // else there is nothing more we can do with this string } else { // resolvePluggableExpression made progress result = resolveNode . asString ( ) ; } return result ;
public class FieldFormatterRegistry { /** * Get the { @ link FieldFormatter } registered for the given class type . * @ param < T > the class type * @ param requiredType the class type to look up * @ return null when no formatter is registered for the type */ @ SuppressWarnings ( "unchecked" ) public < T > FieldFormatter < T > getFormatter ( final Class < T > requiredType ) { } }
// Unchecked cast is safe only if registrations key the map by the
// formatter's own element type - TODO confirm at the registration site.
return ( FieldFormatter < T > ) typeMap . get ( requiredType ) ;
public class ExternalChildResourceCollectionImpl { /** * Commits the changes in the external child resource childCollection . * This method returns a observable stream , either its observer ' s onError will be called with * { @ link CompositeException } if some resources failed to commit or onNext will be called if all resources * committed successfully . * @ return the observable stream */ public Observable < List < FluentModelTImpl > > commitAndGetAllAsync ( ) { } }
// Collect every successfully committed child resource emitted by commitAsync()
// into a single list (RxJava collect: Func0 supplies the seed list, Action2
// appends each item), emitted once the commit stream completes.
return commitAsync ( ) . collect ( new Func0 < List < FluentModelTImpl > > ( ) { public List < FluentModelTImpl > call ( ) { return new ArrayList < > ( ) ; } } , new Action2 < List < FluentModelTImpl > , FluentModelTImpl > ( ) { public void call ( List < FluentModelTImpl > state , FluentModelTImpl item ) { state . add ( item ) ; } } ) ;