signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class MortbayMuleAgentHttpServer { /** * Create a Jetty Server with the agent servlet installed */
private Server getServer ( MuleAgentConfig pConfig ) { } } | Server newServer = new Server ( ) ; Connector connector = new SelectChannelConnector ( ) ; if ( pConfig . getHost ( ) != null ) { connector . setHost ( pConfig . getHost ( ) ) ; } connector . setPort ( pConfig . getPort ( ) ) ; newServer . setConnectors ( new Connector [ ] { connector } ) ; return newServer ; |
public class MersenneTwisterFast { /** * Initialize the pseudo random number generator . Don ' t
* pass in a long that ' s bigger than an int ( Mersenne Twister
* only uses the first 32 bits for its seed ) .
* @ param seed */
synchronized public void setSeed ( final long seed ) { } } | // Due to a bug in java . util . Random clear up to 1.2 , we ' re
// doing our own Gaussian variable .
__haveNextNextGaussian = false ; mt = new int [ N ] ; mag01 = new int [ 2 ] ; mag01 [ 0 ] = 0x0 ; mag01 [ 1 ] = MATRIX_A ; mt [ 0 ] = ( int ) ( seed & 0xffffffff ) ; for ( mti = 1 ; mti < N ; mti ++ ) { mt [ mti ] = ( 1812433253 * ( mt [ mti - 1 ] ^ ( mt [ mti - 1 ] >>> 30 ) ) + mti ) ; /* See Knuth TAOCP Vol2 . 3rd Ed . P . 106 for multiplier . */
/* In the previous versions , MSBs of the seed affect */
/* only MSBs of the array mt [ ] . */
/* 2002/01/09 modified by Makoto Matsumoto */
mt [ mti ] &= 0xffffffff ; /* for > 32 bit machines */
} |
public class ListUtils { /** * Returns a new list containing all elements of the original list but
* < code > exclude < / code >
* @ param < T >
* Type of list elements
* @ param list
* Basic list for operation
* @ param exclude
* Element to exclude
* @ return A new List containing all elements of the original list but
* < code > exclude < / code > */
public static < T > List < T > getListWithout ( List < T > list , T exclude ) { } } | List < T > result = new ArrayList < > ( list . size ( ) ) ; for ( T t : list ) if ( ! t . equals ( exclude ) ) result . add ( t ) ; return result ; |
public class KamSummarizer { /** * returns number of deceases and directly _ decreases edges .
* @ param edges
* @ return */
private int getDecreasesEdges ( Collection < KamEdge > edges ) { } } | int count = 0 ; for ( KamEdge edge : edges ) { if ( edge . getRelationshipType ( ) == RelationshipType . DECREASES || edge . getRelationshipType ( ) == RelationshipType . DIRECTLY_DECREASES ) { count ++ ; } } return count ; |
public class AbstractWSelectList { /** * Indicates whether this list was present in the request .
* Lists that allow no option to be selected have a hidden input field , whose name is name - h to indicate that it is
* in the request .
* @ param request the request being responded to .
* @ return true if this list was present in the request , false if not . */
@ Override protected boolean isPresent ( final Request request ) { } } | if ( isAllowNoSelection ( ) ) { String id = getId ( ) ; return request . getParameter ( id + "-h" ) != null ; } else { return super . isPresent ( request ) ; } |
public class PolynomialOps { /** * Creates different polynomial root finders .
* @ param maxCoefficients The maximum number of coefficients that will be processed . This is the order + 1
* @ param which 0 = Sturm and 1 = companion matrix .
* @ return PolynomialRoots */
public static PolynomialRoots createRootFinder ( int maxCoefficients , RootFinderType which ) { } } | switch ( which ) { case STURM : FindRealRootsSturm sturm = new FindRealRootsSturm ( maxCoefficients , - 1 , 1e-10 , 200 , 200 ) ; return new WrapRealRootsSturm ( sturm ) ; case EVD : return new RootFinderCompanion ( ) ; default : throw new IllegalArgumentException ( "Unknown algorithm: " + which ) ; } |
public class MemberMap { /** * Creates clone of source { @ code MemberMap } , excluding given members .
* If source is empty , same map instance will be returned . If excluded members are empty or not present in
* source , a new map will be created containing the same members with source .
* @ param source source map
* @ param excludeMembers members to exclude
* @ return clone map */
static MemberMap cloneExcluding ( MemberMap source , MemberImpl ... excludeMembers ) { } } | if ( source . size ( ) == 0 ) { return source ; } Map < Address , MemberImpl > addressMap = new LinkedHashMap < > ( source . addressToMemberMap ) ; Map < String , MemberImpl > uuidMap = new LinkedHashMap < > ( source . uuidToMemberMap ) ; for ( MemberImpl member : excludeMembers ) { MemberImpl removed = addressMap . remove ( member . getAddress ( ) ) ; if ( removed != null ) { uuidMap . remove ( removed . getUuid ( ) ) ; } removed = uuidMap . remove ( member . getUuid ( ) ) ; if ( removed != null ) { addressMap . remove ( removed . getAddress ( ) ) ; } } return new MemberMap ( source . version + excludeMembers . length , addressMap , uuidMap ) ; |
public class GatewayImpl { /** * < p > Launch the in - process Gateway . < / p >
* @ throws Exception */
@ Override public void launch ( ) throws Exception { } } | if ( baseGateway != null ) { baseGateway . launch ( ) ; } if ( gateway != null ) { throw new GatewayAlreadyRunningException ( "An instance of the Gateway is already running" ) ; } Properties configuration = getProperties ( ) ; if ( configuration == null ) { // Change to a public exception once all calls to System . getProperty ( ) throughout the entire
// codebase have been eliminated .
throw new Exception ( "No environment has been specified" ) ; } if ( ! supportedJavaVersion ( 1 , 8 , "0" ) ) { throw new RuntimeException ( "Unsupported JDK version, Please install Java SE 8.0 or later and relaunch " + "Kaazing WebSocket Gateway" ) ; } String gatewayHomeProperty = configuration . getProperty ( GATEWAY_HOME_PROPERTY ) ; if ( gatewayHomeProperty == null ) { throw new IllegalArgumentException ( GATEWAY_HOME_PROPERTY + " directory was not specified" ) ; } File homeDir = new File ( gatewayHomeProperty ) ; if ( ! homeDir . isDirectory ( ) ) { throw new IllegalArgumentException ( GATEWAY_HOME_PROPERTY + " is not a valid directory: " + homeDir . getAbsolutePath ( ) ) ; } String gatewayConfigDirectoryProperty = configuration . getProperty ( GATEWAY_CONFIG_DIRECTORY_PROPERTY ) ; File configDir = ( gatewayConfigDirectoryProperty != null ) ? new File ( gatewayConfigDirectoryProperty ) : new File ( homeDir , DEFAULT_CONFIG_DIRECTORY ) ; if ( ! configDir . isDirectory ( ) ) { throw new IllegalArgumentException ( GATEWAY_CONFIG_DIRECTORY_PROPERTY + " is not a valid directory: " + configDir . getAbsolutePath ( ) ) ; } // Login modules needs the CONFIG directory , put it back into the configuration properties .
configuration . setProperty ( GATEWAY_CONFIG_DIRECTORY_PROPERTY , configDir . toString ( ) ) ; String gatewayTempDirectoryProperty = configuration . getProperty ( GATEWAY_TEMP_DIRECTORY_PROPERTY ) ; File tempDir = ( gatewayTempDirectoryProperty != null ) ? new File ( gatewayTempDirectoryProperty ) : new File ( homeDir , DEFAULT_TEMP_DIRECTORY ) ; if ( ! tempDir . isDirectory ( ) ) { throw new IllegalArgumentException ( GATEWAY_TEMP_DIRECTORY_PROPERTY + " is not a valid directory: " + tempDir . getAbsolutePath ( ) ) ; } String gatewayLogDirectoryProperty = configuration . getProperty ( GATEWAY_LOG_DIRECTORY_PROPERTY ) ; File logDir = ( gatewayLogDirectoryProperty != null ) ? new File ( gatewayLogDirectoryProperty ) : new File ( homeDir , DEFAULT_LOG_DIRECTORY ) ; if ( ! logDir . exists ( ) ) { logDir . mkdir ( ) ; } if ( ! logDir . isDirectory ( ) ) { throw new IllegalArgumentException ( GATEWAY_LOG_DIRECTORY_PROPERTY + " is not a valid directory or could not be created: " + logDir . getAbsolutePath ( ) ) ; } // Because we use Log4J and it contains a reference to $ { GATEWAY _ LOG _ DIRECTORY } ,
// we need to make sure to put the log directory back into the configuration properties .
configuration . setProperty ( GATEWAY_LOG_DIRECTORY_PROPERTY , logDir . toString ( ) ) ; // As of 3.2 we will normally have two config files in the config directory ( if you
// have the ' full ' installation ) , one for the ' minimal ' configuration ( that you would
// get if you just have a base installation ) and one for the demos and docs . To decide
// on the config file to use we ' ll have the following rules :
// * If there is a config specified as a system property , we ' ll use that .
// * If there is no config specified , we ' ll look for the default " full " gateway config .
// ( of course , the user may have modified that file and just be using the same name . )
// * If that doesn ' t exist , we ' ll look for the default " minimal " gateway config .
// * If none of those exists ( and is a readable file ) , that ' s an error .
File gatewayConfigFile ; String gatewayConfigProperty = configuration . getProperty ( GATEWAY_CONFIG_PROPERTY ) ; // If config property is a url then we download it and reset the property
try { // if config is a URL download it to config directory
URL configURL = new URL ( gatewayConfigProperty ) ; String path = configURL . getPath ( ) ; ReadableByteChannel rbc = Channels . newChannel ( configURL . openStream ( ) ) ; final File configFile = new File ( configDir , path . substring ( path . lastIndexOf ( '/' ) + 1 ) ) ; try ( FileOutputStream fos = new FileOutputStream ( configFile ) ) { fos . getChannel ( ) . transferFrom ( rbc , 0 , Long . MAX_VALUE ) ; } configuration . setProperty ( Gateway . GATEWAY_CONFIG_PROPERTY , configFile . getPath ( ) ) ; gatewayConfigProperty = configuration . getProperty ( GATEWAY_CONFIG_PROPERTY ) ; } catch ( MalformedURLException e1 ) { // expected exception if config is not a url
} catch ( IOException e ) { throw new RuntimeException ( "Could not fetch config from url: " + gatewayConfigProperty , e ) ; } if ( gatewayConfigProperty != null ) { gatewayConfigFile = new File ( gatewayConfigProperty ) ; if ( ! gatewayConfigFile . isFile ( ) || ! gatewayConfigFile . canRead ( ) ) { throw new IllegalArgumentException ( GATEWAY_CONFIG_PROPERTY + " was specified but is not a valid, readable file: " + gatewayConfigFile . getAbsolutePath ( ) ) ; } } else { gatewayConfigFile = new File ( configDir , DEFAULT_GATEWAY_CONFIG_XML ) ; if ( ! gatewayConfigFile . exists ( ) ) { gatewayConfigFile = new File ( configDir , DEFAULT_GATEWAY_CONFIG_MINIMAL_XML ) ; } if ( ! gatewayConfigFile . isFile ( ) || ! gatewayConfigFile . canRead ( ) ) { throw new IllegalArgumentException ( GATEWAY_CONFIG_PROPERTY + " was not specified, and no default readable config file" + " could be found in the conf/ directory" ) ; } } String gatewayWebDirectoryProperty = configuration . getProperty ( GATEWAY_WEB_DIRECTORY_PROPERTY ) ; File webRootDir = ( gatewayWebDirectoryProperty != null ) ? new File ( gatewayWebDirectoryProperty ) : new File ( homeDir , DEFAULT_WEB_DIRECTORY ) ; if ( ! webRootDir . exists ( ) ) { webRootDir . mkdir ( ) ; } if ( ! webRootDir . isDirectory ( ) ) { throw new IllegalArgumentException ( GATEWAY_WEB_DIRECTORY_PROPERTY + " is not a valid directory or could not be created: " + webRootDir . getAbsolutePath ( ) ) ; } String overrideLogging = configuration . getProperty ( OVERRIDE_LOGGING ) ; if ( ( overrideLogging == null ) || ! Boolean . parseBoolean ( overrideLogging ) ) { configureLogging ( configDir , configuration ) ; } duplicateJarFinder . findDuplicateJars ( ) ; displayVersionInfo ( ) ; LOGGER . info ( "Configuration file: " + gatewayConfigFile . getCanonicalPath ( ) ) ; GatewayObserverApi gatewayObserver = GatewayObserver . newInstance ( ) ; GatewayConfigParser parser = new GatewayConfigParser ( configuration ) ; GatewayConfigDocument config = parser . 
parse ( gatewayConfigFile ) ; GatewayContextResolver resolver = new GatewayContextResolver ( configDir , webRootDir , tempDir , jmxMBeanServer ) ; gatewayObserver . initingGateway ( configuration , resolver . getInjectables ( ) ) ; ConfigurationObserver confObserver = ConfigurationObserver . newInstance ( ) ; resolver . setObserver ( confObserver ) ; GatewayContext context = resolver . resolve ( config , configuration ) ; gateway = new Launcher ( gatewayObserver ) ; try { gateway . init ( context ) ; } catch ( Exception e ) { LOGGER . error ( String . format ( "Error starting Gateway: caught exception %s" , e ) ) ; throw e ; } |
public class TDHttpClient { /** * Get a { @ link TDHttpClient } that uses the specified headers for each request . Reuses the same
* underlying http client so closing the returned instance will return this instance as well .
* @ param headers
* @ return */
public TDHttpClient withHeaders ( Multimap < String , String > headers ) { } } | Multimap < String , String > mergedHeaders = ImmutableMultimap . < String , String > builder ( ) . putAll ( this . headers ) . putAll ( headers ) . build ( ) ; return new TDHttpClient ( config , httpClient , objectMapper , mergedHeaders ) ; |
public class Interval { /** * Utility function to check if a particular flag is set exclusively
* given a particular set of flags and a mask
* @ param flags flags to check
* @ param flag bit for flag of interest ( is this flag set or not )
* @ param mask bitmask of bits to check
* @ return true if flag is exclusively set for flags & mask */
public static boolean checkFlagExclusiveSet ( int flags , int flag , int mask ) { } } | int f = flags & flag ; if ( f != 0 ) { return ( ( flags & mask & ~ flag ) != 0 ) ? false : true ; } else { return false ; } |
public class Date { /** * fastTime and the returned data are in sync upon return . */
private final BaseCalendar . Date normalize ( BaseCalendar . Date date ) { } } | int y = date . getNormalizedYear ( ) ; int m = date . getMonth ( ) ; int d = date . getDayOfMonth ( ) ; int hh = date . getHours ( ) ; int mm = date . getMinutes ( ) ; int ss = date . getSeconds ( ) ; int ms = date . getMillis ( ) ; TimeZone tz = date . getZone ( ) ; // If the specified year can ' t be handled using a long value
// in milliseconds , GregorianCalendar is used for full
// compatibility with underflow and overflow . This is required
// by some JCK tests . The limits are based max year values -
// years that can be represented by max values of d , hh , mm ,
// ss and ms . Also , let GregorianCalendar handle the default
// cutover year so that we don ' t need to worry about the
// transition here .
if ( y == 1582 || y > 280000000 || y < - 280000000 ) { if ( tz == null ) { tz = TimeZone . getTimeZone ( "GMT" ) ; } GregorianCalendar gc = new GregorianCalendar ( tz ) ; gc . clear ( ) ; gc . set ( GregorianCalendar . MILLISECOND , ms ) ; gc . set ( y , m - 1 , d , hh , mm , ss ) ; fastTime = gc . getTimeInMillis ( ) ; BaseCalendar cal = getCalendarSystem ( fastTime ) ; date = ( BaseCalendar . Date ) cal . getCalendarDate ( fastTime , tz ) ; return date ; } BaseCalendar cal = getCalendarSystem ( y ) ; if ( cal != getCalendarSystem ( date ) ) { date = ( BaseCalendar . Date ) cal . newCalendarDate ( tz ) ; date . setNormalizedDate ( y , m , d ) . setTimeOfDay ( hh , mm , ss , ms ) ; } // Perform the GregorianCalendar - style normalization .
fastTime = cal . getTime ( date ) ; // In case the normalized date requires the other calendar
// system , we need to recalculate it using the other one .
BaseCalendar ncal = getCalendarSystem ( fastTime ) ; if ( ncal != cal ) { date = ( BaseCalendar . Date ) ncal . newCalendarDate ( tz ) ; date . setNormalizedDate ( y , m , d ) . setTimeOfDay ( hh , mm , ss , ms ) ; fastTime = ncal . getTime ( date ) ; } return date ; |
public class TypeUsage_Builder { /** * Copies values from { @ code value } , skipping empty optionals .
* @ return this { @ code Builder } object */
public TypeUsage . Builder mergeFrom ( TypeUsage value ) { } } | TypeUsage_Builder defaults = new TypeUsage . Builder ( ) ; if ( defaults . _unsetProperties . contains ( Property . START ) || value . start ( ) != defaults . start ( ) ) { start ( value . start ( ) ) ; } if ( defaults . _unsetProperties . contains ( Property . END ) || value . end ( ) != defaults . end ( ) ) { end ( value . end ( ) ) ; } if ( defaults . _unsetProperties . contains ( Property . TYPE ) || ! Objects . equals ( value . type ( ) , defaults . type ( ) ) ) { type ( value . type ( ) ) ; } value . scope ( ) . ifPresent ( this :: scope ) ; return ( TypeUsage . Builder ) this ; |
public class SmartsheetImpl { /** * Returns the TemplateResources instance that provides access to Template resources .
* @ return the template resources */
public TemplateResources templateResources ( ) { } } | if ( templates . get ( ) == null ) { templates . compareAndSet ( null , new TemplateResourcesImpl ( this ) ) ; } return templates . get ( ) ; |
public class XAResourceWrapperImpl { /** * { @ inheritDoc } */
public void commit ( Xid xid , boolean onePhase ) throws XAException { } } | if ( pad ) xid = convertXid ( xid ) ; xaResource . commit ( xid , onePhase ) ; |
public class XmlParser { /** * Read a literal . With matching single or double quotes as delimiters ( and
* not embedded ! ) this is used to parse :
* < pre >
* [ 9 ] EntityValue : : = . . . ( [ & circ ; % & amp ; ] | PEReference | Reference ) * . . .
* [ 10 ] AttValue : : = . . . ( [ & circ ; & lt ; & amp ; ] | Reference ) * . . .
* [ 11 ] SystemLiteral : : = . . . ( URLchar - & quot ; ' & quot ; ) * . . .
* [ 12 ] PubidLiteral : : = . . . ( PubidChar - & quot ; ' & quot ; ) * . . .
* < / pre >
* as well as the quoted strings in XML and text declarations ( for version ,
* encoding , and standalone ) which have their own constraints . */
private String readLiteral ( int flags ) throws SAXException , IOException { } } | char delim , c ; int startLine = line ; boolean saved = expandPE ; boolean savedReport = doReport ; // Find the first delimiter .
delim = readCh ( ) ; if ( ( delim != '"' ) && ( delim != '\'' ) ) { fatal ( "expected '\"' or \"'\"" , delim , null ) ; return null ; } inLiteral = true ; if ( ( flags & LIT_DISABLE_PE ) != 0 ) { expandPE = false ; } doReport = false ; // Each level of input source has its own buffer ; remember
// ours , so we won ' t read the ending delimiter from any
// other input source , regardless of entity processing .
char [ ] ourBuf = readBuffer ; // Read the literal .
try { c = readCh ( ) ; loop : while ( ! ( ( c == delim ) && ( readBuffer == ourBuf ) ) ) { switch ( c ) { // attributes and public ids are normalized
// in almost the same ways
case '\n' : case '\r' : if ( ( flags & ( LIT_ATTRIBUTE | LIT_PUBID ) ) != 0 ) { c = ' ' ; } break ; case '\t' : if ( ( flags & LIT_ATTRIBUTE ) != 0 ) { c = ' ' ; } break ; case '&' : c = readCh ( ) ; // Char refs are expanded immediately , except for
// all the cases where it ' s deferred .
if ( c == '#' ) { if ( ( flags & LIT_DISABLE_CREF ) != 0 ) { dataBufferAppend ( '&' ) ; break ; } parseCharRef ( false /* Do not do flushDataBuffer */
) ; // exotic WFness risk : this is an entity literal ,
// dataBuffer [ dataBufferPos - 1 ] = = ' & ' , and
// following chars are a _ partial _ entity / char ref
// It looks like an entity ref . . .
} else { unread ( c ) ; // Expand it ?
if ( ( flags & LIT_ENTITY_REF ) > 0 ) { parseEntityRef ( false ) ; // Is it just data ?
} else if ( ( flags & LIT_DISABLE_EREF ) != 0 ) { dataBufferAppend ( '&' ) ; // OK , it will be an entity ref - - expanded
// later .
} else { String name = readNmtoken ( true ) ; require ( ';' ) ; dataBufferAppend ( '&' ) ; dataBufferAppend ( name ) ; dataBufferAppend ( ';' ) ; } } c = readCh ( ) ; continue loop ; case '<' : // and why ? Perhaps so " & foo ; " expands the same
// inside and outside an attribute ?
if ( ( flags & LIT_ATTRIBUTE ) != 0 ) { fatal ( "attribute values may not contain '<'" ) ; } break ; // We don ' t worry about case ' % ' and PE refs , readCh does .
default : break ; } dataBufferAppend ( c ) ; c = readCh ( ) ; } } catch ( EOFException e ) { fatal ( "end of input while looking for delimiter (started on line " + startLine + ')' , null , Character . valueOf ( delim ) . toString ( ) ) ; } inLiteral = false ; expandPE = saved ; doReport = savedReport ; // Normalise whitespace if necessary .
if ( ( flags & LIT_NORMALIZE ) > 0 ) { dataBufferNormalize ( ) ; } // Return the value .
return dataBufferToString ( ) ; |
public class FormHttpMessageConverter { /** * Return the filename of the given multipart part . This value will be used for the
* { @ code Content - Disposition } header .
* < p > The default implementation returns { @ link Resource # getFilename ( ) } if the part is a
* { @ code Resource } , and { @ code null } in other cases . Can be overridden in subclasses .
* @ param part the part to determine the file name for
* @ return the filename , or { @ code null } if not known */
protected String getFilename ( Object part ) { } } | if ( part instanceof Resource ) { Resource resource = ( Resource ) part ; return resource . getFilename ( ) ; } else { return null ; } |
public class OdmManagerImpl { /** * / * ( non - Javadoc )
* @ see org . springframework . ldap . odm . core . OdmManager # search ( java . lang . Class , javax . naming . Name , java . lang . String , javax . naming . directory . SearchControls ) */
public < T > List < T > search ( Class < T > managedClass , Name base , String filter , SearchControls scope ) { } } | Filter searchFilter = null ; if ( StringUtils . hasText ( filter ) ) { searchFilter = new HardcodedFilter ( filter ) ; } return ldapTemplate . find ( base , searchFilter , scope , managedClass ) ; |
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getIfcTypeProduct ( ) { } } | if ( ifcTypeProductEClass == null ) { ifcTypeProductEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 627 ) ; } return ifcTypeProductEClass ; |
public class JavacServer { /** * Connect and compile using the javac server settings and the args . When using more advanced features , the sources _ to _ compile and visible _ sources are
* supplied to the server and meta data is returned in package _ artifacts , package _ dependencies and package _ pubapis . */
private static int connectAndCompile ( PortFile portFile , String id , String [ ] args , Set < URI > sourcesToCompile , Set < URI > visibleSources , Map < String , Set < URI > > packageArtifacts , Map < String , Set < String > > packageDependencies , Map < String , String > packagePublicApis , SysInfo sysinfo , PrintStream out , PrintStream err ) { } } | int rc = - 3 ; try { int port = portFile . containsPortInfo ( ) ? portFile . getPort ( ) : 0 ; if ( port == 0 ) { return ERROR_BUT_TRY_AGAIN ; } long cookie = portFile . getCookie ( ) ; // Acquire the localhost / 127.0.0.1 address .
InetAddress addr = InetAddress . getByName ( null ) ; SocketAddress sockaddr = new InetSocketAddress ( addr , port ) ; Socket sock = new Socket ( ) ; int timeoutMs = CONNECTION_TIMEOUT * 1000 ; try { sock . connect ( sockaddr , timeoutMs ) ; } catch ( java . net . ConnectException e ) { err . println ( "Could not connect to javac server found in portfile: " + portFile . getFilename ( ) + " " + e ) ; return ERROR_BUT_TRY_AGAIN ; } if ( ! sock . isConnected ( ) ) { err . println ( "Could not connect to javac server found in portfile: " + portFile . getFilename ( ) ) ; return ERROR_BUT_TRY_AGAIN ; } BufferedReader in = new BufferedReader ( new InputStreamReader ( sock . getInputStream ( ) ) ) ; PrintWriter sockout = new PrintWriter ( sock . getOutputStream ( ) ) ; sockout . println ( PROTOCOL_COOKIE_VERSION ) ; sockout . println ( "" + cookie ) ; sockout . println ( PROTOCOL_CWD ) ; sockout . println ( System . getProperty ( "user.dir" ) ) ; sockout . println ( PROTOCOL_ID ) ; sockout . println ( id ) ; sockout . println ( PROTOCOL_ARGS ) ; for ( String s : args ) { StringBuffer buf = new StringBuffer ( ) ; String [ ] paths = s . split ( File . pathSeparator ) ; int c = 0 ; for ( String path : paths ) { File f = new File ( path ) ; if ( f . isFile ( ) || f . isDirectory ( ) ) { buf . append ( f . getAbsolutePath ( ) ) ; c ++ ; if ( c < paths . length ) { buf . append ( File . pathSeparator ) ; } } else { buf = new StringBuffer ( s ) ; break ; } } sockout . println ( buf . toString ( ) ) ; } sockout . println ( PROTOCOL_SOURCES_TO_COMPILE ) ; for ( URI uri : sourcesToCompile ) { sockout . println ( uri . toString ( ) ) ; } sockout . println ( PROTOCOL_VISIBLE_SOURCES ) ; for ( URI uri : visibleSources ) { sockout . println ( uri . toString ( ) ) ; } sockout . println ( PROTOCOL_END ) ; sockout . flush ( ) ; StringBuffer stdout = new StringBuffer ( ) ; StringBuffer stderr = new StringBuffer ( ) ; if ( ! 
expect ( in , PROTOCOL_STDOUT ) ) { return ERROR_FATAL ; } // Load stdout
for ( ; ; ) { String l = in . readLine ( ) ; if ( l == null ) { return ERROR_FATAL ; } if ( l . equals ( PROTOCOL_STDERR ) ) { break ; } stdout . append ( l ) ; stdout . append ( '\n' ) ; } // Load stderr
for ( ; ; ) { String l = in . readLine ( ) ; if ( l == null ) { return ERROR_FATAL ; } if ( l . equals ( PROTOCOL_PACKAGE_ARTIFACTS ) ) { break ; } stderr . append ( l ) ; stderr . append ( '\n' ) ; } // Load the package artifacts
Set < URI > lastUriSet = null ; for ( ; ; ) { String l = in . readLine ( ) ; if ( l == null ) { return ERROR_FATAL ; } if ( l . equals ( PROTOCOL_PACKAGE_DEPENDENCIES ) ) { break ; } if ( l . length ( ) > 1 && l . charAt ( 0 ) == '+' ) { String pkg = l . substring ( 1 ) ; lastUriSet = new HashSet < URI > ( ) ; packageArtifacts . put ( pkg , lastUriSet ) ; } else if ( l . length ( ) > 1 && lastUriSet != null ) { lastUriSet . add ( new URI ( l . substring ( 1 ) ) ) ; } } // Load package dependencies
Set < String > lastPackageSet = null ; for ( ; ; ) { String l = in . readLine ( ) ; if ( l == null ) { return ERROR_FATAL ; } if ( l . equals ( PROTOCOL_PACKAGE_PUBLIC_APIS ) ) { break ; } if ( l . length ( ) > 1 && l . charAt ( 0 ) == '+' ) { String pkg = l . substring ( 1 ) ; lastPackageSet = new HashSet < String > ( ) ; packageDependencies . put ( pkg , lastPackageSet ) ; } else if ( l . length ( ) > 1 && lastPackageSet != null ) { lastPackageSet . add ( l . substring ( 1 ) ) ; } } // Load package pubapis
Map < String , StringBuffer > tmp = new HashMap < String , StringBuffer > ( ) ; StringBuffer lastPublicApi = null ; for ( ; ; ) { String l = in . readLine ( ) ; if ( l == null ) { return ERROR_FATAL ; } if ( l . equals ( PROTOCOL_SYSINFO ) ) { break ; } if ( l . length ( ) > 1 && l . charAt ( 0 ) == '+' ) { String pkg = l . substring ( 1 ) ; lastPublicApi = new StringBuffer ( ) ; tmp . put ( pkg , lastPublicApi ) ; } else if ( l . length ( ) > 1 && lastPublicApi != null ) { lastPublicApi . append ( l . substring ( 1 ) ) ; lastPublicApi . append ( "\n" ) ; } } for ( String p : tmp . keySet ( ) ) { assert ( packagePublicApis . get ( p ) == null ) ; String api = tmp . get ( p ) . toString ( ) ; packagePublicApis . put ( p , api ) ; } // Now reading the max memory possible .
for ( ; ; ) { String l = in . readLine ( ) ; if ( l == null ) { return ERROR_FATAL ; } if ( l . equals ( PROTOCOL_RETURN_CODE ) ) { break ; } if ( l . startsWith ( "num_cores=" ) && sysinfo != null ) { sysinfo . numCores = Integer . parseInt ( l . substring ( 10 ) ) ; } if ( l . startsWith ( "max_memory=" ) && sysinfo != null ) { sysinfo . maxMemory = Long . parseLong ( l . substring ( 11 ) ) ; } } String l = in . readLine ( ) ; if ( l == null ) { err . println ( "No return value from the server!" ) ; return ERROR_FATAL ; } rc = Integer . parseInt ( l ) ; out . print ( stdout ) ; err . print ( stderr ) ; } catch ( Exception e ) { e . printStackTrace ( err ) ; } return rc ; |
public class ICUHumanize { /** * Same as { @ link # compactDecimal ( Number , CompactStyle ) compactDecimal } but
* defaults to SHORT compact style .
* @ param value
* The number to be abbreviated
* @ return a compact textual representation of the given value */
public static String compactDecimal ( final Number value ) { } } | NumberFormat fmt = context . get ( ) . getCompactDecimalFormat ( ) ; return fmt . format ( value ) ; |
public class YearWeek { /** * Obtains an instance of { @ code YearWeek } from a week - based - year and week .
* If the week is 53 and the year does not have 53 weeks , week one of the following
* year is selected .
* @ param weekBasedYear the week - based - year to represent , from MIN _ YEAR to MAX _ YEAR
* @ param week the week - of - week - based - year to represent , from 1 to 53
* @ return the year - week , not null
* @ throws DateTimeException if either field is invalid */
public static YearWeek of ( int weekBasedYear , int week ) { } } | WEEK_BASED_YEAR . range ( ) . checkValidValue ( weekBasedYear , WEEK_BASED_YEAR ) ; WEEK_OF_WEEK_BASED_YEAR . range ( ) . checkValidValue ( week , WEEK_OF_WEEK_BASED_YEAR ) ; if ( week == 53 && weekRange ( weekBasedYear ) < 53 ) { week = 1 ; weekBasedYear ++ ; WEEK_BASED_YEAR . range ( ) . checkValidValue ( weekBasedYear , WEEK_BASED_YEAR ) ; } return new YearWeek ( weekBasedYear , week ) ; |
public class NetworkBufferPool { @ Override public BufferPool createBufferPool ( int numRequiredBuffers , int maxUsedBuffers ) throws IOException { } } | return createBufferPool ( numRequiredBuffers , maxUsedBuffers , Optional . empty ( ) ) ; |
public class Betner { /** * Reset the given target String as delegate String
* @ param target
* @ return */
public Betner reset ( String target ) { } } | this . rebuild = ! target4Betn . equals ( target ) ; this . target4Betn = target ; return this ; |
public class InputMapTemplate { /** * When the given event type occurs , runs the given action , and then attempts
* to pattern match the event type with the next { @ code InputMap } ( if one exists ) . */
public static < S , T extends Event > InputMapTemplate < S , T > process ( EventType < ? extends T > eventType , BiFunction < ? super S , ? super T , InputHandler . Result > action ) { } } | return process ( EventPattern . eventType ( eventType ) , action ) ; |
public class CompactionSlaEventHelper { /** * Get an { @ link SlaEventSubmitterBuilder } that has dataset urn , partition , record count , previous publish timestamp
* and dedupe status set .
* The caller MUST set eventSubmitter , eventname before submitting . */
public static SlaEventSubmitterBuilder getEventSubmitterBuilder ( Dataset dataset , Optional < Job > job , FileSystem fs ) { } } | SlaEventSubmitterBuilder builder = SlaEventSubmitter . builder ( ) . datasetUrn ( dataset . getUrn ( ) ) . partition ( dataset . jobProps ( ) . getProp ( MRCompactor . COMPACTION_JOB_DEST_PARTITION , "" ) ) . dedupeStatus ( getOutputDedupeStatus ( dataset . jobProps ( ) ) ) ; long previousPublishTime = getPreviousPublishTime ( dataset , fs ) ; long upstreamTime = dataset . jobProps ( ) . getPropAsLong ( SlaEventKeys . UPSTREAM_TS_IN_MILLI_SECS_KEY , - 1l ) ; long recordCount = getRecordCount ( job ) ; // Previous publish only exists when this is a recompact job
if ( previousPublishTime != - 1l ) { builder . previousPublishTimestamp ( Long . toString ( previousPublishTime ) ) ; } // Upstream time is the logical time represented by the compaction input directory
if ( upstreamTime != - 1l ) { builder . upstreamTimestamp ( Long . toString ( upstreamTime ) ) ; } if ( recordCount != - 1l ) { builder . recordCount ( Long . toString ( recordCount ) ) ; } return builder ; |
public class BookKeeperJournalMetadataManager { /** * Create znodes for storing ledger metadata if they have not been
* created before
* @ throws IOException If there an unrecoverable error talking to ZooKeeper */
public void init ( ) throws IOException { } } | try { if ( zooKeeper . exists ( zooKeeperParentPath , false ) == null ) { zooKeeper . create ( zooKeeperParentPath , new byte [ ] { '0' } , ZooDefs . Ids . OPEN_ACL_UNSAFE , CreateMode . PERSISTENT ) ; LOG . info ( "Created ZNode " + zooKeeperParentPath ) ; } if ( zooKeeper . exists ( ledgerParentPath , false ) == null ) { zooKeeper . create ( ledgerParentPath , new byte [ ] { '0' } , ZooDefs . Ids . OPEN_ACL_UNSAFE , CreateMode . PERSISTENT ) ; LOG . info ( "Created ZNode" + ledgerParentPath ) ; } } catch ( InterruptedException e ) { interruptedException ( "Interrupted ensuring that ZNodes " + zooKeeperParentPath + " and " + ledgerParentPath + " exist!" , e ) ; } catch ( KeeperException e ) { keeperException ( "Unrecoverable ZooKeeper error ensuring that ZNodes " + zooKeeperParentPath + " and " + ledgerParentPath + " exist!" , e ) ; } |
public class StringUtils { /** * Wraps a string with a char .
* < pre >
* StringUtils . wrap ( null , * ) = null
* StringUtils . wrap ( " " , * ) = " "
* StringUtils . wrap ( " ab " , ' \ 0 ' ) = " ab "
* StringUtils . wrap ( " ab " , ' x ' ) = " xabx "
* StringUtils . wrap ( " ab " , ' \ ' ' ) = " ' ab ' "
* StringUtils . wrap ( " \ " ab \ " " , ' \ " ' ) = " \ " \ " ab \ " \ " "
* < / pre >
* @ param str
* the string to be wrapped , may be { @ code null }
* @ param wrapWith
* the char that will wrap { @ code str }
* @ return the wrapped string , or { @ code null } if { @ code str = = null }
* @ since 3.4 */
public static String wrap ( final String str , final char wrapWith ) { } } | if ( isEmpty ( str ) || wrapWith == CharUtils . NUL ) { return str ; } return wrapWith + str + wrapWith ; |
public class PublicIPAddressesInner { /** * Gets information about all public IP addresses in a virtual machine IP configuration in a virtual machine scale set .
* ServiceResponse < PageImpl < PublicIPAddressInner > > * @ param resourceGroupName The name of the resource group .
* ServiceResponse < PageImpl < PublicIPAddressInner > > * @ param virtualMachineScaleSetName The name of the virtual machine scale set .
* ServiceResponse < PageImpl < PublicIPAddressInner > > * @ param virtualmachineIndex The virtual machine index .
* ServiceResponse < PageImpl < PublicIPAddressInner > > * @ param networkInterfaceName The network interface name .
* ServiceResponse < PageImpl < PublicIPAddressInner > > * @ param ipConfigurationName The IP configuration name .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the PagedList & lt ; PublicIPAddressInner & gt ; object wrapped in { @ link ServiceResponse } if successful . */
public Observable < ServiceResponse < Page < PublicIPAddressInner > > > listVirtualMachineScaleSetVMPublicIPAddressesSinglePageAsync ( final String resourceGroupName , final String virtualMachineScaleSetName , final String virtualmachineIndex , final String networkInterfaceName , final String ipConfigurationName ) { } } | if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( virtualMachineScaleSetName == null ) { throw new IllegalArgumentException ( "Parameter virtualMachineScaleSetName is required and cannot be null." ) ; } if ( virtualmachineIndex == null ) { throw new IllegalArgumentException ( "Parameter virtualmachineIndex is required and cannot be null." ) ; } if ( networkInterfaceName == null ) { throw new IllegalArgumentException ( "Parameter networkInterfaceName is required and cannot be null." ) ; } if ( ipConfigurationName == null ) { throw new IllegalArgumentException ( "Parameter ipConfigurationName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } final String apiVersion = "2017-03-30" ; return service . listVirtualMachineScaleSetVMPublicIPAddresses ( resourceGroupName , virtualMachineScaleSetName , virtualmachineIndex , networkInterfaceName , ipConfigurationName , this . client . subscriptionId ( ) , apiVersion , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Page < PublicIPAddressInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < PublicIPAddressInner > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < PageImpl < PublicIPAddressInner > > result = listVirtualMachineScaleSetVMPublicIPAddressesDelegate ( response ) ; return Observable . 
just ( new ServiceResponse < Page < PublicIPAddressInner > > ( result . body ( ) , result . response ( ) ) ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ; |
public class DbClass { public DbDatum getPipeProperty ( String pipeName , String propertyName ) throws DevFailed { } } | DbPipe dbPipe = database . getClassPipeProperties ( className , pipeName ) ; DbDatum datum = dbPipe . getDatum ( propertyName ) ; if ( datum == null ) Except . throw_exception ( "TangoApi_PropertyNotFound" , "Property " + propertyName + " not found for pipe " + pipeName ) ; return datum ; |
public class Response { /** * Adds cookie to the response . Can be invoked multiple times to insert more than one cookie .
* @ param name name of the cookie
* @ param value value of the cookie
* @ param maxAge max age of the cookie in seconds ( negative for the not persistent cookie , zero - deletes the cookie )
* @ param secured if true : cookie will be secured
* @ param httpOnly if true : cookie will be marked as http only */
public void cookie ( String name , String value , int maxAge , boolean secured , boolean httpOnly ) { } } | cookie ( "" , "" , name , value , maxAge , secured , httpOnly ) ; |
public class JBBPBitInputStream { /** * Read number of double items from the input stream .
* @ param items number of items to be read from the input stream , if less than
* zero then all stream till the end will be read
* @ param byteOrder the order of bytes to be used to decode values
* @ return read items as a double array
* @ throws IOException it will be thrown for any transport problem during the
* operation
* @ see JBBPByteOrder # BIG _ ENDIAN
* @ see JBBPByteOrder # LITTLE _ ENDIAN
* @ since 1.4.0 */
public double [ ] readDoubleArray ( final int items , final JBBPByteOrder byteOrder ) throws IOException { } } | int pos = 0 ; if ( items < 0 ) { double [ ] buffer = new double [ INITIAL_ARRAY_BUFFER_SIZE ] ; // till end
while ( hasAvailableData ( ) ) { final long next = readLong ( byteOrder ) ; if ( buffer . length == pos ) { final double [ ] newbuffer = new double [ buffer . length << 1 ] ; System . arraycopy ( buffer , 0 , newbuffer , 0 , buffer . length ) ; buffer = newbuffer ; } buffer [ pos ++ ] = Double . longBitsToDouble ( next ) ; } if ( buffer . length == pos ) { return buffer ; } final double [ ] result = new double [ pos ] ; System . arraycopy ( buffer , 0 , result , 0 , pos ) ; return result ; } else { // number
final double [ ] buffer = new double [ items ] ; for ( int i = 0 ; i < items ; i ++ ) { buffer [ i ] = readDouble ( byteOrder ) ; } return buffer ; } |
public class VoltPort { /** * Do a reverse DNS lookup of the remote end . Done in a separate thread unless synchronous is specified .
* If asynchronous lookup is requested the task may be dropped and resolution may never occur */
void resolveHostname ( boolean synchronous ) { } } | Runnable r = new Runnable ( ) { @ Override public void run ( ) { String remoteHost = ReverseDNSCache . hostnameOrAddress ( m_remoteSocketAddress . getAddress ( ) ) ; if ( ! remoteHost . equals ( m_remoteSocketAddress . getAddress ( ) . getHostAddress ( ) ) ) { m_remoteHostname = remoteHost ; m_remoteHostAndAddressAndPort = remoteHost + m_remoteHostAndAddressAndPort ; m_toString = VoltPort . this . toString ( ) + ":" + m_remoteHostAndAddressAndPort ; } } } ; if ( synchronous ) { r . run ( ) ; } else { /* * Start the reverse DNS lookup in background because it might be
* very slow if the hostname is not specified in local / etc / hosts . */
try { ReverseDNSCache . submit ( r ) ; } catch ( RejectedExecutionException e ) { networkLog . debug ( "Reverse DNS lookup for " + m_remoteSocketAddress + " rejected because the queue was full" ) ; } } |
public class JmesPathCodeGenVisitor { /** * Generates the code for a new JmesPathLiteral .
* @ param literal JmesPath literal type
* @ param aVoid void
* @ return String that represents a call to
* the new literal expression */
@ Override public String visit ( final JmesPathLiteral literal , final Void aVoid ) { } } | return "new JmesPathLiteral(\"" + StringEscapeUtils . escapeJava ( literal . getValue ( ) . toString ( ) ) + "\")" ; |
public class CmsShowResourceTable { /** * Get resources set for the given principal . < p >
* @ param cms CmsObject
* @ param id id of principal
* @ return Set of CmsResource
* @ throws CmsException if resources can not be read */
private Set < CmsResource > getResourcesFromPrincipal ( CmsObject cms , CmsUUID id ) throws CmsException { } } | return cms . getResourcesForPrincipal ( id , null , false ) ; |
public class WonderPushView { /** * Sets the full URL for the web content displayed in this WonderPushView ' s WebView .
* @ param fullUrl
* A full URL , with host . */
public void setFullUrl ( String fullUrl ) { } } | if ( fullUrl == null ) { return ; } Uri parsedUri = Uri . parse ( fullUrl ) ; if ( ! WonderPushUriHelper . isAPIUri ( parsedUri ) ) { mWebView . loadUrl ( fullUrl ) ; } else { setResource ( WonderPushUriHelper . getResource ( parsedUri ) , WonderPushUriHelper . getParams ( parsedUri ) ) ; } |
public class DecorableElement { /** * Removes annotation ( s ) having the given identifier */
protected void removeAnnotation ( String annotIdentifier ) { } } | if ( m_annotations != null ) { Iterator it = m_annotations . iterator ( ) ; while ( it . hasNext ( ) ) { Annotation ann = ( Annotation ) it . next ( ) ; if ( ann . getIdentifier ( ) . equals ( annotIdentifier ) ) it . remove ( ) ; } } |
public class RemoteMuzeiArtSource { /** * Subclasses of { @ link RemoteMuzeiArtSource } should implement { @ link # onTryUpdate ( int ) }
* instead of this method . */
@ RequiresPermission ( allOf = { } } | Manifest . permission . ACCESS_NETWORK_STATE , Manifest . permission . WAKE_LOCK } ) @ CallSuper @ Override protected void onUpdate ( @ UpdateReason int reason ) { PowerManager pwm = ( PowerManager ) getSystemService ( POWER_SERVICE ) ; PowerManager . WakeLock lock = pwm . newWakeLock ( PowerManager . PARTIAL_WAKE_LOCK , mName ) ; lock . acquire ( FETCH_WAKELOCK_TIMEOUT_MILLIS ) ; SharedPreferences sp = getSharedPreferences ( ) ; try { NetworkInfo ni = ( ( ConnectivityManager ) getSystemService ( Context . CONNECTIVITY_SERVICE ) ) . getActiveNetworkInfo ( ) ; if ( ni == null || ! ni . isConnected ( ) ) { Log . d ( TAG , "No network connection; not attempting to fetch update, id=" + mName ) ; throw new RetryException ( ) ; } // Attempt an update
onTryUpdate ( reason ) ; // No RetryException , so declare success and reset update attempt
sp . edit ( ) . remove ( PREF_RETRY_ATTEMPT ) . apply ( ) ; setWantsNetworkAvailable ( false ) ; } catch ( RetryException e ) { Log . w ( TAG , "Error fetching, scheduling retry, id=" + mName ) ; // Schedule retry with exponential backoff , starting with INITIAL _ RETRY . . . seconds later
int retryAttempt = Math . min ( sp . getInt ( PREF_RETRY_ATTEMPT , 0 ) , MAX_RETRY_ATTEMPTS ) ; scheduleUpdate ( System . currentTimeMillis ( ) + ( INITIAL_RETRY_DELAY_MILLIS << retryAttempt ) ) ; if ( retryAttempt < MAX_RETRY_ATTEMPTS ) { sp . edit ( ) . putInt ( PREF_RETRY_ATTEMPT , retryAttempt + 1 ) . apply ( ) ; } setWantsNetworkAvailable ( true ) ; } finally { if ( lock . isHeld ( ) ) { lock . release ( ) ; } } |
public class ApiOvhCdndedicated { /** * Get this object properties
* REST : GET / cdn / dedicated / { serviceName } / domains / { domain } / cacheRules / { cacheRuleId }
* @ param serviceName [ required ] The internal name of your CDN offer
* @ param domain [ required ] Domain of this object
* @ param cacheRuleId [ required ] Id for this cache rule */
public OvhCacheRule serviceName_domains_domain_cacheRules_cacheRuleId_GET ( String serviceName , String domain , Long cacheRuleId ) throws IOException { } } | String qPath = "/cdn/dedicated/{serviceName}/domains/{domain}/cacheRules/{cacheRuleId}" ; StringBuilder sb = path ( qPath , serviceName , domain , cacheRuleId ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhCacheRule . class ) ; |
public class TransientBinaryStore { /** * Ensures that the directory used by this binary store exists and can be both read and written to .
* @ throws BinaryStoreException if the directory cannot be written to , read , or ( if needed ) created */
@ Override protected void initializeStorage ( File directory ) throws BinaryStoreException { } } | // make sure the directory doesn ' t exist
FileUtil . delete ( directory ) ; if ( ! directory . exists ( ) ) { logger . debug ( "Creating temporary directory for transient binary store: {0}" , directory . getAbsolutePath ( ) ) ; directory . mkdirs ( ) ; } if ( ! directory . canRead ( ) ) { throw new BinaryStoreException ( JcrI18n . unableToReadTemporaryDirectory . text ( directory . getAbsolutePath ( ) , JAVA_IO_TMPDIR ) ) ; } if ( ! directory . canWrite ( ) ) { throw new BinaryStoreException ( JcrI18n . unableToWriteTemporaryDirectory . text ( directory . getAbsolutePath ( ) , JAVA_IO_TMPDIR ) ) ; } |
public class ON_MATCH { /** * < div color = ' red ' style = " font - size : 24px ; color : red " > < b > < i > < u > JCYPHER < / u > < / i > < / b > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > set a label of a node in an ON _ MATCH clause < / i > < / div >
* < div color = ' red ' style = " font - size : 18px ; color : red " > < i > e . g . . . . < b > SET ( n . label ( " Person " ) ) < / b > < / i > < / div >
* < br / > */
public static ModifyTerminal SET ( JcLabel label ) { } } | ModifyTerminal ret = ModifyFactory . setLabel ( label ) ; ASTNode an = APIObjectAccess . getAstNode ( ret ) ; an . setClauseType ( ClauseType . ON_MATCH_SET ) ; return ret ; |
public class DB { /** * Log state assertion .
* @ param callInfo Call info .
* @ param dsa state assertion . */
void log ( CallInfo callInfo , DataSetAssertion dsa ) { } } | if ( isEnabled ( Option . LOG_ASSERTIONS ) || ( ! dsa . passed ( ) && isEnabled ( Option . LOG_ASSERTION_ERRORS ) ) ) { log . write ( callInfo , dsa ) ; } |
public class DescribeScalingActivitiesResult { /** * A list of scaling activity objects .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setScalingActivities ( java . util . Collection ) } or { @ link # withScalingActivities ( java . util . Collection ) } if
* you want to override the existing values .
* @ param scalingActivities
* A list of scaling activity objects .
* @ return Returns a reference to this object so that method calls can be chained together . */
public DescribeScalingActivitiesResult withScalingActivities ( ScalingActivity ... scalingActivities ) { } } | if ( this . scalingActivities == null ) { setScalingActivities ( new java . util . ArrayList < ScalingActivity > ( scalingActivities . length ) ) ; } for ( ScalingActivity ele : scalingActivities ) { this . scalingActivities . add ( ele ) ; } return this ; |
public class NotifdEventConsumer { private void setEventChannelTimeoutMillis ( EventChannel eventChannel , int millis ) { } } | // Change Jacorb policy for timeout
org . omg . CORBA . Policy p = new org . jacorb . orb . policies . RelativeRoundtripTimeoutPolicy ( 10000 * millis ) ; eventChannel . _set_policy_override ( new Policy [ ] { p } , org . omg . CORBA . SetOverrideType . ADD_OVERRIDE ) ; |
public class BinaryUtil { /** * Method to convert a byte array to a Collection of Binary .
* @ param bytes Array of bytes .
* @ throws IOException If unable to read the binary collection .
* @ return Collection of Binary . */
public static Collection < Binary > toBinaryCollection ( byte [ ] bytes ) throws IOException { } } | try ( ByteArrayInputStream bais = new ByteArrayInputStream ( bytes ) ; BigEndianBinaryReader reader = new BigEndianBinaryReader ( bais ) ) { final int size = reader . expectInt ( ) ; Collection < Binary > result = new ArrayList < > ( ) ; for ( int i = 0 ; i < size ; i ++ ) { int length = reader . expectInt ( ) ; result . add ( reader . expectBinary ( length ) ) ; } return result ; } |
public class MMCImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public Object eGet ( int featureID , boolean resolve , boolean coreType ) { } } | switch ( featureID ) { case AfplibPackage . MMC__MM_CID : return getMMCid ( ) ; case AfplibPackage . MMC__PARAMETER1 : return getPARAMETER1 ( ) ; case AfplibPackage . MMC__RG : return getRg ( ) ; } return super . eGet ( featureID , resolve , coreType ) ; |
public class RealmTableImpl { /** * ( non - Javadoc )
* @ see org . jdiameter . client . api . controller . IRealmTable # getRealmForPeer ( java . lang . String ) */
@ Override public String getRealmForPeer ( String fqdn ) { } } | Collection < Realm > realms = getRealms ( ) ; for ( Realm r : realms ) { IRealm ir = ( IRealm ) r ; if ( ir . hasPeerName ( fqdn ) ) { return ir . getName ( ) ; } } return null ; |
public class CmsMacroResolver { /** * Reads a bundle ( key , value , descriptor ) from Descriptor Resource and property resource . < p >
* @ param resourceBundle property resource
* @ param descriptor resource
* @ param clonedCms cms instance
* @ return Map < key , [ value , descriptor ] >
* @ throws CmsXmlException exception
* @ throws CmsException exception */
public static Map < String , String [ ] > getBundleMapFromResources ( Properties resourceBundle , CmsResource descriptor , CmsObject clonedCms ) throws CmsXmlException , CmsException { } } | Map < String , String [ ] > ret = new LinkedHashMap < String , String [ ] > ( ) ; // Read XML content of descriptor
CmsXmlContent xmlContentDesc = CmsXmlContentFactory . unmarshal ( clonedCms , clonedCms . readFile ( descriptor ) ) ; CmsXmlContentValueSequence messages = xmlContentDesc . getValueSequence ( Descriptor . N_MESSAGE , Descriptor . LOCALE ) ; // Iterate through content
for ( int i = 0 ; i < messages . getElementCount ( ) ; i ++ ) { // Read key and default text from descriptor , label from bundle ( localized )
String prefix = messages . getValue ( i ) . getPath ( ) + "/" ; String key = xmlContentDesc . getValue ( prefix + Descriptor . N_KEY , Descriptor . LOCALE ) . getStringValue ( clonedCms ) ; String label = resourceBundle . getProperty ( key ) ; String defaultText = xmlContentDesc . getValue ( prefix + Descriptor . N_DESCRIPTION , Descriptor . LOCALE ) . getStringValue ( clonedCms ) ; ret . put ( key , new String [ ] { label , defaultText } ) ; } return ret ; |
public class XLog { /** * Log a message with level { @ link LogLevel # WARN } .
* @ param format the format of the message to log
* @ param args the arguments of the message to log */
public static void w ( String format , Object ... args ) { } } | assertInitialization ( ) ; sLogger . w ( format , args ) ; |
public class ConfigurationParseHelper { /** * Parses a string into an integer value .
* @ param value a string containing an int value to parse
* @ param errorMsgOnParseFailure message being wrapped in a SearchException if value is { @ code null } or not an
* integer
* @ return the parsed integer value
* @ throws SearchException both for null values and for Strings not containing a valid int . */
public static int parseInt ( String value , String errorMsgOnParseFailure ) { } } | if ( value == null ) { throw new SearchException ( errorMsgOnParseFailure ) ; } else { try { return Integer . parseInt ( value . trim ( ) ) ; } catch ( NumberFormatException nfe ) { throw log . getInvalidIntegerValueException ( errorMsgOnParseFailure , nfe ) ; } } |
public class FacesConfigTypeImpl { /** * Returns all < code > behavior < / code > elements
* @ return list of < code > behavior < / code > */
public List < FacesConfigBehaviorType < FacesConfigType < T > > > getAllBehavior ( ) { } } | List < FacesConfigBehaviorType < FacesConfigType < T > > > list = new ArrayList < FacesConfigBehaviorType < FacesConfigType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "behavior" ) ; for ( Node node : nodeList ) { FacesConfigBehaviorType < FacesConfigType < T > > type = new FacesConfigBehaviorTypeImpl < FacesConfigType < T > > ( this , "behavior" , childNode , node ) ; list . add ( type ) ; } return list ; |
public class ParameterAdapter { /** * Retrieves the type of a parameter , or otherwise null
* @ param definitionDocumentResolver the definition document resolver
* @ return the type of the parameter , or otherwise null */
private Type getType ( Map < String , Model > definitions , DocumentResolver definitionDocumentResolver ) { } } | Validate . notNull ( parameter , "parameter must not be null!" ) ; Type type = null ; if ( parameter instanceof BodyParameter ) { BodyParameter bodyParameter = ( BodyParameter ) parameter ; Model model = bodyParameter . getSchema ( ) ; if ( model != null ) { type = ModelUtils . getType ( model , definitions , definitionDocumentResolver ) ; } else { type = new BasicType ( "string" , bodyParameter . getName ( ) ) ; } } else if ( parameter instanceof AbstractSerializableParameter ) { AbstractSerializableParameter serializableParameter = ( AbstractSerializableParameter ) parameter ; @ SuppressWarnings ( "unchecked" ) List < String > enums = serializableParameter . getEnum ( ) ; if ( CollectionUtils . isNotEmpty ( enums ) ) { type = new EnumType ( serializableParameter . getName ( ) , enums ) ; } else { type = new BasicType ( serializableParameter . getType ( ) , serializableParameter . getName ( ) , serializableParameter . getFormat ( ) ) ; } if ( serializableParameter . getType ( ) . equals ( "array" ) ) { String collectionFormat = serializableParameter . getCollectionFormat ( ) ; type = new ArrayType ( serializableParameter . getName ( ) , new PropertyAdapter ( serializableParameter . getItems ( ) ) . getType ( definitionDocumentResolver ) , collectionFormat ) ; } } else if ( parameter instanceof RefParameter ) { String refName = ( ( RefParameter ) parameter ) . getSimpleRef ( ) ; type = new RefType ( definitionDocumentResolver . apply ( refName ) , new ObjectType ( refName , null /* FIXME , not used for now */
) ) ; } return type ; |
public class AbstractReadableSetProperty { /** * Notifies the change listeners that items have been added .
* @ param newItems Newly added items . */
protected void doNotifyListenersOfAddedValues ( Set < R > newItems ) { } } | List < SetValueChangeListener < R > > listenersCopy = new ArrayList < SetValueChangeListener < R > > ( listeners ) ; Set < R > unmodifiable = Collections . unmodifiableSet ( newItems ) ; for ( SetValueChangeListener < R > listener : listenersCopy ) { listener . valuesAdded ( this , unmodifiable ) ; } |
public class GenerateIndexConfig { /** * Reads the annotations from the specified classes and generates MarkLogic index
* configurations specified by the annotations . Accepts the
* following options :
* - classes : a space - separated list of java pojo classes visible on the classpath
* - file : a file path to write with the output ( otherwise uses standard out )
* @ param args an array of the above documented options followed directly
* by the value for that option
* @ throws IOException if an error occurs reading the classes or writing the output
* @ throws IllegalStateException if errors are found in your annotations
* @ throws ClassNotFoundException if the classes are not found on the classpath */
public static void main ( String [ ] args ) throws IOException , ClassNotFoundException { } } | String [ ] classes = new String [ ] { } ; Writer out = null ; try { for ( int i = 0 ; i < args . length ; i ++ ) { String name = args [ i ] ; if ( name . startsWith ( "-" ) && name . length ( ) > 1 && ++ i < args . length ) { String argValue = args [ i ] ; if ( "-classes" . equals ( name ) ) { classes = argValue . split ( "\\s+" ) ; } else if ( "-file" . equals ( name ) ) { out = new FileWriter ( argValue ) ; } } } if ( out == null ) out = new OutputStreamWriter ( System . out ) ; ObjectMapper mapper = new ObjectMapper ( ) ; generateConfig ( classes , mapper , out ) ; } finally { if ( out != null ) out . close ( ) ; } |
public class Batch { /** * Try to add a record to this batch
* This method first check room space if a give record can be added
* If there is no space for new record , a null is returned ; otherwise { @ link Batch # append ( Object ) }
* is invoked and { @ link RecordFuture } object is returned . User can call get ( ) method on this object
* which will block current thread until the batch s fully completed ( sent and received acknowledgement ) .
* The future object also contains meta information where this new record is located , usually an offset inside this batch .
* @ param record : record needs to be added
* @ param callback : A callback which will be invoked when the whole batch gets sent and acknowledged
* @ param largeMessagePolicy : the { @ link LargeMessagePolicy } that is in effect for this batch
* @ return A future object which contains { @ link RecordMetadata } */
public Future < RecordMetadata > tryAppend ( D record , WriteCallback callback , LargeMessagePolicy largeMessagePolicy ) throws RecordTooLargeException { } } | if ( ! hasRoom ( record , largeMessagePolicy ) ) { LOG . debug ( "Cannot add {} to previous batch because the batch already has {} bytes" , record . toString ( ) , getCurrentSizeInByte ( ) ) ; if ( largeMessagePolicy == LargeMessagePolicy . FAIL ) { throw new RecordTooLargeException ( ) ; } return null ; } this . append ( record ) ; thunks . add ( new Thunk ( callback , getRecordSizeInByte ( record ) ) ) ; RecordFuture future = new RecordFuture ( latch , recordCount ) ; recordCount ++ ; return future ; |
public class JSONObject { /** * Get the long value associated with a key . If the number value is too long
* for a long , it will be clipped .
* @ param key A key string .
* @ return The long value .
* @ throws JSONException if the key is not found or if the value cannot be
* converted to a long . */
public long getLong ( String key ) { } } | verifyIsNull ( ) ; Object o = get ( key ) ; if ( o != null ) { return o instanceof Number ? ( ( Number ) o ) . longValue ( ) : ( long ) getDouble ( key ) ; } throw new JSONException ( "JSONObject[" + JSONUtils . quote ( key ) + "] is not a number." ) ; |
public class MessageProcessor { /** * Method to return the connection that the SYSTEM queue was created on .
* This connection is used wherever an internal producerSession or
* consumerSession is required .
* @ return SICoreConnection The connection made for registering internal
* consumers / producers . */
public MPCoreConnection getSystemConnection ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { SibTr . entry ( tc , "getSystemConnection" ) ; SibTr . exit ( tc , "getSystemConnection" , _connectionToMP ) ; } return _connectionToMP ; |
public class PutSigningProfileRequestMarshaller { /** * Marshall the given parameter object . */
public void marshall ( PutSigningProfileRequest putSigningProfileRequest , ProtocolMarshaller protocolMarshaller ) { } } | if ( putSigningProfileRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( putSigningProfileRequest . getProfileName ( ) , PROFILENAME_BINDING ) ; protocolMarshaller . marshall ( putSigningProfileRequest . getSigningMaterial ( ) , SIGNINGMATERIAL_BINDING ) ; protocolMarshaller . marshall ( putSigningProfileRequest . getPlatformId ( ) , PLATFORMID_BINDING ) ; protocolMarshaller . marshall ( putSigningProfileRequest . getOverrides ( ) , OVERRIDES_BINDING ) ; protocolMarshaller . marshall ( putSigningProfileRequest . getSigningParameters ( ) , SIGNINGPARAMETERS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class ProfileManager { /** * SDK5.0 signature */
public ProfilePolicyMetadata [ ] queryPolicyMetadata ( String [ ] policyName , Profile profile ) throws RuntimeFault , RemoteException { } } | return getVimService ( ) . queryPolicyMetadata ( getMOR ( ) , policyName , profile == null ? null : profile . getMOR ( ) ) ; |
public class RTreeIndexCoreExtension { /** * Check if the table and column has the RTree extension and if found , drop
* the triggers
* @ param tableName
* table name
* @ param columnName
* column name
* @ return true if dropped */
public boolean dropTriggers ( String tableName , String columnName ) { } } | boolean dropped = has ( tableName , columnName ) ; if ( dropped ) { dropAllTriggers ( tableName , columnName ) ; } return dropped ; |
public class SchedulerForType { /** * Preempt a session .
* @ param schedulable The session .
* @ param maxToPreemt Maximum number of resources to preempt .
* @ param maxRunningTime Running time threshold for preemption .
* @ return The number of preempted resources .
* @ throws InvalidSessionHandle */
private int preemptSession ( SessionSchedulable schedulable , int maxToPreemt , long maxRunningTime ) throws InvalidSessionHandle { } } | Session session = schedulable . getSession ( ) ; List < Integer > grantIds ; synchronized ( session ) { grantIds = session . getGrantsToPreempt ( maxToPreemt , maxRunningTime , type ) ; } List < ResourceGrant > revokedGrants = sessionManager . revokeResource ( session . getHandle ( ) , grantIds ) ; for ( ResourceGrant grant : revokedGrants ) { nodeManager . cancelGrant ( grant . nodeName , session . getSessionId ( ) , grant . getId ( ) ) ; } sessionNotifier . notifyRevokeResource ( session . getHandle ( ) , revokedGrants , true ) ; int preempted = revokedGrants . size ( ) ; LOG . info ( "Preempt " + preempted + " " + type + " tasks for Session:" + session . getHandle ( ) ) ; return preempted ; |
public class GrammarAccess { /** * Returns the method name for accessing a rule ' s content via a ParseRuleAccess implementation .
* @ param rule the rule for which the accessor method is needed
* @ return the method ' s name . */
public String gaRuleElementsMethodName ( final AbstractRule rule ) { } } | String _gaRuleIdentifyer = this . gaRuleIdentifyer ( rule ) ; String _plus = ( "get" + _gaRuleIdentifyer ) ; return ( _plus + "Access" ) ; |
public class WebAppDescriptorImpl { /** * If not already created , a new < code > context - param < / code > element will be created and returned .
* Otherwise , the first existing < code > context - param < / code > element will be returned .
* @ return the instance defined for the element < code > context - param < / code > */
public ParamValueType < WebAppDescriptor > getOrCreateContextParam ( ) { } } | List < Node > nodeList = model . get ( "context-param" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new ParamValueTypeImpl < WebAppDescriptor > ( this , "context-param" , model , nodeList . get ( 0 ) ) ; } return createContextParam ( ) ; |
public class GeometryServiceImpl { /** * Format the given geometry object to Well Known Text representation .
* @ param geometry
* The geometry to format .
* @ return Get WKT representation of the given geometry , or null in case something went wrong . */
public String toWkt ( Geometry geometry ) { } } | try { return WktService . toWkt ( geometry ) ; } catch ( WktException e ) { return null ; } |
public class StatsAgent { /** * return null . Yes , ugly . Bang it out , then refactor later . */
private String parseParamsForStatistics ( ParameterSet params , JSONObject obj ) throws Exception { } } | if ( ( params . toArray ( ) . length < 1 ) || ( params . toArray ( ) . length > 2 ) ) { return "Incorrect number of arguments to @Statistics (expects 2, received " + params . toArray ( ) . length + ")" ; } Object first = params . toArray ( ) [ 0 ] ; if ( ! ( first instanceof String ) ) { return "First argument to @Statistics must be a valid STRING selector, instead was " + first ; } String subselector = ( String ) first ; try { StatsSelector s = StatsSelector . valueOf ( subselector . toUpperCase ( ) ) ; subselector = s . name ( ) ; } catch ( Exception e ) { return "First argument to @Statistics must be a valid STRING selector, instead was " + first ; } boolean interval = false ; if ( params . toArray ( ) . length == 2 ) { interval = ( ( Number ) ( params . toArray ( ) [ 1 ] ) ) . longValue ( ) == 1L ; } obj . put ( "subselector" , subselector ) ; obj . put ( "interval" , interval ) ; return null ; |
public class MenuUtil { /** * Adds a new menu item to the menu with the specified name and
* attributes . The supplied method name will be called ( it must have
* the same signature as { @ link ActionListener # actionPerformed } but
* can be named whatever you like ) when the menu item is selected .
* @ param menu the menu to add the item to .
* @ param name the item name .
* @ param mnem the mnemonic key for the item .
* @ param accel the keystroke for the item or null if none .
* @ param target the object on which to invoke a method when the menu is selected .
* @ param callbackName the name of the method to invoke when the menu is selected .
* @ return the new menu item . */
public static JMenuItem addMenuItem ( JMenu menu , String name , int mnem , KeyStroke accel , Object target , String callbackName ) { } } | JMenuItem item = createItem ( name , Integer . valueOf ( mnem ) , accel ) ; item . addActionListener ( new ReflectedAction ( target , callbackName ) ) ; menu . add ( item ) ; return item ; |
public class ClientSocketStats { /** * Record operation for async ops time
* @ param dest Destination of the socket to connect to . Will actually record
* if null . Otherwise will call this on self and corresponding child
* with this param null .
* @ param opTimeUs The number of us for the op to finish */
public void recordAsyncOpTimeNs ( SocketDestination dest , long opTimeNs ) { } } | if ( dest != null ) { getOrCreateNodeStats ( dest ) . recordAsyncOpTimeNs ( null , opTimeNs ) ; recordAsyncOpTimeNs ( null , opTimeNs ) ; } else { this . asynOpTimeRequestCounter . addRequest ( opTimeNs ) ; } |
public class DataPage { /** * 取得List的第N页的subList
* @ param list
* 要分页的list
* @ param pageSize
* @ param pageNo
* @ return List */
public static < T > List < T > subList ( List < T > list , int pageSize , int pageNo ) { } } | pageSize = ( pageSize <= 0 ? 10 : pageSize ) ; pageNo = ( pageNo <= 0 ? 1 : pageNo ) ; int begin = ( pageSize * ( pageNo - 1 ) > list . size ( ) ? list . size ( ) : pageSize * ( pageNo - 1 ) ) ; int end = ( pageSize * pageNo > list . size ( ) ? list . size ( ) : pageSize * pageNo ) ; return new ArrayList < T > ( list . subList ( begin , end ) ) ; |
public class AstyanaxTableDAO { /** * Returns a data center with local access to all specified placements using a deterministic algorithm that
* always picks the same data center given the same set of placements . Returns { @ code null } if no data center
* has access to all specified placements . */
private String selectDataCenterForPlacements ( String ... placements ) { } } | // Get the set of data centers that have local access to all the placements . Then pick one deterministically
// ( pick the one that sorts first alphabetically ) and designate that one as the one to perform maintenance .
Set < DataCenter > intersection = null ; for ( String placement : placements ) { Set < DataCenter > dataCenters = Sets . newLinkedHashSet ( _placementFactory . getDataCenters ( placement ) ) ; if ( intersection == null ) { intersection = dataCenters ; } else { intersection . retainAll ( dataCenters ) ; } } if ( intersection == null || intersection . isEmpty ( ) ) { return null ; } return Ordering . natural ( ) . min ( intersection ) . getName ( ) ; |
public class GVRMain { /** * Override this method to supply a custom splash screen image .
* @ param gvrContext
* The new { @ link GVRContext }
* @ return Texture to display
* @ since 1.6.4 */
public GVRTexture getSplashTexture ( GVRContext gvrContext ) { } } | Bitmap bitmap = BitmapFactory . decodeResource ( gvrContext . getContext ( ) . getResources ( ) , R . drawable . __default_splash_screen__ ) ; GVRTexture tex = new GVRTexture ( gvrContext ) ; tex . setImage ( new GVRBitmapImage ( gvrContext , bitmap ) ) ; return tex ; |
public class HMMPOSTagger { /** * Load training data from a corpus .
* @ param dir a file object defining the top directory */
public static void load ( String dir , List < String [ ] > sentences , List < PennTreebankPOS [ ] > labels ) { } } | List < File > files = new ArrayList < > ( ) ; walkin ( new File ( dir ) , files ) ; for ( File file : files ) { try { FileInputStream stream = new FileInputStream ( file ) ; BufferedReader reader = new BufferedReader ( new InputStreamReader ( stream ) ) ; String line = null ; List < String > sent = new ArrayList < > ( ) ; List < PennTreebankPOS > label = new ArrayList < > ( ) ; while ( ( line = reader . readLine ( ) ) != null ) { line = line . trim ( ) ; if ( line . isEmpty ( ) ) { if ( ! sent . isEmpty ( ) ) { sentences . add ( sent . toArray ( new String [ sent . size ( ) ] ) ) ; labels . add ( label . toArray ( new PennTreebankPOS [ label . size ( ) ] ) ) ; sent . clear ( ) ; label . clear ( ) ; } } else if ( ! line . startsWith ( "===" ) && ! line . startsWith ( "*x*" ) ) { String [ ] words = line . split ( "\\s" ) ; for ( String word : words ) { String [ ] w = word . split ( "/" ) ; if ( w . length == 2 ) { sent . add ( w [ 0 ] ) ; int pos = w [ 1 ] . indexOf ( '|' ) ; String tag = pos == - 1 ? w [ 1 ] : w [ 1 ] . substring ( 0 , pos ) ; if ( tag . equals ( "PRP$R" ) ) tag = "PRP$" ; if ( tag . equals ( "JJSS" ) ) tag = "JJS" ; label . add ( PennTreebankPOS . getValue ( tag ) ) ; } } } } if ( ! sent . isEmpty ( ) ) { sentences . add ( sent . toArray ( new String [ sent . size ( ) ] ) ) ; labels . add ( label . toArray ( new PennTreebankPOS [ label . size ( ) ] ) ) ; sent . clear ( ) ; label . clear ( ) ; } reader . close ( ) ; } catch ( Exception e ) { logger . error ( "Failed to load training data {}" , file , e ) ; } } |
public class LineageInfo { /** * Prefix all keys with { @ link LineageEventBuilder # LIENAGE _ EVENT _ NAMESPACE } */
private static String getKey ( Object ... objects ) { } } | Object [ ] args = new Object [ objects . length + 1 ] ; args [ 0 ] = LineageEventBuilder . LIENAGE_EVENT_NAMESPACE ; System . arraycopy ( objects , 0 , args , 1 , objects . length ) ; return LineageEventBuilder . getKey ( args ) ; |
public class ElementTag { /** * Resolves all attributes , setting into the created element as appropriate ,
* This is only called for captureLevel > = META .
* Attributes are resolved before the element is added to any parent node .
* Typically , deferred expressions will be evaluated here .
* Overriding methods must call this implementation . */
protected void evaluateAttributes ( E element , ELContext elContext ) throws JspTagException , IOException { } } | String idStr = nullIfEmpty ( resolveValue ( id , String . class , elContext ) ) ; if ( idStr != null ) element . setId ( idStr ) ; |
public class AbstractControllerConfig { /** * Adds a set of filters to a set of controllers .
* The filters are invoked in the order specified . Will reject adding the same instance of a filter more than once .
* @ param filters filters to be added .
* @ return object with < code > to ( ) < / code > method which accepts a controller class . */
protected FilterBuilder add ( HttpSupportFilter ... filters ) { } } | for ( HttpSupportFilter filter : filters ) { if ( allFilters . contains ( filter ) ) { throw new IllegalArgumentException ( "Cannot register the same filter instance more than once." ) ; } } allFilters . addAll ( Collections . list ( filters ) ) ; return new FilterBuilder ( filters ) ; |
public class JawrLessSource { /** * Adds a linked resource to the less source
* @ param linkedResource
* the linked resource to add */
private void addLinkedResource ( FilePathMapping linkedResource ) { } } | linkedResources . add ( linkedResource ) ; if ( parent != null ) { parent . addLinkedResource ( linkedResource ) ; } |
public class ViewHelper { /** * Equivalent to calling ImageView . setImageResource
* @ param cacheView The cache of views to get the view from
* @ param viewId The id of the view whose image should change
* @ param resId the resource identifier of the drawable */
public static void setImageResource ( EfficientCacheView cacheView , int viewId , @ DrawableRes int resId ) { } } | View view = cacheView . findViewByIdEfficient ( viewId ) ; if ( view instanceof ImageView ) { ( ( ImageView ) view ) . setImageResource ( resId ) ; } |
public class JobConf { /** * Get the { @ link JobPriority } for this job .
* @ return the { @ link JobPriority } for this job . */
public JobPriority getJobPriority ( ) { } } | String prio = get ( "mapred.job.priority" ) ; if ( prio == null ) { return JobPriority . NORMAL ; } return JobPriority . valueOf ( prio ) ; |
public class AnalyzedTokenReadings { /** * Checks if one of the token ' s readings has one of the given lemmas
* @ param lemmas to look for */
public boolean hasAnyLemma ( String ... lemmas ) { } } | boolean found = false ; for ( String lemma : lemmas ) { for ( AnalyzedToken reading : anTokReadings ) { if ( reading . getLemma ( ) != null ) { found = lemma . equals ( reading . getLemma ( ) ) ; if ( found ) { return found ; } } } } return found ; |
public class BusStop { /** * Add a bus halt reference .
* @ param halt the halt . */
void addBusHalt ( BusItineraryHalt halt ) { } } | if ( this . halts == null ) { this . halts = new WeakArrayList < > ( ) ; } this . halts . add ( halt ) ; |
public class HttpUtil { /** * 将Map形式的Form表单数据转换为Url参数形式 < br >
* paramMap中如果key为空 ( null和 " " ) 会被忽略 , 如果value为null , 会被做为空白符 ( " " ) < br >
* 会自动url编码键和值
* < pre >
* key1 = v1 & amp ; key2 = & amp ; key3 = v3
* < / pre >
* @ param paramMap 表单数据
* @ param charset 编码
* @ return url参数 */
public static String toParams ( Map < String , ? > paramMap , Charset charset ) { } } | if ( CollectionUtil . isEmpty ( paramMap ) ) { return StrUtil . EMPTY ; } if ( null == charset ) { // 默认编码为系统编码
charset = CharsetUtil . CHARSET_UTF_8 ; } final StringBuilder sb = new StringBuilder ( ) ; boolean isFirst = true ; String key ; Object value ; String valueStr ; for ( Entry < String , ? > item : paramMap . entrySet ( ) ) { if ( isFirst ) { isFirst = false ; } else { sb . append ( "&" ) ; } key = item . getKey ( ) ; value = item . getValue ( ) ; if ( value instanceof Iterable ) { value = CollectionUtil . join ( ( Iterable < ? > ) value , "," ) ; } else if ( value instanceof Iterator ) { value = CollectionUtil . join ( ( Iterator < ? > ) value , "," ) ; } valueStr = Convert . toStr ( value ) ; if ( StrUtil . isNotEmpty ( key ) ) { sb . append ( URLUtil . encodeQuery ( key , charset ) ) . append ( "=" ) ; if ( StrUtil . isNotEmpty ( valueStr ) ) { sb . append ( URLUtil . encodeQuery ( valueStr , charset ) ) ; } } } return sb . toString ( ) ; |
public class Page { /** * Returns the value at a specified offset of this page . If a constant was
* not stored at that offset , the behavior of the method is unpredictable .
* @ param offset
* the byte offset within the page
* @ param type
* the type of the value
* @ return the constant value at that offset */
public synchronized Constant getVal ( int offset , Type type ) { } } | int size ; byte [ ] byteVal = null ; // Check the length of bytes
if ( type . isFixedSize ( ) ) { size = type . maxSize ( ) ; } else { byteVal = new byte [ ByteHelper . INT_SIZE ] ; contents . get ( offset , byteVal ) ; size = ByteHelper . toInteger ( byteVal ) ; offset += ByteHelper . INT_SIZE ; } // Get bytes and translate it to Constant
byteVal = new byte [ size ] ; contents . get ( offset , byteVal ) ; return Constant . newInstance ( type , byteVal ) ; |
public class UnsafeExternalSorter { /** * Return the total memory usage of this sorter , including the data pages and the sorter ' s pointer
* array . */
private long getMemoryUsage ( ) { } } | long totalPageSize = 0 ; for ( MemoryBlock page : allocatedPages ) { totalPageSize += page . size ( ) ; } return ( ( inMemSorter == null ) ? 0 : inMemSorter . getMemoryUsage ( ) ) + totalPageSize ; |
public class AsynchronousRequest { /** * For more info on pvp season API go < a href = " https : / / wiki . guildwars2 . com / wiki / API : 2 / pvp / seasons " > here < / a > < br / >
* Give user the access to { @ link Callback # onResponse ( Call , Response ) } and { @ link Callback # onFailure ( Call , Throwable ) } methods for custom interactions
* @ param ids list of pvp season id ( s )
* @ param callback callback that is going to be used for { @ link Call # enqueue ( Callback ) }
* @ throws GuildWars2Exception empty ID list
* @ throws NullPointerException if given { @ link Callback } is empty
* @ see PvPSeason pvp season info */
public void getPvPSeasonInfo ( String [ ] ids , Callback < List < PvPSeason > > callback ) throws GuildWars2Exception , NullPointerException { } } | isParamValid ( new ParamChecker ( ids ) ) ; gw2API . getPvPSeasonInfo ( processIds ( ids ) , GuildWars2 . lang . getValue ( ) ) . enqueue ( callback ) ; |
public class CmsAttributeValueView { /** * Activates the value widget if present . < p > */
void activateWidget ( ) { } } | if ( m_activationHandlerRegistration != null ) { m_activationHandlerRegistration . removeHandler ( ) ; m_activationHandlerRegistration = null ; } if ( ( m_widget != null ) && ! m_widget . isActive ( ) ) { m_widget . setActive ( true ) ; if ( ( m_defaultValue != null ) && ( m_defaultValue . trim ( ) . length ( ) > 0 ) ) { m_widget . setValue ( m_defaultValue , true ) ; } m_handler . updateButtonVisisbility ( ) ; removeStyleName ( formCss ( ) . emptyValue ( ) ) ; } |
public class ReportDefinitionServiceLocator { /** * For the given interface , get the stub implementation .
* If this service has no port for the given interface ,
* then ServiceException is thrown . */
public java . rmi . Remote getPort ( Class serviceEndpointInterface ) throws javax . xml . rpc . ServiceException { } } | try { if ( com . google . api . ads . adwords . axis . v201809 . cm . ReportDefinitionServiceInterface . class . isAssignableFrom ( serviceEndpointInterface ) ) { com . google . api . ads . adwords . axis . v201809 . cm . ReportDefinitionServiceSoapBindingStub _stub = new com . google . api . ads . adwords . axis . v201809 . cm . ReportDefinitionServiceSoapBindingStub ( new java . net . URL ( ReportDefinitionServiceInterfacePort_address ) , this ) ; _stub . setPortName ( getReportDefinitionServiceInterfacePortWSDDServiceName ( ) ) ; return _stub ; } } catch ( java . lang . Throwable t ) { throw new javax . xml . rpc . ServiceException ( t ) ; } throw new javax . xml . rpc . ServiceException ( "There is no stub implementation for the interface: " + ( serviceEndpointInterface == null ? "null" : serviceEndpointInterface . getName ( ) ) ) ; |
public class PreferenceFragment { /** * Returns the text of the example dialog ' s negative button .
* @ return The text of the negative button */
private String getNegativeButtonText ( ) { } } | SharedPreferences sharedPreferences = PreferenceManager . getDefaultSharedPreferences ( getActivity ( ) ) ; String key = getString ( R . string . negative_button_text_preference_key ) ; String defaultValue = getString ( R . string . negative_button_text_preference_default_value ) ; return sharedPreferences . getString ( key , defaultValue ) ; |
public class HttpUrl { /** * Returns a new { @ code HttpUrl } representing { @ code url } if it is a well - formed HTTP or HTTPS
* URL , or throws an exception if it isn ' t .
* @ throws MalformedURLException if there was a non - host related URL issue
* @ throws UnknownHostException if the host was invalid */
static HttpUrl getChecked ( String url ) throws MalformedURLException , UnknownHostException { } } | Builder builder = new Builder ( ) ; Builder . ParseResult result = builder . parse ( null , url ) ; switch ( result ) { case SUCCESS : return builder . build ( ) ; case INVALID_HOST : throw new UnknownHostException ( "Invalid host: " + url ) ; case UNSUPPORTED_SCHEME : case MISSING_SCHEME : case INVALID_PORT : default : throw new MalformedURLException ( "Invalid URL: " + result + " for " + url ) ; } |
public class PusherInternal { /** * create single thread supportExecutor for push replication */
private void initSupportExecutor ( ) { } } | if ( supportExecutor == null || supportExecutor . isShutdown ( ) ) { supportExecutor = Executors . newSingleThreadExecutor ( new ThreadFactory ( ) { @ Override public Thread newThread ( Runnable r ) { String maskedRemote = URLUtils . sanitizeURL ( remote ) ; return new Thread ( r , "CBLPusherSupportExecutor-" + maskedRemote ) ; } } ) ; } |
public class Metric { /** * ~ Methods * * * * * */
@ Override public void setScope ( String scope ) { } } | requireArgument ( scope != null && ! scope . trim ( ) . isEmpty ( ) , "Scope cannot be null or empty." ) ; super . setScope ( scope ) ; |
public class ClassEntry { /** * Loads the contents of the class file into the buffer . */
public void load ( ByteArrayBuffer buffer ) throws IOException { } } | synchronized ( this ) { Source classPath = getClassPath ( ) ; buffer . clear ( ) ; int retry = 3 ; for ( int i = 0 ; i < retry ; i ++ ) { long length = - 1 ; try ( InputStream is = classPath . inputStream ( ) ) { length = classPath . length ( ) ; long lastModified = classPath . getLastModified ( ) ; if ( length < 0 ) throw new IOException ( "class loading failed because class file '" + classPath + "' does not have a positive length. Possibly the file has been overwritten" ) ; buffer . setLength ( ( int ) length ) ; int results = IoUtil . readAll ( is , buffer . getBuffer ( ) , 0 , ( int ) length ) ; if ( results == length && length == classPath . length ( ) && lastModified == classPath . getLastModified ( ) ) { return ; } log . warning ( L . l ( "{0}: class file length mismatch expected={1} received={2}. The class file may have been modified concurrently." , this , length , results ) ) ; } } } |
public class CLAFactory { /** * instanceType .
* @ param oneField a { @ link java . lang . reflect . Field } object .
* @ return a { @ link java . lang . Class } object . */
static final public Class < ? > instanceType ( final Field oneField ) { } } | if ( oneField . getType ( ) . isEnum ( ) ) return oneField . getType ( ) ; if ( oneField . getType ( ) . isArray ( ) ) return oneField . getType ( ) . getComponentType ( ) ; if ( oneField . getType ( ) == List . class ) return ( ( Class < ? > ) ( ( ParameterizedType ) oneField . getGenericType ( ) ) . getActualTypeArguments ( ) [ 0 ] ) ; return oneField . getType ( ) ; |
public class BasicRegistry { /** * Finds key entries in the specified Map < String , ? > which match the specified pattern .
* @ see # getUsers ( String , int )
* @ see # getGroups ( String , int )
* @ param pattern pattern to match
* @ param limit limit of entries to return
* @ return a SearchResult object */
private SearchResult searchMap ( Map < String , ? > map , String pattern , int limit ) { } } | if ( pattern == null ) { throw new IllegalArgumentException ( "pattern is null" ) ; } if ( pattern . isEmpty ( ) ) { throw new IllegalArgumentException ( "pattern is an empty String" ) ; } String regexPattern = convertToRegex ( pattern ) ; if ( limit < 0 ) { return new SearchResult ( ) ; } if ( map . size ( ) == 0 ) { return new SearchResult ( ) ; } int count = 0 ; // Set the stopping point 1 past our limit . If we reach
// this point , then we know there are more entries than
// limit which match pattern , so we can say hasMore is
// true . We have to keep trying to match 1 past the limit
// because if we stop at limit , we can ' t be sure the
// other entries will match pattern .
int stoppingPoint = ( limit == 0 ) ? 0 : limit + 1 ; boolean hasMore = false ; List < String > matched = new ArrayList < String > ( ) ; Set < String > userNames = map . keySet ( ) ; Iterator < String > itr = userNames . iterator ( ) ; while ( itr . hasNext ( ) ) { String name = itr . next ( ) ; if ( name . matches ( regexPattern ) ) { matched . add ( name ) ; count ++ ; if ( count == stoppingPoint ) { matched . remove ( name ) ; hasMore = true ; break ; } } } if ( count > 0 ) { return new SearchResult ( matched , hasMore ) ; } else { return new SearchResult ( ) ; } |
public class JSDocInfo { /** * Documents a parameter . Parameters are described using the { @ code @ param }
* annotation .
* @ param parameter the parameter ' s name
* @ param description the parameter ' s description */
boolean documentParam ( String parameter , String description ) { } } | if ( ! lazyInitDocumentation ( ) ) { return true ; } if ( documentation . parameters == null ) { documentation . parameters = new LinkedHashMap < > ( ) ; } if ( ! documentation . parameters . containsKey ( parameter ) ) { documentation . parameters . put ( parameter , description ) ; return true ; } else { return false ; } |
public class Model { /** * Provides a list of child models in one to many , many to many and polymorphic associations , but in addition also allows to filter this list
* by criteria .
* < p > < / p >
* < strong > 1 . < / strong > For one to many , the criteria is against the child table .
* < p > < / p >
* < strong > 2 . < / strong > For polymorphic association , the criteria is against the child table .
* < p > < / p >
* < strong > 3 . < / strong > For many to many , the criteria is against the join table .
* For example , if you have table PROJECTS , ASSIGNMENTS and PROGRAMMERS , where a project has many programmers and a programmer
* has many projects , and ASSIGNMENTS is a join table , you can write code like this , assuming that the ASSIGNMENTS table
* has a column < code > duration _ weeks < / code > :
* < pre >
* List < Project > threeWeekProjects = programmer . get ( Project . class , " duration _ weeks = ? " , 3 ) ;
* < / pre >
* where this list will contain all projects to which this programmer is assigned for 3 weeks .
* @ param targetModelClass related type
* @ param criteria sub - query for join table .
* @ param params parameters for a sub - query
* @ return list of relations in many to many */
public < C extends Model > LazyList < C > get ( Class < C > targetModelClass , String criteria , Object ... params ) { } } | OneToManyAssociation oneToManyAssociation = metaModelLocal . getAssociationForTarget ( targetModelClass , OneToManyAssociation . class ) ; MetaModel mm = metaModelLocal ; Many2ManyAssociation manyToManyAssociation = metaModelLocal . getAssociationForTarget ( targetModelClass , Many2ManyAssociation . class ) ; OneToManyPolymorphicAssociation oneToManyPolymorphicAssociation = metaModelLocal . getAssociationForTarget ( targetModelClass , OneToManyPolymorphicAssociation . class ) ; String additionalCriteria = criteria != null ? " AND ( " + criteria + " ) " : "" ; String subQuery ; String targetId = metaModelOf ( targetModelClass ) . getIdName ( ) ; MetaModel targetMM = metaModelOf ( targetModelClass ) ; String targetTable = targetMM . getTableName ( ) ; if ( oneToManyAssociation != null ) { subQuery = oneToManyAssociation . getFkName ( ) + " = ? " + additionalCriteria ; } else if ( manyToManyAssociation != null ) { String joinTable = manyToManyAssociation . getJoin ( ) ; String query = "SELECT " + targetTable + ".* FROM " + targetTable + ", " + joinTable + " WHERE " + targetTable + "." + targetId + " = " + joinTable + "." + manyToManyAssociation . getTargetFkName ( ) + " AND " + joinTable + "." + manyToManyAssociation . getSourceFkName ( ) + " = ? " + additionalCriteria ; Object [ ] allParams = new Object [ params . length + 1 ] ; allParams [ 0 ] = getId ( ) ; System . arraycopy ( params , 0 , allParams , 1 , params . length ) ; return new LazyList < > ( true , metaModelOf ( manyToManyAssociation . getTargetClass ( ) ) , query , allParams ) ; } else if ( oneToManyPolymorphicAssociation != null ) { subQuery = "parent_id = ? AND " + " parent_type = '" + oneToManyPolymorphicAssociation . getTypeLabel ( ) + "'" + additionalCriteria ; } else { throw new NotAssociatedException ( metaModelLocal . 
getModelClass ( ) , targetModelClass ) ; } Object [ ] allParams = new Object [ params . length + 1 ] ; allParams [ 0 ] = getId ( ) ; System . arraycopy ( params , 0 , allParams , 1 , params . length ) ; return new LazyList < > ( subQuery , targetMM , allParams ) ; |
public class UserFeedback { /** * Add a Warn UserFeedbackEvent and log . */
public void warn ( UserFeedbackEvent . Stage stage , String message ) { } } | Log . warn ( stage + ": " + message ) ; addEvent ( new UserFeedbackEvent ( autoML , UserFeedbackEvent . Level . Warn , stage , message ) ) ; |
public class DeleteFileExtensions { /** * Tries to delete all given files in the list . Caution : This cannot be undone .
* @ param files
* The files to delete .
* @ throws IOException
* Signals that an I / O exception has occurred . */
public static void delete ( final Collection < File > files ) throws IOException { } } | for ( final File file : files ) { delete ( file ) ; } |
public class MessageSerializer { /** * Serializes the failure message sent to the
* { @ link org . apache . flink . queryablestate . network . Client } in case of
* server related errors .
* @ param alloc The { @ link ByteBufAllocator } used to allocate the buffer to serialize the message into .
* @ param cause The exception thrown at the server .
* @ return The failure message . */
public static ByteBuf serializeServerFailure ( final ByteBufAllocator alloc , final Throwable cause ) throws IOException { } } | final ByteBuf buf = alloc . ioBuffer ( ) ; // Frame length is set at end
buf . writeInt ( 0 ) ; writeHeader ( buf , MessageType . SERVER_FAILURE ) ; try ( ByteBufOutputStream bbos = new ByteBufOutputStream ( buf ) ; ObjectOutput out = new ObjectOutputStream ( bbos ) ) { out . writeObject ( cause ) ; } // Set frame length
int frameLength = buf . readableBytes ( ) - Integer . BYTES ; buf . setInt ( 0 , frameLength ) ; return buf ; |
public class LagrangeFormula { /** * Using Lagrange ' s formula it interpolates the value of a function at the specified sample
* point given discrete samples . Which samples are used and the order of the approximation are
* given by i0 and i1.
* @ param sample Where the estimate is done .
* @ param x Where the function was sampled .
* @ param y The function ' s value at the sample points
* @ param i0 The first point considered .
* @ param i1 The last point considered .
* @ return The estimated y value at the sample point . */
public static double process_F64 ( double sample , double x [ ] , double y [ ] , int i0 , int i1 ) { } } | double result = 0 ; for ( int i = i0 ; i <= i1 ; i ++ ) { double numerator = 1.0 ; for ( int j = i0 ; j <= i1 ; j ++ ) { if ( i != j ) numerator *= sample - x [ j ] ; } double denominator = 1.0 ; double a = x [ i ] ; for ( int j = i0 ; j <= i1 ; j ++ ) { if ( i != j ) denominator *= a - x [ j ] ; } result += ( numerator / denominator ) * y [ i ] ; } return result ; |
public class Journal { /** * Retrieve the persisted data for recovering the given segment from disk . */
private PersistedRecoveryPaxosData getPersistedPaxosData ( long segmentTxId ) throws IOException { } } | File f = journalStorage . getPaxosFile ( segmentTxId ) ; if ( ! f . exists ( ) ) { // Default instance has no fields filled in ( they ' re optional )
return null ; } InputStream in = new FileInputStream ( f ) ; try { PersistedRecoveryPaxosData ret = PersistedRecoveryPaxosData . parseDelimitedFrom ( in ) ; Preconditions . checkState ( ret != null && ret . getSegmentState ( ) . getStartTxId ( ) == segmentTxId , "Bad persisted data for segment %s: %s" , segmentTxId , ret ) ; return ret ; } finally { IOUtils . closeStream ( in ) ; } |
public class CommerceTaxFixedRateAddressRelUtil { /** * Returns the last commerce tax fixed rate address rel in the ordered set where commerceTaxMethodId = & # 63 ; .
* @ param commerceTaxMethodId the commerce tax method ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching commerce tax fixed rate address rel , or < code > null < / code > if a matching commerce tax fixed rate address rel could not be found */
public static CommerceTaxFixedRateAddressRel fetchByCommerceTaxMethodId_Last ( long commerceTaxMethodId , OrderByComparator < CommerceTaxFixedRateAddressRel > orderByComparator ) { } } | return getPersistence ( ) . fetchByCommerceTaxMethodId_Last ( commerceTaxMethodId , orderByComparator ) ; |
public class WriteRecordClass {
    /**
     * Write the resource file for this record class.
     *
     * Emits the localizable field descriptions, then the field tips, either as
     * a Java {@code ListResourceBundle} source file or as a .properties file,
     * depending on the program's configured resource type. The record cursor is
     * repositioned afterwards via readRecordClass.
     *
     * @param strClassName the (record) class name the resources are generated for
     */
    public void writeFieldResources(String strClassName) {
        // Resource type switch: ListResourceBundle Java source vs. .properties text.
        boolean bResourceListBundle = ResourceTypeField.LIST_RESOURCE_BUNDLE.equals(this.getRecord(ProgramControl.PROGRAM_CONTROL_FILE).getField(ProgramControl.RESOURCE_TYPE).toString());
        Record recClassInfo = this.getMainRecord();
        String strPackage = this.getPackage(bResourceListBundle ? CodeType.RESOURCE_CODE : CodeType.RESOURCE_PROPERTIES);
        // Now, write the field resources (descriptions)
        FieldStuff fieldStuff = new FieldStuff();
        String strBaseClass = "ListResourceBundle";
        String strClassSuffix = "Resources";
        boolean bResourceOnlyFile = false;
        if (RESOURCE_CLASS.equals(recClassInfo.getField(ClassInfo.BASE_CLASS_NAME).toString()))
            bResourceOnlyFile = true;   // Resource only class
        // Avoid a doubled "Resources" suffix when the class is itself resource-only.
        if (bResourceOnlyFile)
            if (strClassName.endsWith(strClassSuffix))
                strClassSuffix = "";
        if (!bResourceListBundle)
            strClassSuffix += ".properties";
        this.writeHeading(strClassName + strClassSuffix, strPackage, bResourceListBundle ? ClassProject.CodeType.RESOURCE_CODE : ClassProject.CodeType.RESOURCE_PROPERTIES);
        if (bResourceListBundle) {
            // Boilerplate header of the generated ListResourceBundle subclass.
            m_StreamOut.writeit("package " + strPackage + ";\n\n");
            m_StreamOut.writeit("import java.util.*;\n\n");
            m_StreamOut.writeit("public class " + strClassName + strClassSuffix + " extends " + strBaseClass + "\n");
            m_StreamOut.writeit("{\n");
            m_StreamOut.setTabs(+1);
            m_StreamOut.writeit("public Object[][] getContents()\n");
            m_StreamOut.writeit("{\n");
            m_StreamOut.writeit("\treturn contents;\n");
            m_StreamOut.writeit("}\n");
            m_StreamOut.writeit("\n");
            m_StreamOut.writeit("// To Localize this, just change the strings in the second column\n");
            m_StreamOut.writeit("protected static final Object[][] contents =\n");
            m_StreamOut.writeit("{\n");
            m_StreamOut.setTabs(-1);
        }
        // 'count' drives the comma/newline separators between emitted entries
        // across BOTH loops below -- do not reset it between them.
        int count = 0;
        try {
            Record recFieldData = this.getRecord(FieldData.FIELD_DATA_FILE);
            if (!bResourceOnlyFile) {
                // First pass: one entry per field, keyed by field name, with the
                // field description as the localizable value.
                recFieldData.close();
                while (recFieldData.hasNext()) {
                    recFieldData.next();
                    this.getFieldData(fieldStuff, false);
                    {
                        // Separator before every entry except the first:
                        // "," only in bundle mode, newline in both modes.
                        if (bResourceListBundle)
                            if (count > 0)
                                m_StreamOut.writeit(",");
                        if (count > 0)
                            m_StreamOut.writeit("\n");
                        // Fall back to the field name when no description exists
                        // (the description arrives as the literal string "null").
                        if (fieldStuff.strFieldDesc.equals("null"))
                            fieldStuff.strFieldDesc = fieldStuff.strFieldName;
                        if (bResourceListBundle)
                            m_StreamOut.writeit("\t\t{\"" + ResourcesUtilities.fixPropertyKey(fieldStuff.strFieldName) + "\", " + ResourcesUtilities.fixPropertyValue(fieldStuff.strFieldDesc, bResourceListBundle) + "}");
                        else
                            m_StreamOut.writeit(ResourcesUtilities.fixPropertyKey(fieldStuff.strFieldName) + "=" + ResourcesUtilities.fixPropertyValue(fieldStuff.strFieldDesc, bResourceListBundle));
                        count++;
                    }
                }
                // Now write out the tips
                this.readRecordClass(strClassName);   // Return the record to the original position
            }
            String strTipSuffix = DBConstants.TIP;
            if (bResourceOnlyFile)
                strTipSuffix = "";
            // Second pass: one entry per field that has a tip, keyed by
            // field name + tip suffix.
            recFieldData.close();
            while (recFieldData.hasNext()) {
                recFieldData.next();
                // Comment out (//" or "# ") tips of base fields that have no
                // vertical description, so they are present but inactive.
                String strPre = DBConstants.BLANK;
                if (recFieldData.getField(FieldData.FIELD_NAME).getString().equals(recFieldData.getField(FieldData.BASE_FIELD_NAME).getString()))
                    if (recFieldData.getField(FieldData.FIELD_DESC_VERTICAL).getLength() == 0) {
                        if (bResourceListBundle)
                            strPre = "//";
                        else
                            strPre = "# ";
                    }
                this.getFieldData(fieldStuff, false);
                if (fieldStuff.strFieldTip != null) {
                    if (bResourceListBundle)
                        if (count > 0)
                            m_StreamOut.writeit(",");
                    if (count > 0)
                        m_StreamOut.writeit("\n");
                    if (bResourceListBundle)
                        m_StreamOut.writeit(strPre + "\t\t{\"" + ResourcesUtilities.fixPropertyKey(fieldStuff.strFieldName + strTipSuffix) + "\", " + ResourcesUtilities.fixPropertyValue(fieldStuff.strFieldTip, bResourceListBundle) + "}");
                    else
                        m_StreamOut.writeit(strPre + ResourcesUtilities.fixPropertyKey(fieldStuff.strFieldName + strTipSuffix) + "=" + ResourcesUtilities.fixPropertyValue(fieldStuff.strFieldTip, bResourceListBundle));
                    count++;
                }
            }
            if (bResourceListBundle) {
                // Close the contents array of the generated bundle class.
                m_StreamOut.writeit("\n");
                m_StreamOut.setTabs(+1);
                m_StreamOut.writeit("// END OF MATERIAL TO LOCALIZE\n");
                m_StreamOut.writeit("};\n");
            }
            recFieldData.close();
            this.writeEndCode(bResourceListBundle);
        } catch (DBException ex) {
            // NOTE(review): best-effort generation -- DB errors are logged and
            // swallowed so the caller still gets the cursor repositioned below.
            ex.printStackTrace();
        }
        this.readRecordClass(strClassName);   // Return the record to the original position
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.