signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DialogRootView { /** * Creates and returns a listener , which allows to observe when the scroll view , which is * contained by the dialog , is scrolled . * @ return The listener , which has been created , as an instance of the type { @ link * OnScrollChangedListener } . The listener may not be null */ @ NonNull private ScrollListener createScrollViewScrollListener ( ) { } }
return new ScrollListener ( ) { @ Override public void onScrolled ( final boolean scrolledToTop , final boolean scrolledToBottom ) { adaptDividerVisibilities ( scrolledToTop , scrolledToBottom , true ) ; } } ;
public class AntlrProgramBuilder {

    /**
     * Generates Beetl's ProgramMetaData from an ANTLR {@link ParseTree}.
     *
     * @param tree the parsed template tree
     * @return the assembled program metadata
     */
    public ProgramMetaData build(ParseTree tree) {
        // The last child is skipped — presumably the EOF token node; TODO confirm.
        int size = tree.getChildCount() - 1;
        List<Statement> ls = new ArrayList<Statement>(size);
        for (int i = 0; i < size; i++) {
            Statement st = parseStatment((ParserRuleContext) tree.getChild(i));
            // parseStatment may return null for nodes that produce no statement.
            if (st != null) {
                ls.add(st);
            }
        }
        if (pbCtx.current.gotoValue == IGoto.RETURN || pbCtx.current.gotoValue == IGoto.BREAK) {
            // If the top-level scope itself contains a return or break, record it.
            data.hasGoto = true;
        }
        pbCtx.anzlyszeGlobal();
        pbCtx.anzlyszeLocal();
        data.varIndexSize = pbCtx.varIndexSize;
        // Temporary variables are indexed after all global variables.
        data.tempVarStartIndex = pbCtx.globalIndexMap.size();
        data.statements = ls.toArray(new Statement[0]);
        data.globalIndexMap = pbCtx.globalIndexMap;
        data.globalVarAttr = pbCtx.globaVarAttr;
        data.setTemplateRootScopeIndexMap(pbCtx.rootIndexMap);
        return data;
    }

}
public class ReportingApi { /** * Delete a subscription * Delete the specified subscription by closing all its statistics . * @ param subscriptionId The unique ID of the subscription . ( required ) * @ return ApiResponse & lt ; ApiSuccessResponse & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < ApiSuccessResponse > unsubscribeWithHttpInfo ( String subscriptionId ) throws ApiException { } }
com . squareup . okhttp . Call call = unsubscribeValidateBeforeCall ( subscriptionId , null , null ) ; Type localVarReturnType = new TypeToken < ApiSuccessResponse > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class ASMifier { /** * Prints the ASM source code to generate the given class to the standard * output . * Usage : ASMifier [ - debug ] & lt ; binary class name or class file name & gt ; * @ param args * the command line arguments . * @ throws Exception * if the class cannot be found , or if an IO exception occurs . */ public static void main ( final String [ ] args ) throws Exception { } }
int i = 0 ; int flags = ClassReader . SKIP_DEBUG ; boolean ok = true ; if ( args . length < 1 || args . length > 2 ) { ok = false ; } if ( ok && "-debug" . equals ( args [ 0 ] ) ) { i = 1 ; flags = 0 ; if ( args . length != 2 ) { ok = false ; } } if ( ! ok ) { System . err . println ( "Prints the ASM code to generate the given class." ) ; System . err . println ( "Usage: ASMifier [-debug] " + "<fully qualified class name or class file name>" ) ; return ; } ClassReader cr ; if ( args [ i ] . endsWith ( ".class" ) || args [ i ] . indexOf ( '\\' ) > - 1 || args [ i ] . indexOf ( '/' ) > - 1 ) { cr = new ClassReader ( new FileInputStream ( args [ i ] ) ) ; } else { cr = new ClassReader ( args [ i ] ) ; } cr . accept ( new TraceClassVisitor ( null , new ASMifier ( ) , new PrintWriter ( System . out ) ) , flags ) ;
public class HsqlDbms { /** * Returns the { @ link HsqlDatabaseDescriptor } with the given name . */ @ Override public HsqlDatabaseDescriptor dbByName ( final String dbName ) { } }
Collection < HsqlDatabaseDescriptor > select = CollectionUtils . select ( dbs , new Predicate < HsqlDatabaseDescriptor > ( ) { @ Override public boolean evaluate ( HsqlDatabaseDescriptor db ) { return dbName . equals ( db . getDbName ( ) ) ; } } ) ; if ( CollectionUtils . size ( select ) == 0 ) { throw new IllegalArgumentException ( "There are no databases called '" + dbName + "'" ) ; } if ( CollectionUtils . size ( select ) >= 2 ) { throw new IllegalArgumentException ( "More than one database is called '" + dbName + "'" ) ; } return CollectionUtils . extractSingleton ( select ) ;
public class appfwjsoncontenttype { /** * Use this API to delete appfwjsoncontenttype of given name . */ public static base_response delete ( nitro_service client , String jsoncontenttypevalue ) throws Exception { } }
appfwjsoncontenttype deleteresource = new appfwjsoncontenttype ( ) ; deleteresource . jsoncontenttypevalue = jsoncontenttypevalue ; return deleteresource . delete_resource ( client ) ;
public class AnnotationDetector { /** * Scan as list . * @ param clas the clas * @ param basePackage the base package * @ return the list */ public static List < String > scanAsList ( final Class < ? extends Annotation > clas , final String ... basePackage ) { } }
final List < String > classes = new ArrayList < > ( ) ; scan ( clas , basePackage , new AnnotationHandler ( ) { @ Override public void handleAnnotationFound ( String className ) { classes . add ( className ) ; } } ) ; return classes ;
public class EntityScannerBuilder { /** * Add an Equality Filter to the Scanner , Will Filter Results Not Equal to the * Filter Value * @ param fieldName * The name of the column you want to apply the filter on * @ param filterValue * The value for comparison * @ return EntityScannerBuilder */ public EntityScannerBuilder < E > addEqualFilter ( String fieldName , Object filterValue ) { } }
SingleFieldEntityFilter singleFieldEntityFilter = new SingleFieldEntityFilter ( entityMapper . getEntitySchema ( ) , entityMapper . getEntitySerDe ( ) , fieldName , filterValue , CompareFilter . CompareOp . EQUAL ) ; filterList . add ( singleFieldEntityFilter . getFilter ( ) ) ; return this ;
public class IOUtils {

    /**
     * Reads the entire content of a file as a string, joining lines with CRLF.
     *
     * @param file file to read; may be null
     * @param charset the {@link Charset} used to decode the file's bytes
     * @return if file not exist, return null, else return content of file with
     *         lines separated by {@code "\r\n"}
     * @throws IOException if an error occurs while reading the file
     */
    public static String readString(File file, Charset charset) throws IOException {
        if (file == null || !file.isFile()) {
            return null;
        }
        StringBuilder fileContent = new StringBuilder();
        // try-with-resources closes the reader on every path, replacing the
        // original's manual close() plus closeQuietly() in a finally block.
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(new FileInputStream(file), charset))) {
            String line;
            while ((line = reader.readLine()) != null) {
                // length() is O(1); the original compared toString() to "",
                // rebuilding the whole accumulated string on every iteration.
                if (fileContent.length() > 0) {
                    fileContent.append("\r\n");
                }
                fileContent.append(line);
            }
        }
        return fileContent.toString();
    }

}
public class ConfigFactory { /** * Like { @ link # load ( ) } but allows specifying parse options and resolve * options . * @ param parseOptions * Options for parsing resources * @ param resolveOptions * options for resolving the assembled config stack * @ return configuration for an application * @ since 1.3.0 */ public static Config load ( ConfigParseOptions parseOptions , final ConfigResolveOptions resolveOptions ) { } }
final ConfigParseOptions withLoader = ensureClassLoader ( parseOptions , "load" ) ; return load ( defaultApplication ( withLoader ) , resolveOptions ) ;
public class Pool { /** * Gets the next free object from the pool . If the pool doesn ' t contain any objects , * a new object will be created and given to the caller of this method back . * @ return T borrowed object */ public T borrowObject ( ) { } }
T object ; if ( ( object = pool . poll ( ) ) == null ) { object = createObject ( ) ; } return object ;
public class ParetoStochasticLaw { /** * Replies a random value that respect * the current stochastic law . * @ param k represents the shape of the distribution * @ param xmin is the minimum value of the distribution * @ return a value depending of the stochastic law parameters * @ throws MathException when error in math definition . */ @ Pure public static double random ( double k , double xmin ) throws MathException { } }
return StochasticGenerator . generateRandomValue ( new ParetoStochasticLaw ( k , xmin ) ) ;
public class TagProjectRequest { /** * The tags you want to add to the project . * @ param tags * The tags you want to add to the project . * @ return Returns a reference to this object so that method calls can be chained together . */ public TagProjectRequest withTags ( java . util . Map < String , String > tags ) { } }
setTags ( tags ) ; return this ;
public class BDTImpl {

    /**
     * Reports whether the given structural feature has been set to a value
     * different from its default. EMF-generated; do not hand-edit.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            // A feature with a null default is "set" when the field is non-null;
            // otherwise it is "set" when the field differs from the default.
            case AfplibPackage.BDT__DOC_NAME:
                return DOC_NAME_EDEFAULT == null ? docName != null : !DOC_NAME_EDEFAULT.equals(docName);
            case AfplibPackage.BDT__RESERVED:
                return RESERVED_EDEFAULT == null ? reserved != null : !RESERVED_EDEFAULT.equals(reserved);
            case AfplibPackage.BDT__TRIPLETS:
                // A collection feature is "set" when it is non-null and non-empty.
                return triplets != null && !triplets.isEmpty();
        }
        return super.eIsSet(featureID);
    }

}
public class P4 {

    /**
     * Executes the proc. This base implementation is abstract in spirit:
     * subclasses are expected to override it; calling it directly always throws.
     *
     * @param a first argument
     * @param b second argument
     * @param c third argument
     * @param d fourth argument
     * @return result
     * @throws Throwable always, via {@code notImplemented}, until overridden
     */
    protected Object run(A a, B b, C c, D d) throws Throwable {
        throw notImplemented("run(A, B, C, D)");
    }

}
public class MetaGraphDef { /** * < pre > * collection _ def : Map from collection name to collections . * See CollectionDef section for details . * < / pre > * < code > map & lt ; string , . tensorflow . CollectionDef & gt ; collection _ def = 4 ; < / code > */ public boolean containsCollectionDef ( java . lang . String key ) { } }
if ( key == null ) { throw new java . lang . NullPointerException ( ) ; } return internalGetCollectionDef ( ) . getMap ( ) . containsKey ( key ) ;
public class LoggingPanel {

    /**
     * Reads the configuration parameters described in the panel from the
     * ConfigSettings and sets the contained values.
     *
     * @param config Reference to the ConfigSettings object
     */
    @Override
    public void applyConfig(final ConfigSettings config) {
        // Diff-tool path: fall back to an empty text field when unset.
        Object o = config.getConfigParameter(ConfigurationKeys.LOGGING_PATH_DIFFTOOL);
        if (o != null) {
            this.diffToolField.setText((String) o);
        } else {
            this.diffToolField.setText("");
        }
        // Log level: only applied when present. NOTE(review): unlike the path
        // field above there is no else branch, so a previously selected level
        // is kept when the parameter is absent — confirm this is intentional.
        o = config.getConfigParameter(ConfigurationKeys.LOGGING_LOGLEVEL_DIFFTOOL);
        if (o != null) {
            this.diffToolLogLevelComboBox.setSelectedItem(o);
        }
    }

}
public class BipartiteGraph { /** * Runs a breadth - first - search starting at the root node . */ public IntArrayList bfs ( int root , boolean isRootT1 ) { } }
boolean [ ] marked1 = new boolean [ nodes1 . size ( ) ] ; boolean [ ] marked2 = new boolean [ nodes2 . size ( ) ] ; return bfs ( root , isRootT1 , marked1 , marked2 ) ;
public class BeanMap { /** * This method reinitializes the bean map to have default values for the * bean ' s properties . This is accomplished by constructing a new instance * of the bean which the map uses as its underlying data source . This * behavior for { @ link Map # clear ( ) clear ( ) } differs from the Map contract in that * the mappings are not actually removed from the map ( the mappings for a * BeanMap are fixed ) . */ @ Override public void clear ( ) { } }
if ( bean == null ) { return ; } Class < ? > beanClass = null ; try { beanClass = bean . getClass ( ) ; bean = beanClass . newInstance ( ) ; } catch ( Exception e ) { throw new UnsupportedOperationException ( "Could not create new instance of class: " + beanClass ) ; }
public class Ifc4PackageImpl {

    /**
     * Returns the EClass for IfcChiller, resolving it lazily from the global
     * EPackage registry on first access. EMF-generated; do not hand-edit.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcChiller() {
        if (ifcChillerEClass == null) {
            // Index 87 is the generated classifier position of IfcChiller
            // within the registered Ifc4 package.
            ifcChillerEClass = (EClass) EPackage.Registry.INSTANCE
                    .getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(87);
        }
        return ifcChillerEClass;
    }

}
public class NodeHealthCheckerService { /** * Method used to terminate the node health monitoring service . */ void stop ( ) { } }
if ( ! shouldRun ( conf ) ) { return ; } nodeHealthScriptScheduler . cancel ( ) ; if ( shexec != null ) { Process p = shexec . getProcess ( ) ; if ( p != null ) { p . destroy ( ) ; } }
public class BaseMessageRecordDesc {

    /**
     * This utility sets this param to the field's raw data: converts the
     * field's value to the message's native type for that field and stores it
     * under the field's full key.
     *
     * @param field The record field to get the data from.
     * @return {@code Constant.NORMAL_RETURN} always
     */
    public int putRawFieldData(Convert field) {
        String strKey = this.getFullKey(field.getFieldName());
        // Default to String when the field carries no type information.
        Class<?> classData = String.class;
        if (field.getField() != null)
            classData = this.getMessage().getNativeClassType(field.getField().getDataClass());
        Object objValue = field.getData();
        try {
            objValue = DataConverters.convertObjectToDatatype(objValue, classData, null);   // I do this just to be careful.
        } catch (Exception ex) {
            // Conversion failure deliberately degrades to a null value rather
            // than aborting the put.
            objValue = null;
        }
        this.getMessage().putNative(strKey, objValue);
        return Constant.NORMAL_RETURN;
    }

}
public class NOPImpl {

    /**
     * Reports whether the given structural feature has been set to a value
     * different from its default. EMF-generated; do not hand-edit.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            // With a null default the feature counts as set when non-null;
            // otherwise when it differs from the default.
            case AfplibPackage.NOP__UNDF_DATA:
                return UNDF_DATA_EDEFAULT == null ? undfData != null : !UNDF_DATA_EDEFAULT.equals(undfData);
        }
        return super.eIsSet(featureID);
    }

}
public class BccClient {

    /**
     * Creating a newly SecurityGroup with specified rules.
     *
     * @param request The request containing all options for creating a new
     *        SecurityGroup; must be non-null, carry a non-empty name, and at
     *        least one rule.
     * @return The response with the id of SecurityGroup that was created newly.
     */
    public CreateSecurityGroupResponse createSecurityGroup(CreateSecurityGroupRequest request) {
        checkNotNull(request, "request should not be null.");
        // Generate an idempotency token when the caller did not supply one.
        if (Strings.isNullOrEmpty(request.getClientToken())) {
            request.setClientToken(this.generateClientToken());
        }
        checkStringNotEmpty(request.getName(), "request name should not be empty.");
        if (null == request.getRules() || request.getRules().isEmpty()) {
            throw new IllegalArgumentException("request rules should not be empty");
        }
        InternalRequest internalRequest = this.createRequest(request, HttpMethodName.POST, SECURITYGROUP_PREFIX);
        internalRequest.addParameter("clientToken", request.getClientToken());
        fillPayload(internalRequest, request);
        return invokeHttpClient(internalRequest, CreateSecurityGroupResponse.class);
    }

}
public class AmazonSNSClient { /** * Returns a list of the requester ' s topics . Each call returns a limited list of topics , up to 100 . If there are * more topics , a < code > NextToken < / code > is also returned . Use the < code > NextToken < / code > parameter in a new * < code > ListTopics < / code > call to get further results . * This action is throttled at 30 transactions per second ( TPS ) . * @ param listTopicsRequest * @ return Result of the ListTopics operation returned by the service . * @ throws InvalidParameterException * Indicates that a request parameter does not comply with the associated constraints . * @ throws InternalErrorException * Indicates an internal service error . * @ throws AuthorizationErrorException * Indicates that the user has been denied access to the requested resource . * @ sample AmazonSNS . ListTopics * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / sns - 2010-03-31 / ListTopics " target = " _ top " > AWS API * Documentation < / a > */ @ Override public ListTopicsResult listTopics ( ListTopicsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeListTopics ( request ) ;
public class EnvironmentConverter { /** * Converts the given { @ code environment } to the given { @ link StandardEnvironment } * type . If the environment is already of the same type , no conversion is performed * and it is returned unchanged . * @ param environment the Environment to convert * @ param type the type to convert the Environment to * @ return the converted Environment */ StandardEnvironment convertEnvironmentIfNecessary ( ConfigurableEnvironment environment , Class < ? extends StandardEnvironment > type ) { } }
if ( type . equals ( environment . getClass ( ) ) ) { return ( StandardEnvironment ) environment ; } return convertEnvironment ( environment , type ) ;
public class XsdAsmUtils {

    /**
     * Changes the first char of the received {@link String} to uppercase.
     *
     * @param name The received {@link String}; must be non-null.
     * @return The received {@link String} with the first char in uppercase; an
     *         empty string is returned unchanged.
     */
    static String firstToUpper(String name) {
        // Guard: the original threw StringIndexOutOfBoundsException on "".
        if (name.isEmpty()) {
            return name;
        }
        if (name.length() == 1) {
            return name.toUpperCase();
        }
        String firstLetter = name.substring(0, 1).toUpperCase();
        return firstLetter + name.substring(1);
    }

}
public class AccountsImpl {

    /**
     * Lists all node agent SKUs supported by the Azure Batch service.
     *
     * @param nextPageLink The NextLink from the previous successful call to List operation.
     * @param accountListNodeAgentSkusNextOptions Additional parameters for the operation
     * @param serviceFuture the ServiceFuture object tracking the Retrofit calls
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<NodeAgentSku>> listNodeAgentSkusNextAsync(final String nextPageLink, final AccountListNodeAgentSkusNextOptions accountListNodeAgentSkusNextOptions, final ServiceFuture<List<NodeAgentSku>> serviceFuture, final ListOperationCallback<NodeAgentSku> serviceCallback) {
        // Fetch the first page, then hand the framework a function that loads
        // each subsequent page from its next-page link with the same options.
        return AzureServiceFuture.fromHeaderPageResponse(
            listNodeAgentSkusNextSinglePageAsync(nextPageLink, accountListNodeAgentSkusNextOptions),
            new Func1<String, Observable<ServiceResponseWithHeaders<Page<NodeAgentSku>, AccountListNodeAgentSkusHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<Page<NodeAgentSku>, AccountListNodeAgentSkusHeaders>> call(String nextPageLink) {
                    return listNodeAgentSkusNextSinglePageAsync(nextPageLink, accountListNodeAgentSkusNextOptions);
                }
            },
            serviceCallback);
    }

}
public class ThreadSafeJOptionPane { /** * Brings up an information - message dialog titled " Message " . * @ param parentComponent * determines the < code > Frame < / code > in which the dialog is * displayed ; if < code > null < / code > , or if the * < code > parentComponent < / code > has no < code > Frame < / code > , a * default < code > Frame < / code > is used * @ param message * the < code > Object < / code > to display */ public static void showMessageDialog ( final Component parentComponent , final Object message ) { } }
execute ( new VoidOptionPane ( ) { public void show ( ) { JOptionPane . showMessageDialog ( parentComponent , message ) ; } } ) ;
public class EmojiParser { /** * Detects all unicode emojis in input string and replaces them with the * return value of transformer . transform ( ) * @ param input the string to process * @ param transformer emoji transformer to apply to each emoji * @ return input string with all emojis transformed */ public static String parseFromUnicode ( String input , EmojiTransformer transformer ) { } }
int prev = 0 ; StringBuilder sb = new StringBuilder ( ) ; List < UnicodeCandidate > replacements = getUnicodeCandidates ( input ) ; for ( UnicodeCandidate candidate : replacements ) { sb . append ( input . substring ( prev , candidate . getEmojiStartIndex ( ) ) ) ; sb . append ( transformer . transform ( candidate ) ) ; prev = candidate . getFitzpatrickEndIndex ( ) ; } return sb . append ( input . substring ( prev ) ) . toString ( ) ;
public class MessageBody { /** * Creates a message body from a list of Data sections . Each Data section is a byte array . * Please note that this version of the SDK supports only one Data section in a message . It means only a list of exactly one byte array in it is accepted as message body . * @ param binaryData a list of byte arrays . * @ return MessageBody instance wrapping around the binary data . */ public static MessageBody fromBinaryData ( List < byte [ ] > binaryData ) { } }
if ( binaryData == null || binaryData . size ( ) == 0 || binaryData . size ( ) > 1 ) { throw new IllegalArgumentException ( "Binary data is null or has more than one byte array in it." ) ; } MessageBody body = new MessageBody ( ) ; body . bodyType = MessageBodyType . BINARY ; body . valueData = null ; body . sequenceData = null ; body . binaryData = binaryData ; return body ;
public class ActionConverter { /** * Just create the JSONObject and set the consume and the end attribute . * @ param a the action to convert * @ return a skeleton JSONObject */ private JSONObject makeActionSkeleton ( Action a ) { } }
JSONObject o = new JSONObject ( ) ; o . put ( START_LABEL , a . getStart ( ) ) ; o . put ( END_LABEL , a . getEnd ( ) ) ; JSONObject hooks = new JSONObject ( ) ; for ( Action . Hook k : Action . Hook . values ( ) ) { JSONArray arr = new JSONArray ( ) ; for ( Event e : a . getEvents ( k ) ) { arr . add ( toJSON ( e ) ) ; } hooks . put ( k . toString ( ) , arr ) ; } o . put ( HOOK_LABEL , hooks ) ; return o ;
public class JettyService { /** * Creates a new { @ link JettyService } from an existing Jetty { @ link Server } . * @ param jettyServer the Jetty { @ link Server } */ public static JettyService forServer ( Server jettyServer ) { } }
requireNonNull ( jettyServer , "jettyServer" ) ; return new JettyService ( null , blockingTaskExecutor -> jettyServer ) ;
public class DateIntervalFormat { /** * Construct a DateIntervalFormat from skeleton and a given locale . * In this factory method , * the date interval pattern information is load from resource files . * Users are encouraged to created date interval formatter this way and * to use the pre - defined skeleton macros . * There are pre - defined skeletons in DateFormat , * such as MONTH _ DAY , YEAR _ MONTH _ WEEKDAY _ DAY etc . * Those skeletons have pre - defined interval patterns in resource files . * Users are encouraged to use them . * For example : * DateIntervalFormat . getInstance ( DateFormat . MONTH _ DAY , false , loc ) ; * The given Locale provides the interval patterns . * For example , for en _ GB , if skeleton is YEAR _ ABBR _ MONTH _ WEEKDAY _ DAY , * which is " yMMMEEEd " , * the interval patterns defined in resource file to above skeleton are : * " EEE , d MMM , yyyy - EEE , d MMM , yyyy " for year differs , * " EEE , d MMM - EEE , d MMM , yyyy " for month differs , * " EEE , d - EEE , d MMM , yyyy " for day differs , * @ param skeleton the skeleton on which interval format based . * @ param locale the given locale * @ return a date time interval formatter . */ public static final DateIntervalFormat getInstance ( String skeleton , ULocale locale ) { } }
DateTimePatternGenerator generator = DateTimePatternGenerator . getInstance ( locale ) ; return new DateIntervalFormat ( skeleton , locale , new SimpleDateFormat ( generator . getBestPattern ( skeleton ) , locale ) ) ;
public class BeanValidator {

    /**
     * This method creates ValidatorFactory instances or retrieves them from the
     * container. Once created, ValidatorFactory instances are stored in the
     * container under the key VALIDATOR_FACTORY_KEY for performance.
     *
     * @param context The FacesContext.
     * @return The ValidatorFactory instance.
     * @throws FacesException if no ValidatorFactory can be obtained because: a)
     *         the container is not a Servlet container or b) because Bean
     *         Validation is not available.
     */
    private ValidatorFactory createValidatorFactory(FacesContext context) {
        Map<String, Object> applicationMap = context.getExternalContext().getApplicationMap();
        // Fast path: reuse a previously cached factory.
        Object attr = applicationMap.get(VALIDATOR_FACTORY_KEY);
        if (attr instanceof ValidatorFactory) {
            return (ValidatorFactory) attr;
        } else {
            // Slow path: build and cache the factory under a lock so only one
            // factory is constructed per application.
            synchronized (this) {
                if (_ExternalSpecifications.isBeanValidationAvailable()) {
                    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
                    applicationMap.put(VALIDATOR_FACTORY_KEY, factory);
                    return factory;
                } else {
                    throw new FacesException("Bean Validation is not present");
                }
            }
        }
    }

}
public class LoggerWrapper {

    /**
     * Delegate to the appropriate method of the underlying logger: a
     * location-aware logger receives the pre-formatted message together with
     * the wrapper's FQCN so caller location is reported correctly; otherwise
     * the call is forwarded unchanged.
     *
     * @param marker the log marker
     * @param format the message format string
     * @param arg1 first format argument
     * @param arg2 second format argument
     */
    public void error(Marker marker, String format, Object arg1, Object arg2) {
        if (!logger.isErrorEnabled(marker))
            return;
        if (instanceofLAL) {
            // Format eagerly; the location-aware API takes a finished message.
            String formattedMessage = MessageFormatter.format(format, arg1, arg2).getMessage();
            ((LocationAwareLogger) logger).log(marker, fqcn, LocationAwareLogger.ERROR_INT, formattedMessage,
                    new Object[] { arg1, arg2 }, null);
        } else {
            logger.error(marker, format, arg1, arg2);
        }
    }

}
public class GLU {

    /**
     * Inverts a 4x4 matrix by Gauss-Jordan elimination with partial pivoting
     * on an augmented [m | I] system. This code comes directly from GLU except
     * that it is for float.
     *
     * @param m the input 4x4 matrix, accessed through MAT(m, row, col)
     * @param out receives the inverse on success
     * @return 1 on success, 0 if the matrix is singular
     */
    static int glhInvertMatrixf2(float[] m, float[] out) {
        // Each working row holds 4 matrix columns plus 4 identity (augment) columns.
        float[][] wtmp = new float[4][8];
        float m0, m1, m2, m3, s;
        float[] r0, r1, r2, r3;
        r0 = wtmp[0];
        r1 = wtmp[1];
        r2 = wtmp[2];
        r3 = wtmp[3];
        // Load [row | identity-row] for each of the four rows.
        r0[0] = MAT(m, 0, 0); r0[1] = MAT(m, 0, 1); r0[2] = MAT(m, 0, 2); r0[3] = MAT(m, 0, 3);
        r0[4] = 1.0f; r0[5] = r0[6] = r0[7] = 0.0f;
        r1[0] = MAT(m, 1, 0); r1[1] = MAT(m, 1, 1); r1[2] = MAT(m, 1, 2); r1[3] = MAT(m, 1, 3);
        r1[5] = 1.0f; r1[4] = r1[6] = r1[7] = 0.0f;
        r2[0] = MAT(m, 2, 0); r2[1] = MAT(m, 2, 1); r2[2] = MAT(m, 2, 2); r2[3] = MAT(m, 2, 3);
        r2[6] = 1.0f; r2[4] = r2[5] = r2[7] = 0.0f;
        r3[0] = MAT(m, 3, 0); r3[1] = MAT(m, 3, 1); r3[2] = MAT(m, 3, 2); r3[3] = MAT(m, 3, 3);
        r3[7] = 1.0f; r3[4] = r3[5] = r3[6] = 0.0f;
        /* choose pivot - or die */
        if (Math.abs(r3[0]) > Math.abs(r2[0])) { float[] r = r2; r2 = r3; r3 = r; }
        if (Math.abs(r2[0]) > Math.abs(r1[0])) { float[] r = r2; r2 = r1; r1 = r; }
        if (Math.abs(r1[0]) > Math.abs(r0[0])) { float[] r = r1; r1 = r0; r0 = r; }
        if (0.0 == r0[0]) return 0;
        /* eliminate first variable */
        m1 = r1[0] / r0[0];
        m2 = r2[0] / r0[0];
        m3 = r3[0] / r0[0];
        s = r0[1]; r1[1] -= m1 * s; r2[1] -= m2 * s; r3[1] -= m3 * s;
        s = r0[2]; r1[2] -= m1 * s; r2[2] -= m2 * s; r3[2] -= m3 * s;
        s = r0[3]; r1[3] -= m1 * s; r2[3] -= m2 * s; r3[3] -= m3 * s;
        // Augment columns only need updating when the pivot-row entry is non-zero.
        s = r0[4]; if (s != 0.0) { r1[4] -= m1 * s; r2[4] -= m2 * s; r3[4] -= m3 * s; }
        s = r0[5]; if (s != 0.0) { r1[5] -= m1 * s; r2[5] -= m2 * s; r3[5] -= m3 * s; }
        s = r0[6]; if (s != 0.0) { r1[6] -= m1 * s; r2[6] -= m2 * s; r3[6] -= m3 * s; }
        s = r0[7]; if (s != 0.0) { r1[7] -= m1 * s; r2[7] -= m2 * s; r3[7] -= m3 * s; }
        /* choose pivot - or die */
        if (Math.abs(r3[1]) > Math.abs(r2[1])) { float[] r = r2; r2 = r3; r3 = r; }
        if (Math.abs(r2[1]) > Math.abs(r1[1])) { float[] r = r2; r2 = r1; r1 = r; }
        if (0.0 == r1[1]) return 0;
        /* eliminate second variable */
        m2 = r2[1] / r1[1];
        m3 = r3[1] / r1[1];
        r2[2] -= m2 * r1[2]; r3[2] -= m3 * r1[2];
        r2[3] -= m2 * r1[3]; r3[3] -= m3 * r1[3];
        s = r1[4]; if (0.0 != s) { r2[4] -= m2 * s; r3[4] -= m3 * s; }
        s = r1[5]; if (0.0 != s) { r2[5] -= m2 * s; r3[5] -= m3 * s; }
        s = r1[6]; if (0.0 != s) { r2[6] -= m2 * s; r3[6] -= m3 * s; }
        s = r1[7]; if (0.0 != s) { r2[7] -= m2 * s; r3[7] -= m3 * s; }
        /* choose pivot - or die */
        if (Math.abs(r3[2]) > Math.abs(r2[2])) { float[] r = r2; r2 = r3; r3 = r; }
        if (0.0 == r2[2]) return 0;
        /* eliminate third variable */
        m3 = r3[2] / r2[2];
        r3[3] -= m3 * r2[3];
        r3[4] -= m3 * r2[4];
        r3[5] -= m3 * r2[5];
        r3[6] -= m3 * r2[6];
        r3[7] -= m3 * r2[7];
        /* last check */
        if (0.0 == r3[3]) return 0;
        s = 1.0f / r3[3];
        /* now back substitute row 3 */
        r3[4] *= s; r3[5] *= s; r3[6] *= s; r3[7] *= s;
        m2 = r2[3];
        /* now back substitute row 2 */
        s = 1.0f / r2[2];
        r2[4] = s * (r2[4] - r3[4] * m2);
        r2[5] = s * (r2[5] - r3[5] * m2);
        r2[6] = s * (r2[6] - r3[6] * m2);
        r2[7] = s * (r2[7] - r3[7] * m2);
        m1 = r1[3];
        r1[4] -= r3[4] * m1; r1[5] -= r3[5] * m1; r1[6] -= r3[6] * m1; r1[7] -= r3[7] * m1;
        m0 = r0[3];
        r0[4] -= r3[4] * m0; r0[5] -= r3[5] * m0; r0[6] -= r3[6] * m0; r0[7] -= r3[7] * m0;
        m1 = r1[2];
        /* now back substitute row 1 */
        s = 1.0f / r1[1];
        r1[4] = s * (r1[4] - r2[4] * m1);
        r1[5] = s * (r1[5] - r2[5] * m1);
        r1[6] = s * (r1[6] - r2[6] * m1);
        r1[7] = s * (r1[7] - r2[7] * m1);
        m0 = r0[2];
        r0[4] -= r2[4] * m0; r0[5] -= r2[5] * m0; r0[6] -= r2[6] * m0; r0[7] -= r2[7] * m0;
        m0 = r0[1];
        /* now back substitute row 0 */
        s = 1.0f / r0[0];
        r0[4] = s * (r0[4] - r1[4] * m0);
        r0[5] = s * (r0[5] - r1[5] * m0);
        r0[6] = s * (r0[6] - r1[6] * m0);
        r0[7] = s * (r0[7] - r1[7] * m0);
        // Copy the augment half (columns 4..7) — now the inverse — into out.
        MAT(out, 0, 0, r0[4]); MAT(out, 0, 1, r0[5]); MAT(out, 0, 2, r0[6]); MAT(out, 0, 3, r0[7]);
        MAT(out, 1, 0, r1[4]); MAT(out, 1, 1, r1[5]); MAT(out, 1, 2, r1[6]); MAT(out, 1, 3, r1[7]);
        MAT(out, 2, 0, r2[4]); MAT(out, 2, 1, r2[5]); MAT(out, 2, 2, r2[6]); MAT(out, 2, 3, r2[7]);
        MAT(out, 3, 0, r3[4]); MAT(out, 3, 1, r3[5]); MAT(out, 3, 2, r3[6]); MAT(out, 3, 3, r3[7]);
        return 1;
    }

}
public class BeanContextServicesSupport {

    /**
     * Release all services requested by the given child.
     *
     * @param bcssChild a child
     * @param delegatedServices only release services that are delegated to
     *        parent context
     */
    private void releaseServicesForChild(BCSSChild bcssChild, boolean delegatedServices) {
        if (bcssChild.serviceRecords == null || bcssChild.serviceRecords.isEmpty()) {
            return;
        }
        synchronized (bcssChild.child) {
            // Snapshot the records: releasing mutates the underlying collection.
            Object records[] = bcssChild.serviceRecords.toArray();
            for (int i = 0; i < records.length; i++) {
                ServiceRecord rec = (ServiceRecord) records[i];
                if (delegatedServices) {
                    // Only release services that were delegated to the parent.
                    if (rec.isDelegate) {
                        releaseServiceWithoutCheck(rec.child, bcssChild, rec.requestor, rec.service, true);
                    }
                } else {
                    releaseServiceWithoutCheck(rec.child, bcssChild, rec.requestor, rec.service, false);
                }
            }
        }
    }

}
public class GridNode { /** * Get a neighbor node at a certain direction . * @ param direction the direction to get the node at . * @ return the node . */ public GridNode getNodeAt ( Direction direction ) { } }
int newCol = col + direction . col ; int newRow = row + direction . row ; GridNode node = new GridNode ( gridIter , cols , rows , xRes , yRes , newCol , newRow ) ; return node ;
public class DefaultObjectMapperFactory { /** * configures given object mapper instance . * @ param mapper the object to configure * @ return mapper instance for chaining */ protected ObjectMapper configure ( ObjectMapper mapper ) { } }
mapper . configure ( JsonParser . Feature . ALLOW_SINGLE_QUOTES , true ) ; mapper . configure ( JsonParser . Feature . ALLOW_UNQUOTED_FIELD_NAMES , true ) ; return mapper ;
public class PdfContentByte {
    /**
     * Gets a <CODE>Graphics2D</CODE> to write on. The graphics
     * are translated to PDF commands.
     *
     * @param width the width of the panel
     * @param height the height of the panel
     * @param fontMapper the mapping from awt fonts to <CODE>BaseFont</CODE>
     * @return a <CODE>Graphics2D</CODE>
     */
    public java . awt . Graphics2D createGraphics ( float width , float height , FontMapper fontMapper ) {
        // Delegate to PdfGraphics2D with the default trailing options (false, false, 0).
        // NOTE(review): the meaning of the two boolean flags and the numeric argument
        // is defined by the PdfGraphics2D constructor, not visible here -- confirm
        // before changing them.
        return new PdfGraphics2D ( this , width , height , fontMapper , false , false , 0 ) ;
    }
}
public class CommerceDiscountUserSegmentRelPersistenceImpl {
    /**
     * Returns a range of all the commerce discount user segment rels where
     * commerceDiscountId = &#63;.
     *
     * <p>
     * {@code start} and {@code end} are result-set indexes, not primary keys;
     * pass {@link QueryUtil#ALL_POS} for both to fetch the full result set.
     * </p>
     *
     * @param commerceDiscountId the commerce discount ID
     * @param start the lower bound of the range of commerce discount user segment rels
     * @param end the upper bound of the range (not inclusive)
     * @return the range of matching commerce discount user segment rels
     */
    @ Override
    public List < CommerceDiscountUserSegmentRel > findByCommerceDiscountId ( long commerceDiscountId , int start , int end ) {
        // Delegate to the four-argument overload with a null comparator, which
        // applies the model's default ordering when pagination is requested.
        return findByCommerceDiscountId ( commerceDiscountId , start , end , null ) ;
    }
}
public class SoapAddressRewriteHelper { /** * Rewrite the provided address according to the current server * configuration and always using the specified uriScheme . * @ param sarm The deployment SOAPAddressRewriteMetadata * @ param origAddress The source address * @ param newAddress The new ( candidate ) address * @ param uriScheme The uriScheme to use for rewrite * @ return The obtained address */ private static String rewriteSoapAddress ( SOAPAddressRewriteMetadata sarm , String origAddress , String newAddress , String uriScheme ) { } }
try { URL url = new URL ( newAddress ) ; String path = url . getPath ( ) ; String host = sarm . getWebServiceHost ( ) ; String port = getDotPortNumber ( uriScheme , sarm ) ; StringBuilder sb = new StringBuilder ( uriScheme ) ; sb . append ( "://" ) ; sb . append ( host ) ; sb . append ( port ) ; if ( isPathRewriteRequired ( sarm ) ) { sb . append ( SEDProcessor . newInstance ( sarm . getWebServicePathRewriteRule ( ) ) . processLine ( path ) ) ; } else { sb . append ( path ) ; } final String urlStr = sb . toString ( ) ; ADDRESS_REWRITE_LOGGER . addressRewritten ( origAddress , urlStr ) ; return urlStr ; } catch ( MalformedURLException e ) { ADDRESS_REWRITE_LOGGER . invalidAddressProvidedUseItWithoutRewriting ( newAddress , origAddress ) ; return origAddress ; }
public class JobExecutionsInner { /** * Lists all executions in a job agent . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; JobExecutionInner & gt ; object */ public Observable < Page < JobExecutionInner > > listByAgentNextAsync ( final String nextPageLink ) { } }
return listByAgentNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < JobExecutionInner > > , Page < JobExecutionInner > > ( ) { @ Override public Page < JobExecutionInner > call ( ServiceResponse < Page < JobExecutionInner > > response ) { return response . body ( ) ; } } ) ;
public class OkRequest { /** * Write the map to url params * @ param values map * @ return this request */ public OkRequest < T > params ( final Map < String , String > values ) { } }
if ( ! values . isEmpty ( ) ) { for ( Map . Entry < String , String > entry : values . entrySet ( ) ) { param ( entry ) ; } } return this ;
public class Configuration { /** * Get the value of the < code > name < / code > property . If no such property * exists , then < code > defaultValue < / code > is returned . * @ param name property name . * @ param defaultValue default value . * @ return property value , or < code > defaultValue < / code > if the property * doesn ' t exist . */ public String get ( String name , String defaultValue ) { } }
return substituteVars ( getProps ( ) . getProperty ( name , defaultValue ) ) ;
public class BinarySparseDatasetParser { /** * Parse a binary sparse dataset from given URI . * @ param uri the URI of data source . * @ throws java . io . IOException */ public BinarySparseDataset parse ( String name , URI uri ) throws IOException , ParseException { } }
return parse ( name , new File ( uri ) ) ;
public class ExtensionsDao { /** * { @ inheritDoc } * Delete using the unique columns */ @ Override public int delete ( Extensions extensions ) throws SQLException { } }
DeleteBuilder < Extensions , Void > db = deleteBuilder ( ) ; setUniqueWhere ( db . where ( ) , extensions . getExtensionName ( ) , true , extensions . getTableName ( ) , true , extensions . getColumnName ( ) ) ; int deleted = db . delete ( ) ; return deleted ;
public class MetadataStore { /** * Same as { @ link # getOrAssignSegmentId ( String , Duration , Function ) ) except that this simply returns a CompletableFuture * with the SegmentId . * @ param streamSegmentName The case - sensitive StreamSegment Name . * @ param timeout The timeout for the operation . * @ return A CompletableFuture that , when completed normally , will contain SegmentId . If failed , this will contain the * exception that caused the failure . */ @ VisibleForTesting CompletableFuture < Long > getOrAssignSegmentId ( String streamSegmentName , Duration timeout ) { } }
return getOrAssignSegmentId ( streamSegmentName , timeout , CompletableFuture :: completedFuture ) ;
public class XSynchronizedExpressionImpl {
    /**
     * <!-- begin-user-doc -->
     * Reports whether the given structural feature is set on this instance.
     * A containment reference counts as set once it is non-null.
     * <!-- end-user-doc -->
     * @generated
     */
    @ Override
    public boolean eIsSet ( int featureID ) {
        switch ( featureID ) {
            case XbasePackage . XSYNCHRONIZED_EXPRESSION__PARAM :
                return param != null ;
            case XbasePackage . XSYNCHRONIZED_EXPRESSION__EXPRESSION :
                return expression != null ;
        }
        // Any feature not declared on this class is handled by the superclass.
        return super . eIsSet ( featureID ) ;
    }
}
public class AppConfigHandler {
    /**
     * Init the Handler. Can only be executed once: once {@link #HANDLER} has been
     * assigned, subsequent calls are silently ignored.
     *
     * <p>
     * NOTE(review): this check-then-act is not thread-safe; if init() can be
     * invoked concurrently, two handlers could be constructed and one silently
     * discarded -- confirm the call site is single-threaded before relying on
     * the "only once" guarantee.
     * </p>
     *
     * @param _values values for the init
     */
    public static void init ( final Map < String , String > _values ) {
        if ( AppConfigHandler . HANDLER == null ) {
            AppConfigHandler . HANDLER = new AppConfigHandler ( _values ) ;
        }
    }
}
public class DefaultTerminalFactory { /** * Creates a new TelnetTerminal * Note : a telnetPort should have been set with setTelnetPort ( ) , * otherwise creation of TelnetTerminal will most likely fail . * @ return New terminal emulator exposed as a { @ link Terminal } interface */ public TelnetTerminal createTelnetTerminal ( ) { } }
try { System . err . print ( "Waiting for incoming telnet connection on port " + telnetPort + " ... " ) ; System . err . flush ( ) ; TelnetTerminalServer tts = new TelnetTerminalServer ( telnetPort ) ; TelnetTerminal rawTerminal = tts . acceptConnection ( ) ; tts . close ( ) ; // Just for single - shot : free up the port ! System . err . println ( "Ok, got it!" ) ; if ( mouseCaptureMode != null ) { rawTerminal . setMouseCaptureMode ( mouseCaptureMode ) ; } if ( inputTimeout >= 0 ) { rawTerminal . getInputDecoder ( ) . setTimeoutUnits ( inputTimeout ) ; } return rawTerminal ; } catch ( IOException ioe ) { throw new RuntimeException ( ioe ) ; }
public class ParameterResolverImpl { /** * Get default value for parameter - from OSGi configuration property or parameter definition itself . * @ param parameter Parameter definition * @ return Default value or null */ private < T > T getParameterDefaultValue ( Parameter < T > parameter ) { } }
String defaultOsgiConfigProperty = parameter . getDefaultOsgiConfigProperty ( ) ; if ( StringUtils . isNotBlank ( defaultOsgiConfigProperty ) ) { String [ ] parts = StringUtils . split ( defaultOsgiConfigProperty , ":" ) ; String className = parts [ 0 ] ; String propertyName = parts [ 1 ] ; ServiceReference ref = bundleContext . getServiceReference ( className ) ; if ( ref != null ) { Object value = ref . getProperty ( propertyName ) ; return TypeConversion . osgiPropertyToObject ( value , parameter . getType ( ) , parameter . getDefaultValue ( ) ) ; } } return parameter . getDefaultValue ( ) ;
public class StringUtils { /** * 检查指定的字符串列表是否不为空 。 */ public static boolean areNotEmpty ( String ... values ) { } }
boolean result = true ; if ( values == null || values . length == 0 ) { result = false ; } else { for ( String value : values ) { result &= ! isEmpty ( value ) ; } } return result ;
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Returns the EClass for IfcSimpleProperty, lazily resolving it from the
     * registered package on first access.
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcSimpleProperty ( ) {
        if ( ifcSimplePropertyEClass == null ) {
            // Classifier index 514 is fixed by the generated package model.
            ifcSimplePropertyEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 514 ) ;
        }
        return ifcSimplePropertyEClass ;
    }
}
public class Matrix4d {
    /**
     * Pre-multiply a rotation around the Z axis to this matrix by rotating the given
     * amount of radians about the Z axis and store the result in <code>dest</code>.
     * <p>
     * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation
     * matrix, then the new matrix will be <code>R * M</code>, so the rotation is
     * applied last when transforming a vector via <code>R * M * v</code>.
     * In a right-handed coordinate system the rotation is counter-clockwise when
     * viewing along the negative axis towards the origin; left-handed: clockwise.
     * To set (rather than pre-multiply) a rotation, use {@link #rotationZ(double)}.
     *
     * @see #rotationZ(double)
     * @param ang  the angle in radians to rotate about the Z axis
     * @param dest will hold the result
     * @return dest
     */
    public Matrix4d rotateLocalZ ( double ang , Matrix4d dest ) {
        double sin = Math . sin ( ang ) ;
        // JOML helper: derives cos from the already-computed sin and the angle.
        double cos = Math . cosFromSin ( sin , ang ) ;
        // A Z rotation applied on the left only mixes the x and y rows of each
        // column; the z and w rows pass through unchanged.
        double nm00 = cos * m00 - sin * m01 ;
        double nm01 = sin * m00 + cos * m01 ;
        double nm10 = cos * m10 - sin * m11 ;
        double nm11 = sin * m10 + cos * m11 ;
        double nm20 = cos * m20 - sin * m21 ;
        double nm21 = sin * m20 + cos * m21 ;
        double nm30 = cos * m30 - sin * m31 ;
        double nm31 = sin * m30 + cos * m31 ;
        dest . m00 = nm00 ;
        dest . m01 = nm01 ;
        dest . m02 = m02 ;
        dest . m03 = m03 ;
        dest . m10 = nm10 ;
        dest . m11 = nm11 ;
        dest . m12 = m12 ;
        dest . m13 = m13 ;
        dest . m20 = nm20 ;
        dest . m21 = nm21 ;
        dest . m22 = m22 ;
        dest . m23 = m23 ;
        dest . m30 = nm30 ;
        dest . m31 = nm31 ;
        dest . m32 = m32 ;
        dest . m33 = m33 ;
        // A rotation invalidates the perspective/identity/translation shortcuts.
        dest . properties = properties & ~ ( PROPERTY_PERSPECTIVE | PROPERTY_IDENTITY | PROPERTY_TRANSLATION ) ;
        return dest ;
    }
}
public class Condition { /** * 获取所有的 AND 子条件 * @ return */ public List < Condition > getAndConditions ( ) { } }
if ( null == subConditions ) { return null ; } List < Condition > ret = new ArrayList < Condition > ( ) ; for ( Condition subContion : subConditions ) { if ( AND . equals ( subContion . joinType ) ) { ret . add ( subContion ) ; } } return ret ;
public class IoUtils {
    /**
     * Reads in the values from a particular column of {@code filename}, as
     * delimited by {@code delimiter}.
     *
     * <p>
     * The reader is opened with try-with-resources so the file handle is
     * released even when a read fails (the original implementation leaked it
     * on exception).
     * </p>
     *
     * @param filename the file to read
     * @param columnNumber zero-based index of the column to extract from each line
     * @param delimiter regular expression used to split each line into columns
     * @return the column values, one entry per line of the file
     * @throws RuntimeException wrapping any {@link IOException} raised while reading
     */
    public static List < String > readColumnFromDelimitedFile ( String filename , int columnNumber , String delimiter ) {
        List < String > columnValues = new java . util . ArrayList < String > ( ) ;
        try ( BufferedReader in = new BufferedReader ( new FileReader ( filename ) ) ) {
            String line ;
            while ( ( line = in . readLine ( ) ) != null ) {
                String [ ] parts = line . split ( delimiter ) ;
                columnValues . add ( parts [ columnNumber ] ) ;
            }
        } catch ( IOException e ) {
            throw new RuntimeException ( e ) ;
        }
        return columnValues ;
    }
}
public class TPCHQuery10 { public static void main ( String [ ] args ) throws Exception { } }
final ParameterTool params = ParameterTool . fromArgs ( args ) ; final ExecutionEnvironment env = ExecutionEnvironment . getExecutionEnvironment ( ) ; if ( ! params . has ( "customer" ) && ! params . has ( "orders" ) && ! params . has ( "lineitem" ) && ! params . has ( "nation" ) ) { System . err . println ( " This program expects data from the TPC-H benchmark as input data." ) ; System . err . println ( " Due to legal restrictions, we can not ship generated data." ) ; System . err . println ( " You can find the TPC-H data generator at http://www.tpc.org/tpch/." ) ; System . err . println ( " Usage: TPCHQuery10 --customer <path> --orders <path> --lineitem <path> --nation <path> [--output <path>]" ) ; return ; } // get customer data set : ( custkey , name , address , nationkey , acctbal ) DataSet < Tuple5 < Integer , String , String , Integer , Double > > customers = getCustomerDataSet ( env , params . get ( "customer" ) ) ; // get orders data set : ( orderkey , custkey , orderdate ) DataSet < Tuple3 < Integer , Integer , String > > orders = getOrdersDataSet ( env , params . get ( "orders" ) ) ; // get lineitem data set : ( orderkey , extendedprice , discount , returnflag ) DataSet < Tuple4 < Integer , Double , Double , String > > lineitems = getLineitemDataSet ( env , params . get ( "lineitem" ) ) ; // get nation data set : ( nationkey , name ) DataSet < Tuple2 < Integer , String > > nations = getNationsDataSet ( env , params . get ( "nation" ) ) ; // orders filtered by year : ( orderkey , custkey ) DataSet < Tuple2 < Integer , Integer > > ordersFilteredByYear = // filter by year orders . filter ( order -> Integer . parseInt ( order . f2 . substring ( 0 , 4 ) ) > 1990 ) // project fields out that are no longer required . project ( 0 , 1 ) ; // lineitems filtered by flag : ( orderkey , revenue ) DataSet < Tuple2 < Integer , Double > > lineitemsFilteredByFlag = // filter by flag lineitems . filter ( lineitem -> lineitem . f3 . 
equals ( "R" ) ) // compute revenue and project out return flag // revenue per item = l _ extendedprice * ( 1 - l _ discount ) . map ( lineitem -> new Tuple2 < > ( lineitem . f0 , lineitem . f1 * ( 1 - lineitem . f2 ) ) ) . returns ( Types . TUPLE ( Types . INT , Types . DOUBLE ) ) ; // for lambda with generics // join orders with lineitems : ( custkey , revenue ) DataSet < Tuple2 < Integer , Double > > revenueByCustomer = ordersFilteredByYear . joinWithHuge ( lineitemsFilteredByFlag ) . where ( 0 ) . equalTo ( 0 ) . projectFirst ( 1 ) . projectSecond ( 1 ) ; revenueByCustomer = revenueByCustomer . groupBy ( 0 ) . aggregate ( Aggregations . SUM , 1 ) ; // join customer with nation ( custkey , name , address , nationname , acctbal ) DataSet < Tuple5 < Integer , String , String , String , Double > > customerWithNation = customers . joinWithTiny ( nations ) . where ( 3 ) . equalTo ( 0 ) . projectFirst ( 0 , 1 , 2 ) . projectSecond ( 1 ) . projectFirst ( 4 ) ; // join customer ( with nation ) with revenue ( custkey , name , address , nationname , acctbal , revenue ) DataSet < Tuple6 < Integer , String , String , String , Double , Double > > result = customerWithNation . join ( revenueByCustomer ) . where ( 0 ) . equalTo ( 0 ) . projectFirst ( 0 , 1 , 2 , 3 , 4 ) . projectSecond ( 1 ) ; // emit result if ( params . has ( "output" ) ) { result . writeAsCsv ( params . get ( "output" ) , "\n" , "|" ) ; // execute program env . execute ( "TPCH Query 10 Example" ) ; } else { System . out . println ( "Printing result to stdout. Use --output to specify output path." ) ; result . print ( ) ; }
public class JsDocInfoParser {
    /**
     * Extracts the top-level block comment from the JsDoc comment, if any.
     * This method differs from the extractMultilineTextualBlock in that it
     * terminates under different conditions (it doesn't have the same
     * prechecks), it does not first read in the remaining of the current
     * line and its conditions for ignoring the "*" (STAR) are different.
     *
     * @param token The starting token.
     * @return The extraction information.
     */
    private ExtractionInfo extractBlockComment ( JsDocToken token ) {
        // Delegate to the generic multiline extractor with TRIM whitespace handling.
        // NOTE(review): the semantics of the two trailing boolean flags are defined
        // by extractMultilineComment's signature elsewhere in this file -- confirm
        // before changing them.
        return extractMultilineComment ( token , getWhitespaceOption ( WhitespaceOption . TRIM ) , false , false ) ;
    }
}
public class UserProfileHandlerImpl { /** * Notifying listeners after profile deletion . * @ param userProfile * the user profile which is used in delete operation * @ throws Exception * if any listener failed to handle the event */ private void postDelete ( UserProfile userProfile ) throws Exception { } }
for ( UserProfileEventListener listener : listeners ) { listener . postDelete ( userProfile ) ; }
public class CharBuffer {
    /**
     * Method to append a string to char buffer.
     *
     * <p>
     * The buffer is a chain of fixed-size char[] chunks; when the current chunk
     * cannot hold the whole string, its remainder is filled, the chunk is linked
     * into the chain, and a new chunk (at least as large as the leftover text)
     * is started.
     * </p>
     *
     * @param str String to append; {@code null} is a no-op
     */
    public void append ( String str ) {
        if ( str == null ) return ;
        // Free space remaining in the current chunk.
        int restLength = buffer . length - pos ;
        if ( str . length ( ) < restLength ) {
            // Fast path: the whole string fits into the current chunk.
            str . getChars ( 0 , str . length ( ) , buffer , pos ) ;
            pos += str . length ( ) ;
        } else {
            // Fill the remainder of the current chunk with the string's head.
            str . getChars ( 0 , restLength , buffer , pos ) ;
            // Link the now-full chunk into the chain and account for its length.
            curr . next = new Entity ( buffer ) ;
            curr = curr . next ;
            length += buffer . length ;
            // Start a new chunk sized to hold the leftover text (but never smaller
            // than the previous chunk), copy the tail in, and reset the cursor.
            buffer = new char [ ( buffer . length > str . length ( ) - restLength ) ? buffer . length : str . length ( ) - restLength ] ;
            str . getChars ( restLength , str . length ( ) , buffer , 0 ) ;
            pos = str . length ( ) - restLength ;
        }
    }
}
public class GlobalExceptionHandler { /** * 构造系统业务错误 * @ param mvc mvc * @ param paramErrors 错误参数 * @ return mvc */ private ModelAndView buildBizError ( ModelAndView mvc , Map < String , Object > paramErrors ) { } }
Map < String , Object > error = new HashMap < String , Object > ( ) ; error . put ( "field" , paramErrors ) ; mvc . addObject ( "status" , GlobalResponseStatusMsg . BIZ_ERROR . getCode ( ) ) ; mvc . addObject ( "statusInfo" , error ) ; return mvc ;
public class CollidableConfig { /** * Create the collidable data from node . * @ param configurer The configurer reference ( must not be < code > null < / code > ) . * @ return The associated group , { @ link # DEFAULT _ GROUP } if not defined . * @ throws LionEngineException If unable to read node . */ public static Integer imports ( Configurer configurer ) { } }
Check . notNull ( configurer ) ; if ( configurer . hasNode ( NODE_GROUP ) ) { final String group = configurer . getText ( NODE_GROUP ) ; try { return Integer . valueOf ( group ) ; } catch ( final NumberFormatException exception ) { throw new LionEngineException ( exception , ERROR_INVALID_GROUP + group ) ; } } return DEFAULT_GROUP ;
public class InternalXbaseWithAnnotationsParser {
    /**
     * ANTLR-generated rule method -- do not hand-edit the logic.
     *
     * Grammar (InternalXbaseWithAnnotations.g:1710:1):
     *   ruleXMultiplicativeExpression returns [EObject current=null] :
     *     this_XUnaryOperation_0=ruleXUnaryOperation
     *     ( ((() ((ruleOpMulti)))) =&gt; (() ((ruleOpMulti)))
     *       ((lv_rightOperand_3_0=ruleXUnaryOperation)) )* ;
     *
     * Parses a left-associative chain of multiplicative operations: one unary
     * operand followed by zero or more (operator, operand) pairs.
     */
    public final EObject ruleXMultiplicativeExpression ( ) throws RecognitionException {
        EObject current = null ;
        EObject this_XUnaryOperation_0 = null ;
        EObject lv_rightOperand_3_0 = null ;
        enterRule ( ) ;
        try {
            // g:1716:2 / g:1717:2 -- single top-level alternative
            {
            // g:1717:2 / g:1718:3 -- leading operand, then the (op operand)* tail
            {
            // Parse the mandatory left-most unary operand.
            if ( state . backtracking == 0 ) {
                newCompositeNode ( grammarAccess . getXMultiplicativeExpressionAccess ( ) . getXUnaryOperationParserRuleCall_0 ( ) ) ;
            }
            pushFollow ( FOLLOW_28 ) ;
            this_XUnaryOperation_0 = ruleXUnaryOperation ( ) ;
            state . _fsp -- ;
            if ( state . failed ) return current ;
            if ( state . backtracking == 0 ) {
                current = this_XUnaryOperation_0 ;
                afterParserOrEnumRuleCall ( ) ;
            }
            // g:1726:3 -- zero-or-more operator tail; lookahead tokens 44..47 are
            // candidate operators, confirmed by syntactic predicate synpred17
            // before committing to another iteration.
            loop29 :
            do {
                int alt29 = 2 ;
                switch ( input . LA ( 1 ) ) {
                case 44 :
                    {
                    int LA29_2 = input . LA ( 2 ) ;
                    if ( ( synpred17_InternalXbaseWithAnnotations ( ) ) ) {
                        alt29 = 1 ;
                    }
                    }
                    break ;
                case 45 :
                    {
                    int LA29_3 = input . LA ( 2 ) ;
                    if ( ( synpred17_InternalXbaseWithAnnotations ( ) ) ) {
                        alt29 = 1 ;
                    }
                    }
                    break ;
                case 46 :
                    {
                    int LA29_4 = input . LA ( 2 ) ;
                    if ( ( synpred17_InternalXbaseWithAnnotations ( ) ) ) {
                        alt29 = 1 ;
                    }
                    }
                    break ;
                case 47 :
                    {
                    int LA29_5 = input . LA ( 2 ) ;
                    if ( ( synpred17_InternalXbaseWithAnnotations ( ) ) ) {
                        alt29 = 1 ;
                    }
                    }
                    break ;
                }
                switch ( alt29 ) {
                case 1 :
                    // g:1727:4 -- one (operator, right operand) pair
                    {
                    // g:1727:4 / g:1728:5 -- predicate-gated operator part
                    {
                    // g:1738:5 / g:1739:6 -- action + operator cross-reference
                    {
                    // g:1739:6 / g:1740:7 -- wrap what was parsed so far as the left
                    // operand of a new XBinaryOperation.
                    {
                    if ( state . backtracking == 0 ) {
                        current = forceCreateModelElementAndSet ( grammarAccess . getXMultiplicativeExpressionAccess ( ) . getXBinaryOperationLeftOperandAction_1_0_0_0 ( ) , current ) ;
                    }
                    }
                    // g:1746:6 / g:1747:7 -- consume the operator as a feature cross-reference
                    {
                    // g:1747:7 / g:1748:8 -- ruleOpMulti
                    {
                    if ( state . backtracking == 0 ) {
                        if ( current == null ) {
                            current = createModelElement ( grammarAccess . getXMultiplicativeExpressionRule ( ) ) ;
                        }
                    }
                    if ( state . backtracking == 0 ) {
                        newCompositeNode ( grammarAccess . getXMultiplicativeExpressionAccess ( ) . getFeatureJvmIdentifiableElementCrossReference_1_0_0_1_0 ( ) ) ;
                    }
                    pushFollow ( FOLLOW_9 ) ;
                    ruleOpMulti ( ) ;
                    state . _fsp -- ;
                    if ( state . failed ) return current ;
                    if ( state . backtracking == 0 ) {
                        afterParserOrEnumRuleCall ( ) ;
                    }
                    }
                    }
                    }
                    }
                    // g:1764:4 / g:1765:5 -- right operand assignment
                    {
                    // g:1765:5 / g:1766:6 -- lv_rightOperand_3_0=ruleXUnaryOperation
                    {
                    if ( state . backtracking == 0 ) {
                        newCompositeNode ( grammarAccess . getXMultiplicativeExpressionAccess ( ) . getRightOperandXUnaryOperationParserRuleCall_1_1_0 ( ) ) ;
                    }
                    pushFollow ( FOLLOW_28 ) ;
                    lv_rightOperand_3_0 = ruleXUnaryOperation ( ) ;
                    state . _fsp -- ;
                    if ( state . failed ) return current ;
                    if ( state . backtracking == 0 ) {
                        if ( current == null ) {
                            current = createModelElementForParent ( grammarAccess . getXMultiplicativeExpressionRule ( ) ) ;
                        }
                        set ( current , "rightOperand" , lv_rightOperand_3_0 , "org.eclipse.xtext.xbase.Xbase.XUnaryOperation" ) ;
                        afterParserOrEnumRuleCall ( ) ;
                    }
                    }
                    }
                    }
                    break ;
                default :
                    // No further operator: leave the tail loop.
                    break loop29 ;
                }
            } while ( true ) ;
            }
            }
            if ( state . backtracking == 0 ) {
                leaveRule ( ) ;
            }
        }
        catch ( RecognitionException re ) {
            recover ( input , re ) ;
            appendSkippedTokens ( ) ;
        }
        finally {
        }
        return current ;
    }
}
public class Tuple3ifx { /** * Replies the z property . * @ return the z property . */ @ Pure public IntegerProperty zProperty ( ) { } }
if ( this . z == null ) { this . z = new SimpleIntegerProperty ( this , MathFXAttributeNames . Z ) ; } return this . z ;
public class DBInitializer { /** * Init root node parent record . */ protected void postInit ( Connection connection ) throws SQLException { } }
String select = "select * from " + DBInitializerHelper . getItemTableName ( containerConfig ) + " where ID='" + Constants . ROOT_PARENT_UUID + "' and PARENT_ID='" + Constants . ROOT_PARENT_UUID + "'" ; if ( ! connection . createStatement ( ) . executeQuery ( select ) . next ( ) ) { String insert = DBInitializerHelper . getRootNodeInitializeScript ( containerConfig ) ; connection . createStatement ( ) . executeUpdate ( insert ) ; }
public class CodecUtil { /** * Checks if is single position prefix . * @ param fieldInfo * the field info * @ param prefix * the prefix * @ return true , if is single position prefix * @ throws IOException * Signals that an I / O exception has occurred . */ public static boolean isSinglePositionPrefix ( FieldInfo fieldInfo , String prefix ) throws IOException { } }
if ( fieldInfo == null ) { throw new IOException ( "no fieldInfo" ) ; } else { String info = fieldInfo . getAttribute ( MtasCodecPostingsFormat . MTAS_FIELDINFO_ATTRIBUTE_PREFIX_SINGLE_POSITION ) ; if ( info == null ) { throw new IOException ( "no " + MtasCodecPostingsFormat . MTAS_FIELDINFO_ATTRIBUTE_PREFIX_SINGLE_POSITION ) ; } else { return Arrays . asList ( info . split ( Pattern . quote ( MtasToken . DELIMITER ) ) ) . contains ( prefix ) ; } }
public class CmsJspNavBuilder { /** * Returns a navigation element for the named resource . < p > * @ param sitePath the resource name to get the navigation information for , * must be a full path name , e . g . " / docs / index . html " * @ return a navigation element for the given resource */ public CmsJspNavElement getNavigationForResource ( String sitePath ) { } }
CmsJspNavElement result = getNavigationForResource ( sitePath , CmsResourceFilter . DEFAULT , false ) ; if ( ( result != null ) && ( result . getNavContext ( ) == null ) ) { result . setNavContext ( new NavContext ( this , Visibility . navigation , CmsResourceFilter . DEFAULT ) ) ; } return result ;
public class Execution {
    /**
     * NOTE: This method only throws exceptions if it is in an illegal state to be scheduled, or if the tasks needs
     * to be scheduled immediately and no resource is available. If the task is accepted by the schedule, any
     * error sets the vertex state to failed and triggers the recovery logic.
     *
     * @param slotProvider The slot provider to use to allocate slot for this execution attempt.
     * @param queued Flag to indicate whether the scheduler may queue this task if it cannot
     *        immediately deploy it.
     * @param locationPreferenceConstraint constraint for the location preferences
     * @param allPreviousExecutionGraphAllocationIds set with all previous allocation ids in the job graph.
     *        Can be empty if the allocation ids are not required for scheduling.
     * @return Future which is completed once the Execution has been deployed
     */
    public CompletableFuture < Void > scheduleForExecution ( SlotProvider slotProvider , boolean queued , LocationPreferenceConstraint locationPreferenceConstraint , @ Nonnull Set < AllocationID > allPreviousExecutionGraphAllocationIds ) {
        assertRunningInJobMasterMainThread ( ) ;
        final ExecutionGraph executionGraph = vertex . getExecutionGraph ( ) ;
        final Time allocationTimeout = executionGraph . getAllocationTimeout ( ) ;
        try {
            // Request a slot for this execution attempt.
            final CompletableFuture < Execution > allocationFuture = allocateAndAssignSlotForExecution ( slotProvider , queued , locationPreferenceConstraint , allPreviousExecutionGraphAllocationIds , allocationTimeout ) ;
            final CompletableFuture < Void > deploymentFuture ;
            if ( allocationFuture . isDone ( ) || queued ) {
                // Either the slot is already available or queueing is allowed:
                // deploy once the allocation completes.
                deploymentFuture = allocationFuture . thenRun ( ThrowingRunnable . unchecked ( this :: deploy ) ) ;
            } else {
                // Immediate deployment was required but no slot is available yet.
                deploymentFuture = FutureUtils . completedExceptionally ( new IllegalArgumentException ( "The slot allocation future has not been completed yet." ) ) ;
            }
            // Any failure during allocation/deployment marks this execution failed;
            // a timeout is translated into a NoResourceAvailableException.
            deploymentFuture . whenComplete ( ( Void ignored , Throwable failure ) -> {
                if ( failure != null ) {
                    final Throwable stripCompletionException = ExceptionUtils . stripCompletionException ( failure ) ;
                    final Throwable schedulingFailureCause ;
                    if ( stripCompletionException instanceof TimeoutException ) {
                        schedulingFailureCause = new NoResourceAvailableException ( "Could not allocate enough slots within timeout of " + allocationTimeout + " to run the job. " + "Please make sure that the cluster has enough resources." ) ;
                    } else {
                        schedulingFailureCause = stripCompletionException ;
                    }
                    markFailed ( schedulingFailureCause ) ;
                }
            } ) ;
            return deploymentFuture ;
        } catch ( IllegalExecutionStateException e ) {
            // Illegal scheduling state: surface it through the returned future.
            return FutureUtils . completedExceptionally ( e ) ;
        }
    }
}
public class FileInfo { /** * < code > optional . alluxio . grpc . file . PAcl acl = 27 ; < / code > */ public alluxio . grpc . PAcl getAcl ( ) { } }
return acl_ == null ? alluxio . grpc . PAcl . getDefaultInstance ( ) : acl_ ;
public class nspbr { /** * Use this API to fetch all the nspbr resources that are configured on netscaler . */ public static nspbr [ ] get ( nitro_service service ) throws Exception { } }
nspbr obj = new nspbr ( ) ; nspbr [ ] response = ( nspbr [ ] ) obj . get_resources ( service ) ; return response ;
public class DescribeFleetsResult { /** * Information about the fleets . * @ param fleets * Information about the fleets . */ public void setFleets ( java . util . Collection < Fleet > fleets ) { } }
if ( fleets == null ) { this . fleets = null ; return ; } this . fleets = new java . util . ArrayList < Fleet > ( fleets ) ;
public class ExportQueue { /** * Registers an observer that will export queued data . Use this method in conjunction with * { @ link ExportQueue # configure ( String ) } . * @ since 1.1.0 */ public void registerObserver ( ObserverProvider . Registry obsRegistry , org . apache . fluo . recipes . core . export . function . Exporter < K , V > exporter ) { } }
Preconditions . checkState ( opts . exporterType == null , "Expected exporter type not be set, it was set to %s. Cannot not use the old and new way of configuring " + "exporters at the same time." , opts . exporterType ) ; Observer obs ; try { obs = new ExportObserverImpl < K , V > ( queueId , opts , serializer , exporter ) ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; } obsRegistry . forColumn ( ExportBucket . newNotificationColumn ( queueId ) , NotificationType . WEAK ) . withId ( "exportq-" + queueId ) . useObserver ( obs ) ;
public class RetentionSet { /** * Get a list of all retention reference stream cut records on or before ( inclusive ) the given record . * @ param record reference record * @ return list of reference records before given reference record . */ public List < StreamCutReferenceRecord > retentionRecordsBefore ( StreamCutReferenceRecord record ) { } }
Preconditions . checkNotNull ( record ) ; int beforeIndex = getGreatestLowerBound ( this , record . getRecordingTime ( ) , StreamCutReferenceRecord :: getRecordingTime ) ; return retentionRecords . subList ( 0 , beforeIndex + 1 ) ;
public class NodeManager { /** * Remove the references to the session * @ param session the session to be deleted */ public void deleteSession ( String session ) { } }
for ( Set < String > sessions : hostsToSessions . values ( ) ) { sessions . remove ( session ) ; }
public class HOSECodeGenerator {
  /**
   * Produces a HOSE code for atom {@code root} in the {@link IAtomContainer} {@code ac},
   * for the number of spheres given by {@code noOfSpheres}.
   *
   * IMPORTANT: if you want aromaticity to be included in the code, run {@code ac} through
   * the CDKHueckelAromaticityDetector prior to using this method. This method only gives
   * proper results if the molecule is fully saturated (if not, the order of the HOSE code
   * might depend on atoms in higher spheres). Known to fail for protons sometimes.
   * Your molecule must contain implicit or explicit hydrogens for proper results.
   *
   * @param ac the container holding the molecular skeleton in which the root atom resides
   * @param root the root atom for which to produce the HOSE code
   * @param noOfSpheres the number of spheres to look at
   * @param ringsize whether the size of the ring(s) the root is in is included in the center atom code
   * @return the HOSE code string
   * @throws CDKException if something is wrong
   */
  public String getHOSECode(IAtomContainer ac, IAtom root, int noOfSpheres, boolean ringsize) throws CDKException {
    ensureIsotopeFactory();
    // Canonical labeling makes the resulting code independent of input atom order.
    CanonicalLabeler canLabler = new CanonicalLabeler();
    canLabler.canonLabel(ac);
    // Reset per-invocation state (this generator is stateful and NOT thread-safe).
    centerCode = "";
    this.atomContainer = ac;
    maxSphere = noOfSpheres;
    // One list per sphere, index 0..noOfSpheres.
    spheres = new List[noOfSpheres + 1];
    // Clear visited flags so traversal starts fresh for this molecule.
    for (int i = 0; i < ac.getAtomCount(); i++) {
      ac.getAtom(i).setFlag(CDKConstants.VISITED, false);
    }
    root.setFlag(CDKConstants.VISITED, true);
    rootNode = new TreeNode(root.getSymbol(), null, root, (double) 0, atomContainer.getConnectedBondsCount(root), 0);
    /*
     * All we need to observe is how the ranking of substituents in the
     * subsequent spheres of the root nodes influences the ranking of the
     * first sphere, since the order of a node in a sphere depends on the
     * order the preceding node in its branch
     */
    HOSECode = new StringBuffer();
    // Build center code, expand spheres breadth-first, then serialize.
    createCenterCode(root, ac, ringsize);
    breadthFirstSearch(root, true);
    createCode();
    // Pad with sphere delimiters so the code always spans noOfSpheres spheres.
    fillUpSphereDelimiters();
    logger.debug("HOSECodeGenerator -> HOSECode: ", HOSECode);
    return HOSECode.toString();
  }
}
public class Context {
  /**
   * Resolves an instance, be it into the application or defined by a previous command
   * instruction. This method should be used as it simulates some modifications made to the
   * application by command instructions. When validating instructions, it will always be
   * more reliable than directly picking up instances in the application.
   *
   * @param instancePath a non-null instance path
   * @return an instance, or null if it was never created
   */
  public Instance resolveInstance(String instancePath) {
    Instance instance = null;
    // The context tracks, per instance path, the name of its component.
    String componentName = this.instancePathToComponentName.get(instancePath);
    if (componentName != null) {
      String instanceName = InstanceHelpers.findInstanceName(instancePath);
      Component component;
      // Paths created by "define variable" instructions use a fake component that does not
      // exist in the application model, so fabricate a matching placeholder instead of
      // looking it up.
      if (DefineVariableCommandInstruction.FAKE_COMPONENT_NAME.equals(componentName))
        component = new Component(DefineVariableCommandInstruction.FAKE_COMPONENT_NAME);
      else
        component = ComponentHelpers.findComponent(this.app, componentName);
      // Only build the instance when both the name and the component resolved.
      if (!Utils.isEmptyOrWhitespaces(instanceName) && component != null)
        instance = new Instance(instanceName).component(component);
      else
        this.logger.warning("Instance's component of " + instancePath + " could not be resolved.");
    }
    return instance;
  }
}
public class FileExecutor { /** * 合并文件 * @ param files 文件数组 * @ param destinationFile 目标文件 * @ param filterRegex 过滤规则数组 ( 正则表达式 , 与文件数组一一对应 , 即第个文件使用一个过滤规则 , 为空时不过滤 ) * @ throws IOException 异常 */ public static void mergeFiles ( File [ ] files , File destinationFile , String [ ] filterRegex ) throws IOException { } }
if ( filterRegex . length < files . length ) { filterRegex = ArrayUtils . concatArrays ( filterRegex , new String [ files . length - filterRegex . length ] ) ; } createNewFile ( destinationFile ) ; int i = 0 ; for ( File file : files ) { mergeFiles ( file , destinationFile , filterRegex [ i ++ ] ) ; }
public class PartitionStateManager { /** * Returns a copy of the current partition table . */ public InternalPartition [ ] getPartitionsCopy ( ) { } }
NopPartitionListener listener = new NopPartitionListener ( ) ; InternalPartition [ ] result = new InternalPartition [ partitions . length ] ; for ( int i = 0 ; i < partitionCount ; i ++ ) { result [ i ] = partitions [ i ] . copy ( listener ) ; } return result ;
public class MockWebServer {
  /**
   * Transfer bytes from {@code source} to {@code sink} until either {@code byteCount} bytes
   * have been transferred or {@code source} is exhausted. The transfer is throttled
   * according to {@code policy}: at most {@code bytesPerPeriod} bytes are moved per period,
   * with a sleep between periods. If the policy requests a mid-body disconnect, the socket
   * is closed once exactly half of {@code byteCount} remains.
   *
   * @param policy response policy supplying throttle settings and socket policy
   * @param socket the connection; transfer stops when it is closed
   * @param source where bytes are read from
   * @param sink where bytes are written to
   * @param byteCount maximum number of bytes to transfer
   * @param isRequest true when transferring a request body (selects which disconnect
   *        policy applies)
   */
  private void throttledTransfer(MockResponse policy, Socket socket, BufferedSource source, BufferedSink sink, long byteCount, boolean isRequest) throws IOException {
    if (byteCount == 0) return;
    Buffer buffer = new Buffer();
    long bytesPerPeriod = policy.getThrottleBytesPerPeriod();
    long periodDelayMs = policy.getThrottlePeriod(TimeUnit.MILLISECONDS);
    // The disconnect point: once byteCount drops to this value, half the body has moved.
    long halfByteCount = byteCount / 2;
    boolean disconnectHalfway = isRequest ? policy.getSocketPolicy() == DISCONNECT_DURING_REQUEST_BODY : policy.getSocketPolicy() == DISCONNECT_DURING_RESPONSE_BODY;
    while (!socket.isClosed()) {
      for (long b = 0; b < bytesPerPeriod; ) {
        // Ensure we do not read past the allotted bytes in this period .
        long toRead = Math.min(byteCount, bytesPerPeriod - b);
        // Ensure we do not read past halfway if the policy will kill the connection .
        if (disconnectHalfway) {
          toRead = Math.min(toRead, byteCount - halfByteCount);
        }
        long read = source.read(buffer, toRead);
        if (read == -1) return;
        sink.write(buffer, read);
        sink.flush();
        b += read;
        byteCount -= read;
        if (disconnectHalfway && byteCount == halfByteCount) {
          // Kill the connection exactly at the halfway mark.
          socket.close();
          return;
        }
        if (byteCount == 0) return;
      }
      if (periodDelayMs != 0) {
        try {
          Thread.sleep(periodDelayMs);
        } catch (InterruptedException e) {
          throw new AssertionError(e);
        }
      }
    }
  }
}
public class DynamoDBProgrammaticKeyValue {
  /**
   * Creates a high-resolution CloudWatch alarm on a client-published consumed-capacity
   * metric. A high-resolution alarm is one configured to fire on threshold breaches of
   * high-resolution metrics. DynamoDB only publishes 1-minute consumed capacity metrics;
   * publishing client-side high-resolution metrics lets you react to load spikes quicker.
   *
   * @param alarmName name of the high resolution alarm to create
   * @param metricName name of the metric to alarm on
   * @param threshold threshold at which to alarm on after 5 breaches of the threshold
   */
  private void createHighResolutionAlarm(String alarmName, String metricName, double threshold) {
    putMetricAlarm.apply(new PutMetricAlarmRequest()
        .withNamespace(CUSTOM_TABLE_METRICS_NAMESPACE)
        .withDimensions(tableDimension)
        .withMetricName(metricName)
        .withAlarmName(alarmName)
        // Sum over each period, counted in raw units.
        .withStatistic(Statistic.Sum)
        .withUnit(StandardUnit.Count)
        .withComparisonOperator(ComparisonOperator.GreaterThanThreshold)
        .withDatapointsToAlarm(5)
        .withEvaluationPeriods(5) // alarm when 5 out of 5 consecutive measurements are high
        .withActionsEnabled(false) // TODO add actions in a later PR
        .withPeriod(10) // high resolution alarm
        // NOTE(review): threshold is scaled by 10, presumably to convert a per-second
        // threshold into a per-10-second-period sum — confirm against the metric publisher.
        .withThreshold(10 * threshold));
  }
}
public class JdbcCpoXaAdapter { /** * Persists a collection of Objects into the datasource . The CpoAdapter will check to see if this object exists in the * datasource . If it exists , the object is updated in the datasource If the object does not exist , then it is created * in the datasource . This method stores the object in the datasource . The objects in the collection will be treated * as one transaction , meaning that if one of the objects fail being inserted or updated in the datasource then the * entire collection will be rolled back . * < pre > Example : * < code > * class SomeObject so = null ; * class CpoAdapter cpo = null ; * try { * cpo = new JdbcCpoAdapter ( new JdbcDataSourceInfo ( driver , url , user , password , 1,1 , false ) ) ; * } catch ( CpoException ce ) { * / / Handle the error * cpo = null ; * if ( cpo ! = null ) { * ArrayList al = new ArrayList ( ) ; * for ( int i = 0 ; i < 3 ; i + + ) { * so = new SomeObject ( ) ; * so . setId ( 1 ) ; * so . setName ( " SomeName " ) ; * al . add ( so ) ; * try { * cpo . persistObjects ( al ) ; * } catch ( CpoException ce ) { * / / Handle the error * < / code > * < / pre > * @ param coll This is a collection of objects that have been defined within the metadata of the datasource . If the * class is not defined an exception will be thrown . * @ return DOCUMENT ME ! * @ throws CpoException Thrown if there are errors accessing the datasource * @ see # existsObject * @ see # insertObject * @ see # updateObject */ @ Override public < T > long persistObjects ( Collection < T > coll ) throws CpoException { } }
return getCurrentResource ( ) . persistObjects ( coll ) ;
public class DRUMS { /** * Adds or merges the given data . If all memory buckets are full , this method is blocking the calling thread . < br > * < br > * A merge calls the method { @ link Data # merge ( AbstractKVStorable ) } . * @ param toPersist * data to insert or update * @ throws DRUMSException * if an unexpected error occurs * @ throws InterruptedException * if the call blocks and the current thread is interrupted */ public void insertOrMerge ( Data ... toPersist ) throws DRUMSException , InterruptedException { } }
try { bucketContainer . addToCache ( toPersist ) ; } catch ( BucketContainerException ex ) { // This exception should never be thrown because the hash function should map all keys to a bucket . throw new DRUMSException ( ex ) ; }
public class Morphia { /** * Creates an entity and populates its state based on the dbObject given . This method is primarily an internal method . Reliance on * this method may break your application in future releases . * @ param < T > type of the entity * @ param datastore the Datastore to use when fetching this reference * @ param entityClass type to create * @ param dbObject the object state to use * @ return the newly created and populated entity */ public < T > T fromDBObject ( final Datastore datastore , final Class < T > entityClass , final DBObject dbObject ) { } }
return fromDBObject ( datastore , entityClass , dbObject , mapper . createEntityCache ( ) ) ;
public class SimpleExpression { /** * Create a { @ code this < > right } expression * @ param right rhs of the comparison * @ return this ! = right */ public BooleanExpression ne ( Expression < ? super T > right ) { } }
return Expressions . booleanOperation ( Ops . NE , mixin , right ) ;
public class CacheImpl { /** * Checks if the given { @ link CacheItem } has expired or needs to be refreshed . * @ param item * @ param itemsToRefresh the list of items where to put the specified item if it needs to be refreshed */ protected void doChecks ( CacheItem item , List < CacheItem > itemsToRefresh ) { } }
boolean expired ; boolean willBeRefreshed ; try { expired = checkForExpiration ( item ) ; if ( expired ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( item + " was removed because it expired" ) ; } } else { willBeRefreshed = checkForRefresh ( item , itemsToRefresh ) ; if ( willBeRefreshed ) { if ( logger . isDebugEnabled ( ) ) { logger . debug ( item + " will be refreshed" ) ; } } } } catch ( Exception ex ) { logger . warn ( "Exception while checking " + item + " for expiration/refresh" , ex ) ; }
public class ObjectMapperProvider {
  /**
   * Writes the counter member to JSON, subject to two optional filters: a member-level
   * filter and a per-counter ("group.counterKey") filter.
   *
   * NOTE(review): the javadoc claims "if filter itself is null, writes out that member",
   * but when BOTH filters are null this method writes nothing — confirm whether that is
   * intended before relying on it.
   *
   * @param member the field name under which the counters are written
   * @param includeFilter member-level filter; when the counter filter is null, the whole
   *        {@code counterMap} is written only if this accepts {@code member}
   * @param includeCounterFilter per-counter filter on "group.key" names; when non-null it
   *        takes precedence and counters are written individually
   * @param counterMap counters organized by group
   * @param jsonGenerator the generator to write to
   * @throws IOException on write failure
   */
  public static void filteredCounterWrite(String member, Predicate<String> includeFilter, Predicate<String> includeCounterFilter, CounterMap counterMap, JsonGenerator jsonGenerator) throws IOException {
    if (includeFilter != null && includeCounterFilter == null) {
      // Member-level filtering only: dump the whole counter map if the member passes.
      if (includeFilter.apply(member)) {
        jsonGenerator.writeFieldName(member);
        jsonGenerator.writeObject(counterMap);
      }
    } else {
      if (includeCounterFilter != null) {
        // get group name , counter name ,
        // check if it is wanted
        // if yes print it .
        // Tracks whether the current group's JSON object has been opened; groups with no
        // matching counters produce no object at all.
        boolean startObjectGroupMap = false;
        jsonGenerator.writeFieldName(member);
        String fullCounterName;
        jsonGenerator.writeStartObject();
        for (String group : counterMap.getGroups()) {
          Map<String, Counter> groupMap = counterMap.getGroup(group);
          for (Map.Entry<String, Counter> nameCounterEntry : groupMap.entrySet()) {
            Counter counter = nameCounterEntry.getValue();
            // Filter key is "<group>.<counterKey>".
            fullCounterName = group + "." + counter.getKey();
            if (includeCounterFilter.apply(fullCounterName)) {
              if (startObjectGroupMap == false) {
                // Lazily open the group object on the first matching counter.
                jsonGenerator.writeFieldName(group);
                jsonGenerator.writeStartObject();
                startObjectGroupMap = true;
              }
              jsonGenerator.writeFieldName(counter.getKey());
              jsonGenerator.writeNumber(counter.getValue());
            }
          }
          if (startObjectGroupMap) {
            jsonGenerator.writeEndObject();
            startObjectGroupMap = false;
          }
        }
        jsonGenerator.writeEndObject();
      }
    }
  }
}
public class SCM {
  /**
   * Compares the current state of the remote repository against the given baseline
   * {@link SCMRevisionState}. Conceptually, polling takes two states of the repository and
   * compares them for differences; since comparing two arbitrary states is expensive, this
   * abstraction mixes building up a repository state with comparing it to the earlier one.
   * Multiple invocations may happen over time to make sure the remote repository is
   * "quiet" before Hudson schedules a new build.
   *
   * @param project the project to check for updates
   * @param launcher abstraction of the machine where the polling takes place; null if
   *        {@linkplain #requiresWorkspaceForPolling() polling doesn't require a workspace}
   * @param workspace workspace directory containing baseline files; null if polling
   *        doesn't require a workspace
   * @param listener logs during the polling should be sent here
   * @param baseline the baseline of the comparison, as returned by an earlier
   *        {@code compareRemoteRevisionWith} or {@code calcRevisionsFromBuild} call
   * @return a {@link PollingResult} bundling the baseline, the current remote state, and
   *         the degree of change found
   * @throws InterruptedException usually caused by the user aborting the computation;
   *         should simply be propagated all the way up
   * @since 1.568
   */
  public PollingResult compareRemoteRevisionWith(@Nonnull Job<?, ?> project, @Nullable Launcher launcher, @Nullable FilePath workspace, @Nonnull TaskListener listener, @Nonnull SCMRevisionState baseline) throws IOException, InterruptedException {
    // Backward-compatibility shim: if the subclass still overrides the deprecated
    // AbstractProject-typed overload (detected reflectively), delegate to it.
    if (project instanceof AbstractProject && Util.isOverridden(SCM.class, getClass(), "compareRemoteRevisionWith", AbstractProject.class, Launcher.class, FilePath.class, TaskListener.class, SCMRevisionState.class)) {
      return compareRemoteRevisionWith((AbstractProject) project, launcher, workspace, listener, baseline);
    } else {
      // Neither overload implemented: the subclass must override this (new) one.
      throw new AbstractMethodError("you must override the new overload of compareRemoteRevisionWith");
    }
  }
}
public class MethodNode { /** * Makes the given class visitor visit this method . * @ param cv * a class visitor . */ public void accept ( final ClassVisitor cv ) { } }
String [ ] exceptions = new String [ this . exceptions . size ( ) ] ; this . exceptions . toArray ( exceptions ) ; MethodVisitor mv = cv . visitMethod ( access , name , desc , signature , exceptions ) ; if ( mv != null ) { accept ( mv ) ; }
public class JodaBeanSimpleMapReader {
  /**
   * Parses one value from the already-decoded input, dispatching on the declared type:
   * beans are parsed from maps, collections/maps are parsed via a {@link SerIterable},
   * and everything else is treated as a simple value.
   *
   * @param input the decoded input value (Map, List, or simple object)
   * @param declaredType the statically declared type of the value being parsed
   * @param metaProp the meta-property being populated, or null when not inside a bean
   * @param beanType the bean class owning {@code metaProp}, if any
   * @param parentIterable the enclosing iterable context, or null at the top level
   * @return the parsed object
   * @throws Exception if parsing fails
   */
  @SuppressWarnings("unchecked")
  private Object parseObject(Object input, Class<?> declaredType, MetaProperty<?> metaProp, Class<?> beanType, SerIterable parentIterable) throws Exception {
    // parse based on type
    if (Bean.class.isAssignableFrom(declaredType)) {
      if (input instanceof Map) {
        return parseBean((Map<String, Object>) input, declaredType);
      } else {
        // A bean declared type but non-map input: fall back to simple parsing.
        return parseSimple(input, declaredType);
      }
    } else {
      if (input instanceof List || input instanceof Map) {
        // Resolve the iterable context: prefer the meta-property, then the parent
        // iterable, then defaults derived from the input/declared type.
        SerIterable childIterable = null;
        if (metaProp != null) {
          childIterable = settings.getIteratorFactory().createIterable(metaProp, beanType, true);
        } else if (parentIterable != null) {
          childIterable = settings.getIteratorFactory().createIterable(parentIterable);
        }
        if (childIterable == null) {
          if (input instanceof List) {
            if (declaredType.isArray()) {
              childIterable = SerIteratorFactory.array(declaredType.getComponentType());
            } else {
              childIterable = SerIteratorFactory.list(Object.class, Collections.<Class<?>>emptyList());
            }
          } else {
            // Map input with no better context: assume string-keyed map of objects.
            childIterable = SerIteratorFactory.map(String.class, Object.class, Collections.<Class<?>>emptyList());
          }
        }
        return parseIterable(input, childIterable);
      } else {
        return parseSimple(input, declaredType);
      }
    }
  }
}
public class Reflection { /** * Find the best method on the target class that matches the signature specified with the specified name and the list of * argument classes . This method first attempts to find the method with the specified argument classes ; if no such method is * found , a NoSuchMethodException is thrown . * @ param methodName the name of the method that is to be invoked . * @ param argumentsClasses the list of Class instances that correspond to the classes for each argument passed to the method . * @ return the Method object that references the method that satisfies the requirements , or null if no satisfactory method * could be found . * @ throws NoSuchMethodException if a matching method is not found . * @ throws SecurityException if access to the information is denied . */ public Method findBestMethodWithSignature ( String methodName , Class < ? > ... argumentsClasses ) throws NoSuchMethodException , SecurityException { } }
return findBestMethodWithSignature ( methodName , true , argumentsClasses ) ;
public class PESection { /** * Returns the section bytes . The file is read if the bytes aren ' t * already loaded . * @ return bytes of the section * @ throws IOException * if file can not be read * @ throws IllegalStateException * if section is too large to fit into a byte array . This * happens if the size is larger than int can hold . */ public byte [ ] getBytes ( ) throws IOException { } }
if ( sectionbytes . isPresent ( ) ) { return sectionbytes . get ( ) . clone ( ) ; } loadSectionBytes ( ) ; byte [ ] result = sectionbytes . get ( ) ; assert result != null ; return result ;
public class MethodBuilder { /** * Add proxy method for equals */ private void addEquals ( TypeSpec . Builder classBuilder ) { } }
MethodSpec . Builder methodBuilder = MethodSpec . methodBuilder ( "equals" ) . addModifiers ( Modifier . PUBLIC ) . addParameter ( ClassName . get ( Object . class ) , "obj" ) . returns ( boolean . class ) . addAnnotation ( Override . class ) . addStatement ( "return (obj instanceof " + getRemoterInterfaceClassName ( ) + ClassBuilder . PROXY_SUFFIX + ") && obj.hashCode() == hashCode()" ) ; classBuilder . addMethod ( methodBuilder . build ( ) ) ;
public class MetaStore {
  /**
   * Note: used in backward compatibility code with pre 1.2.6 release. This can be removed
   * in later releases.
   *
   * Reads the term (long at offset 0) and vote (int at offset 8) out of the old metadata
   * buffer, deletes the backing file, and rewrites the two values into a fresh 12-byte
   * buffer on the same path — effectively truncating any trailing configuration data.
   *
   * @param metaFile the metadata file backing {@code buffer}
   * @param buffer the old metadata buffer; it is closed by this method
   * @return a new 12-byte buffer holding only term and vote
   */
  private FileBuffer deleteConfigurationFromMetadataBuffer(File metaFile, FileBuffer buffer) {
    // Preserve the two fields that survive the migration.
    long term = buffer.readLong(0);
    int vote = buffer.readInt(8);
    buffer.close();
    try {
      Files.delete(metaFile.toPath());
    } catch (IOException ex) {
      throw new RuntimeException(String.format("Failed to delete [%s].", metaFile), ex);
    }
    // Recreate the file sized exactly for term (8 bytes) + vote (4 bytes).
    FileBuffer truncatedBuffer = FileBuffer.allocate(metaFile, 12);
    truncatedBuffer.writeLong(0, term).flush();
    truncatedBuffer.writeInt(8, vote).flush();
    return truncatedBuffer;
  }
}
public class ActionUtil { /** * Adds / removes an action to / from a component . * @ param component Component to be associated with the action . * @ param action Action to invoke when listener event is triggered . If null , dissociates the * event listener from the component . * @ param eventName The name of the event that will trigger the action . * @ return The newly created or just removed action listener . */ public static ActionListener addAction ( BaseComponent component , IAction action , String eventName ) { } }
ActionListener listener ; if ( action == null ) { listener = removeAction ( component , eventName ) ; } else { listener = getListener ( component , eventName ) ; if ( listener == null ) { listener = new ActionListener ( component , action , eventName ) ; } else { listener . setAction ( action ) ; } } return listener ;
public class JobInformationRecorder { /** * Sets the key value pair to the job information . This would be persisted along with the other job * information like start time , end time etc * @ param key key of the key value pair * @ param value value of the key value pair * @ return this object for chaining */ public JobInformationRecorder withProperty ( String key , String value ) { } }
if ( sourceBatcher . isStarted ( ) ) throw new IllegalStateException ( "Configuration cannot be changed after startJob has been called" ) ; properties . put ( key , value ) ; return this ;
public class HttpHelper { /** * Create COPY method * @ param sourcePath Source path , relative to repository baseURL * @ param destinationPath Destination path , relative to repository baseURL * @ return COPY method */ public HttpCopy createCopyMethod ( final String sourcePath , final String destinationPath ) { } }
return new HttpCopy ( repositoryURL + sourcePath , repositoryURL + destinationPath ) ;
public class MojoHelper { /** * Check that we have a valid set of platforms . * If no platforms are configured we create 1 default platform . * @ param platforms the list of BuildPlatform ' s to validate * @ return the passed in list or a new list with a default platform added * @ throws MojoExecutionException if the configuration is invalid . */ public static List < BuildPlatform > validatePlatforms ( List < BuildPlatform > platforms ) throws MojoExecutionException { } }
if ( platforms == null ) { platforms = new ArrayList < BuildPlatform > ( ) ; platforms . add ( new BuildPlatform ( ) ) ; } else { Set < String > platformNames = new HashSet < String > ( ) ; for ( BuildPlatform platform : platforms ) { if ( platformNames . contains ( platform . getName ( ) ) ) { throw new MojoExecutionException ( "Duplicate platform '" + platform . getName ( ) + "' in configuration, platform names must be unique" ) ; } platformNames . add ( platform . getName ( ) ) ; platform . identifyPrimaryConfiguration ( ) ; } } return platforms ;
public class FileSystem {
  /**
   * Creates a new empty directory in the specified directory, using the given prefix and
   * suffix strings to generate its name. On success it is guaranteed that the directory
   * did not exist before this invocation and that no variant of this method returns the
   * same abstract pathname again in the current JVM. To arrange for automatic deletion,
   * use {@link #deleteOnExit}.
   *
   * The prefix must be at least three characters long. A null suffix defaults to
   * {@code ".tmp"}. A null directory defaults to the system temporary directory given by
   * the {@code java.io.tmpdir} system property.
   *
   * @param prefix the prefix string used in generating the directory name; must be at
   *        least three characters long
   * @param suffix the suffix string used in generating the directory name; may be null,
   *        in which case {@code ".tmp"} is used
   * @param directory the directory in which the new directory is to be created, or null
   *        for the default temporary-file directory
   * @return an abstract pathname denoting a newly-created empty directory
   * @throws IllegalArgumentException if {@code prefix} contains fewer than three characters
   * @throws IOException if a directory could not be created
   * @throws SecurityException if a security manager denies write access
   * @since 6.2
   */
  public static File createTempDirectory(String prefix, String suffix, File directory) throws IOException {
    if (prefix == null) {
      throw new NullPointerException();
    }
    if (prefix.length() < 3) {
      throw new IllegalArgumentException(Locale.getString("E4", 3, prefix)); // $NON-NLS-1$
    }
    final String string = (suffix == null) ? ".tmp" : suffix; // $NON-NLS-1$
    final File targetDirectory;
    if (directory == null) {
      targetDirectory = new File(System.getProperty("java.io.tmpdir")); // $NON-NLS-1$
    } else {
      targetDirectory = directory;
    }
    File filename;
    // Retry with a fresh random name until mkdirs succeeds (i.e. the name was unused and
    // the directory hierarchy could be created).
    // NOTE(review): if mkdirs persistently fails (e.g. no write permission), this loop
    // never terminates and the declared IOException is never thrown — confirm intended.
    do {
      long index = RANDOM.nextLong();
      if (index == Long.MIN_VALUE) {
        // corner case: Math.abs(Long.MIN_VALUE) is still negative, so pin to 0.
        index = 0;
      } else {
        index = Math.abs(index);
      }
      final StringBuilder buffer = new StringBuilder();
      buffer.append(prefix);
      buffer.append(Long.toString(index));
      buffer.append(string);
      filename = new File(targetDirectory, buffer.toString());
    } while (!filename.mkdirs());
    return filename;
  }
}