signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ConsoleServlet { /** * Displays more detailed configuration * @ return html representation of the verbose hub config */ private String getVerboseConfig ( ) { } }
StringBuilder builder = new StringBuilder ( ) ; GridHubConfiguration config = getRegistry ( ) . getHub ( ) . getConfiguration ( ) ; builder . append ( "<div id='verbose-config-container'>" ) ; builder . append ( "<a id='verbose-config-view-toggle' href='#'>View Verbose</a>" ) ; builder . append ( "<div id='verbose-config-content'>" ) ; GridHubConfiguration tmp = new GridHubConfiguration ( ) ; builder . append ( "<br/><b>The final configuration comes from:</b><br/>" ) ; builder . append ( "<b>the default :</b><br/>" ) ; builder . append ( prettyHtmlPrint ( tmp ) ) ; if ( config . getRawArgs ( ) != null ) { builder . append ( "<b>updated with command line options:</b><br/>" ) ; builder . append ( String . join ( " " , config . getRawArgs ( ) ) ) ; if ( config . getConfigFile ( ) != null ) { builder . append ( "<br/><b>and configuration loaded from " ) . append ( config . getConfigFile ( ) ) . append ( ":</b><br/>" ) ; try { builder . append ( String . join ( "<br/>" , Files . readAllLines ( new File ( config . getConfigFile ( ) ) . toPath ( ) ) ) ) ; } catch ( IOException e ) { builder . append ( "<b>" ) . append ( e . getMessage ( ) ) . append ( "</b>" ) ; } } } builder . append ( "</div>" ) ; // End of Verbose Content builder . append ( "</div>" ) ; // End of Verbose Container return builder . toString ( ) ;
public class AttributeDefinition { /** * Finds a value in the given { @ code model } whose key matches this attribute ' s { @ link # getName ( ) name } , * uses the given { @ code context } to { @ link OperationContext # resolveExpressions ( org . jboss . dmr . ModelNode ) resolve } * it and validates it using this attribute ' s { @ link # getValidator ( ) validator } . If the value is * undefined and a { @ link # getDefaultValue ( ) default value } is available , the default value is used . * @ param context the operation context * @ param model model node of type { @ link ModelType # OBJECT } , typically representing a model resource * @ return the resolved value , possibly the default value if the model does not have a defined value matching * this attribute ' s name * @ throws OperationFailedException if the value is not valid */ public ModelNode resolveModelAttribute ( final OperationContext context , final ModelNode model ) throws OperationFailedException { } }
return resolveModelAttribute ( new ExpressionResolver ( ) { @ Override public ModelNode resolveExpressions ( ModelNode node ) throws OperationFailedException { return context . resolveExpressions ( node ) ; } } , model ) ;
public class TwitterEndpointServices {

    /**
     * Evaluate the response from the oauth/request_token endpoint. This checks the
     * status/emptiness of the response and ensures that oauth_callback_confirmed,
     * oauth_token, and oauth_token_secret values are contained in the response.
     *
     * @param responseBody raw body returned by the request_token endpoint
     * @return a result map containing either an error entry (on any validation
     *         failure) or a success status plus the extracted oauth_token and
     *         oauth_token_secret values
     */
    public Map<String, Object> evaluateRequestTokenResponse(String responseBody) {
        Map<String, Object> response = new HashMap<String, Object>();
        String endpoint = TwitterConstants.TWITTER_ENDPOINT_REQUEST_TOKEN;
        // Parse the body into key/value pairs.
        Map<String, String> responseValues = populateResponseValues(responseBody);
        Map<String, Object> result = checkForEmptyResponse(endpoint, responseBody, responseValues);
        if (result != null) {
            return result;
        }
        // Ensure response contains oauth_callback_confirmed, oauth_token and
        // oauth_token_secret parameters.
        result = checkForRequiredParameters(endpoint, responseValues,
                TwitterConstants.RESPONSE_OAUTH_CALLBACK_CONFIRMED,
                TwitterConstants.RESPONSE_OAUTH_TOKEN,
                TwitterConstants.RESPONSE_OAUTH_TOKEN_SECRET);
        if (result != null) {
            return result;
        }
        // oauth_callback_confirmed must literally be "true" (case-insensitive).
        String callbackConfirmedVal = responseValues.get(TwitterConstants.RESPONSE_OAUTH_CALLBACK_CONFIRMED);
        if (!callbackConfirmedVal.equalsIgnoreCase("true")) {
            return createErrorResponse("TWITTER_RESPONSE_PARAM_WITH_WRONG_VALUE",
                    new Object[] { TwitterConstants.RESPONSE_OAUTH_CALLBACK_CONFIRMED, endpoint, "true", callbackConfirmedVal });
        }
        String requestToken = "";
        for (Entry<String, String> entry : responseValues.entrySet()) {
            String key = entry.getKey();
            String value = entry.getValue();
            if (key.equals(TwitterConstants.RESPONSE_OAUTH_TOKEN)) {
                requestToken = value;
                if (requestToken.isEmpty()) {
                    return createErrorResponse("TWITTER_RESPONSE_PARAMETER_EMPTY",
                            new Object[] { TwitterConstants.RESPONSE_OAUTH_TOKEN, endpoint });
                }
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, TwitterConstants.RESPONSE_OAUTH_TOKEN + "=" + requestToken);
                }
            } else if (key.equals(TwitterConstants.RESPONSE_OAUTH_TOKEN_SECRET)) {
                // NOTE: tokenSecret is an instance field, not a local — the secret
                // is retained on this object for the later access-token exchange.
                tokenSecret = value;
                if (tokenSecret.isEmpty()) {
                    return createErrorResponse("TWITTER_RESPONSE_PARAMETER_EMPTY",
                            new Object[] { TwitterConstants.RESPONSE_OAUTH_TOKEN_SECRET, endpoint });
                }
                if (tc.isDebugEnabled()) {
                    // Request token secrets are short lived, so logging them should not be an issue
                    Tr.debug(tc, TwitterConstants.RESPONSE_OAUTH_TOKEN_SECRET + "=" + tokenSecret);
                }
            } else if (!key.equals(TwitterConstants.RESPONSE_OAUTH_CALLBACK_CONFIRMED)) {
                // Unknown parameters are tolerated but traced for diagnostics.
                if (tc.isDebugEnabled()) {
                    Tr.debug(tc, "Found an unexpected parameter in the response: " + key + "=" + value);
                }
            }
        }
        response.put(TwitterConstants.RESULT_RESPONSE_STATUS, TwitterConstants.RESULT_SUCCESS);
        response.put(TwitterConstants.RESPONSE_OAUTH_TOKEN, requestToken);
        response.put(TwitterConstants.RESPONSE_OAUTH_TOKEN_SECRET, tokenSecret);
        return response;
    }
}
public class FeaturesInner { /** * Gets all the preview features in a provider namespace that are available through AFEC for the subscription . * @ param resourceProviderNamespace The namespace of the resource provider for getting features . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < List < FeatureResultInner > > list1Async ( final String resourceProviderNamespace , final ListOperationCallback < FeatureResultInner > serviceCallback ) { } }
return AzureServiceFuture . fromPageResponse ( list1SinglePageAsync ( resourceProviderNamespace ) , new Func1 < String , Observable < ServiceResponse < Page < FeatureResultInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < FeatureResultInner > > > call ( String nextPageLink ) { return list1NextSinglePageAsync ( nextPageLink ) ; } } , serviceCallback ) ;
public class Strings {

    /**
     * Ensures a given text starts with the requested prefix. If the {@code text}
     * argument is empty, the prefix itself is returned; if {@code text} is null,
     * null is returned.
     *
     * @param text text to add the prefix to, null accepted,
     * @param prefix prefix to force on the text start.
     * @return prefixed text, or null when {@code text} is null.
     */
    public static String setPrefix(String text, String prefix) {
        if (text == null) {
            return null;
        }
        // Already prefixed text is returned unchanged.
        return text.startsWith(prefix) ? text : prefix + text;
    }
}
public class PrivateKeyUsageExtension { /** * Encode this extension value . */ private void encodeThis ( ) throws IOException { } }
if ( notBefore == null && notAfter == null ) { this . extensionValue = null ; return ; } DerOutputStream seq = new DerOutputStream ( ) ; DerOutputStream tagged = new DerOutputStream ( ) ; if ( notBefore != null ) { DerOutputStream tmp = new DerOutputStream ( ) ; tmp . putGeneralizedTime ( notBefore ) ; tagged . writeImplicit ( DerValue . createTag ( DerValue . TAG_CONTEXT , false , TAG_BEFORE ) , tmp ) ; } if ( notAfter != null ) { DerOutputStream tmp = new DerOutputStream ( ) ; tmp . putGeneralizedTime ( notAfter ) ; tagged . writeImplicit ( DerValue . createTag ( DerValue . TAG_CONTEXT , false , TAG_AFTER ) , tmp ) ; } seq . write ( DerValue . tag_Sequence , tagged ) ; this . extensionValue = seq . toByteArray ( ) ;
public class JCalendarPopup { /** * Invoked when the mouse enters a component . */ public void mouseEntered ( MouseEvent evt ) { } }
JLabel button = ( JLabel ) evt . getSource ( ) ; oldBorder = button . getBorder ( ) ; button . setBorder ( ROLLOVER_BORDER ) ;
public class CompactionRecordCountProvider { /** * Get the record count through filename . */ @ Override public long getRecordCount ( Path filepath ) { } }
String filename = filepath . getName ( ) ; Preconditions . checkArgument ( filename . startsWith ( M_OUTPUT_FILE_PREFIX ) || filename . startsWith ( MR_OUTPUT_FILE_PREFIX ) , String . format ( "%s is not a supported filename, which should start with %s, or %s." , filename , M_OUTPUT_FILE_PREFIX , MR_OUTPUT_FILE_PREFIX ) ) ; String prefixWithCounts = filename . split ( Pattern . quote ( SEPARATOR ) ) [ 0 ] ; if ( filename . startsWith ( M_OUTPUT_FILE_PREFIX ) ) { return Long . parseLong ( prefixWithCounts . substring ( M_OUTPUT_FILE_PREFIX . length ( ) ) ) ; } return Long . parseLong ( prefixWithCounts . substring ( MR_OUTPUT_FILE_PREFIX . length ( ) ) ) ;
public class InteropFramework {

    /**
     * Returns the option at the given index in an array of options, or null when
     * the array is null or too short.
     *
     * @param options an array of Strings
     * @param index position of the option that is sought
     * @return the option or null
     */
    String getOption(String[] options, int index) {
        if (options == null || index >= options.length) {
            return null;
        }
        return options[index];
    }
}
public class WithMavenStepExecution2 {

    /**
     * Sets up the JDK selected for this withMaven step. If no JDK installation
     * name is provided, the agent-provided JDK is used and nothing is done.
     * When running inside a container the parameter is ignored with a warning,
     * because tool installations are not available there (JENKINS-36159).
     *
     * @throws AbortException if the named JDK installation or the node cannot be resolved
     */
    private void setupJDK() throws AbortException, IOException, InterruptedException {
        String jdkInstallationName = step.getJdk();
        if (StringUtils.isEmpty(jdkInstallationName)) {
            console.println("[withMaven] using JDK installation provided by the build agent");
            return;
        }
        if (withContainer) {
            // see #detectWithContainer()
            LOGGER.log(Level.FINE, "Ignoring JDK installation parameter: {0}", jdkInstallationName);
            console.println("WARNING: \"withMaven(){...}\" step running within a container," +
                    " tool installations are not available see https://issues.jenkins-ci.org/browse/JENKINS-36159. " +
                    "You have specified a JDK installation \"" + jdkInstallationName + "\", which will be ignored.");
            return;
        }
        console.println("[withMaven] using JDK installation " + jdkInstallationName);
        JDK jdk = Jenkins.getInstance().getJDK(jdkInstallationName);
        if (jdk == null) {
            throw new AbortException("Could not find the JDK installation: " + jdkInstallationName + ". Make sure it is configured on the Global Tool Configuration page");
        }
        Node node = getComputer().getNode();
        if (node == null) {
            throw new AbortException("Could not obtain the Node for the computer: " + getComputer().getName());
        }
        // Resolve the node/environment-specific JDK home and export its
        // environment variables into the step's override map.
        jdk = jdk.forNode(node, listener).forEnvironment(env);
        jdk.buildEnvVars(envOverride);
    }
}
public class VpnConnectionsInner { /** * Retrieves the details of a vpn connection . * @ param resourceGroupName The resource group name of the VpnGateway . * @ param gatewayName The name of the gateway . * @ param connectionName The name of the vpn connection . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws ErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the VpnConnectionInner object if successful . */ public VpnConnectionInner get ( String resourceGroupName , String gatewayName , String connectionName ) { } }
return getWithServiceResponseAsync ( resourceGroupName , gatewayName , connectionName ) . toBlocking ( ) . single ( ) . body ( ) ;
public class BeanContextServicesSupport {

    /**
     * Serializes all serializable services and their providers before the
     * children of this context are serialized. First an {@code int} is written,
     * indicating the number of serializable services. Then pairs of service
     * class and service provider are written one by one.
     *
     * @see com.googlecode.openbeans.beancontext.BeanContextSupport#bcsPreSerializationHook(java.io.ObjectOutputStream)
     */
    protected void bcsPreSerializationHook(ObjectOutputStream oos) throws IOException {
        super.bcsPreSerializationHook(oos);
        // serialize services
        synchronized (services) {
            // 'serializable' is the pre-maintained count of Serializable
            // providers; it must stay consistent with the pairs written below,
            // since deserialization reads exactly that many key/value pairs.
            oos.writeInt(serializable);
            for (Iterator iter = services.entrySet().iterator(); iter.hasNext();) {
                Entry entry = (Entry) iter.next();
                // Only providers that are Serializable can be written out.
                if (((BCSSServiceProvider) entry.getValue()).getServiceProvider() instanceof Serializable) {
                    oos.writeObject(entry.getKey());
                    oos.writeObject(entry.getValue());
                }
            }
        }
    }
}
public class Op { /** * Creates an array with the specified elements and an < i > operation expression < / i > on it . * @ param elements the elements of the array being created * @ return an operator , ready for chaining */ public static < T > Level0ArrayOperator < Byte [ ] , Byte > onArrayFor ( final Byte ... elements ) { } }
return onArrayOf ( Types . BYTE , VarArgsUtil . asRequiredObjectArray ( elements ) ) ;
public class FhirValidator { /** * Should the validator validate the resource against the base schema ( the schema provided with the FHIR distribution itself ) */ public synchronized boolean isValidateAgainstStandardSchematron ( ) { } }
if ( ! ourPhPresentOnClasspath ) { // No need to ask since we dont have Ph - Schematron . Also Class . forname will complain // about missing ph - schematron import . return false ; } Class < ? extends IValidatorModule > cls = SchematronProvider . getSchematronValidatorClass ( ) ; return haveValidatorOfType ( cls ) ;
public class TemplateBase { /** * Add layout section . Should not be used in user application or template * @ param name * @ param section */ private void __addLayoutSection ( String name , String section , boolean def ) { } }
Map < String , String > m = def ? layoutSections0 : layoutSections ; if ( m . containsKey ( name ) ) return ; m . put ( name , section ) ;
public class Preconditions { /** * < p > Evaluate the given { @ code predicate } using { @ code value } as input . < / p > * < p > The function throws { @ link PreconditionViolationException } if the * predicate is false . < / p > * @ param value The value * @ param predicate The predicate * @ param describer A describer for the predicate * @ param < T > The type of values * @ return value * @ throws PreconditionViolationException If the predicate is false */ public static < T > T checkPrecondition ( final T value , final Predicate < T > predicate , final Function < T , String > describer ) { } }
final boolean ok ; try { ok = predicate . test ( value ) ; } catch ( final Throwable e ) { final Violations violations = singleViolation ( failedPredicate ( e ) ) ; throw new PreconditionViolationException ( failedMessage ( value , violations ) , e , violations . count ( ) ) ; } return innerCheck ( value , ok , describer ) ;
public class MeasureUnit { /** * Create a MeasureUnit instance ( creates a singleton instance ) . * Normally this method should not be used , since there will be no formatting data * available for it , and it may not be returned by getAvailable ( ) . * However , for special purposes ( such as CLDR tooling ) , it is available . * @ deprecated This API is ICU internal only . * @ hide original deprecated declaration * @ hide draft / provisional / internal are hidden on Android */ @ Deprecated public static MeasureUnit internalGetInstance ( String type , String subType ) { } }
if ( type == null || subType == null ) { throw new NullPointerException ( "Type and subType must be non-null" ) ; } if ( ! "currency" . equals ( type ) ) { if ( ! ASCII . containsAll ( type ) || ! ASCII_HYPHEN_DIGITS . containsAll ( subType ) ) { throw new IllegalArgumentException ( "The type or subType are invalid." ) ; } } Factory factory ; if ( "currency" . equals ( type ) ) { factory = CURRENCY_FACTORY ; } else if ( "duration" . equals ( type ) ) { factory = TIMEUNIT_FACTORY ; } else { factory = UNIT_FACTORY ; } return MeasureUnit . addUnit ( type , subType , factory ) ;
public class SeaGlassStyle { /** * Returns the color for the specified state . This should NOT call any * methods on the < code > JComponent < / code > . * < p > Overridden to cause this style to populate itself with data from * UIDefaults , if necessary . < / p > * < p > In addition , SeaGlassStyle handles ColorTypes slightly differently * from Synth . < / p > * < ul > * < li > ColorType . BACKGROUND will equate to the color stored in UIDefaults * named " background " . < / li > * < li > ColorType . TEXT _ BACKGROUND will equate to the color stored in * UIDefaults named " textBackground " . < / li > * < li > ColorType . FOREGROUND will equate to the color stored in UIDefaults * named " textForeground " . < / li > * < li > ColorType . TEXT _ FOREGROUND will equate to the color stored in * UIDefaults named " textForeground " . < / li > * < / ul > * @ param ctx context SynthContext identifying requester * @ param type Type of color being requested . * @ return Color to render with */ @ Override public Color getColorForState ( SynthContext ctx , ColorType type ) { } }
String key = null ; if ( type == ColorType . BACKGROUND ) { key = "background" ; } else if ( type == ColorType . FOREGROUND ) { // map FOREGROUND as TEXT _ FOREGROUND key = "textForeground" ; } else if ( type == ColorType . TEXT_BACKGROUND ) { key = "textBackground" ; } else if ( type == ColorType . TEXT_FOREGROUND ) { key = "textForeground" ; } else if ( type == ColorType . FOCUS ) { key = "focus" ; } else if ( type != null ) { key = type . toString ( ) ; } else { return defaultColor ; } Color c = ( Color ) get ( ctx , key ) ; // if all else fails , return a default color ( which is a // ColorUIResource ) if ( c == null ) c = defaultColor ; return c ;
public class AxesChartSeriesNumericalNoErrorBars { /** * This is an internal method which shouldn ' t be called from client code . Use * XYChart . updateXYSeries or CategoryChart . updateXYSeries instead ! * @ param newXData * @ param newYData * @ param newExtraValues */ public void replaceData ( double [ ] newXData , double [ ] newYData , double [ ] newExtraValues ) { } }
// Sanity check if ( newExtraValues != null && newExtraValues . length != newYData . length ) { throw new IllegalArgumentException ( "error bars and Y-Axis sizes are not the same!!!" ) ; } if ( newXData . length != newYData . length ) { throw new IllegalArgumentException ( "X and Y-Axis sizes are not the same!!!" ) ; } xData = newXData ; yData = newYData ; extraValues = newExtraValues ; setAllData ( ) ; calculateMinMax ( ) ;
public class WebDriverWaitUtils { /** * Waits until element is present on the DOM of a page and visible . Visibility means that the element is not only * displayed but also has a height and width that is greater than 0. * @ param elementLocator * identifier of element to be visible */ public static void waitUntilElementIsVisible ( final String elementLocator ) { } }
logger . entering ( elementLocator ) ; By by = HtmlElementUtils . resolveByType ( elementLocator ) ; ExpectedCondition < WebElement > condition = ExpectedConditions . visibilityOfElementLocated ( by ) ; waitForCondition ( condition ) ; logger . exiting ( ) ;
public class QueueRegistry { /** * Retrieves and returns a registered queue by name . * @ param queueName * The name of the queue to retrieve * @ return * The retrieved queue with name < code > queueName < / code > * @ throws IllegalArgumentException if there is queue with the name < code > queueName < / code > */ public BlockingQueue < CollectionDocument > getQueue ( String queueName ) { } }
Preconditions . checkArgument ( registry . containsKey ( queueName ) , "Queue %s does not exist" , queueName ) ; return registry . get ( queueName ) ;
public class MapPolyline {

    /**
     * Replies the geo-location of the point described by the specified distance.
     * The desired distance is <code>0</code> for the starting point and
     * {@link #getLength()} for the ending point.
     *
     * @param desired_distance is the distance for which the geo location must be computed.
     * @param shifting is the shifting distance (perpendicular offset; left/right by sign).
     * @param geoLocation is the point to set with geo-localized coordinates; must not be null.
     * @param tangent is the vector which will be set by the coordinates of the tangent
     *        at the replied point. If <code>null</code> the tangent will not be computed.
     */
    protected final void computeGeoLocationForDistance(double desired_distance, double shifting, Point2D<?, ?> geoLocation, Vector2D<?, ?> tangent) {
        assert geoLocation != null;
        // Remaining distance still to walk along the polyline.
        double desiredDistance = desired_distance;
        for (final PointGroup group : groups()) {
            Point2d prevPoint = null;
            for (final Point2d thepoint : group.points()) {
                if (prevPoint != null) {
                    // Compute the length between the current point and the previous point
                    double vx = thepoint.getX() - prevPoint.getX();
                    double vy = thepoint.getY() - prevPoint.getY();
                    final double norm = Math.hypot(vx, vy);
                    if (desiredDistance < norm && norm != 0) {
                        // The desired distance is on this part's segment
                        if (norm != 0) {
                            // Compute the unit direction vector and the target point
                            vx /= norm;
                            double px = vx * desiredDistance + prevPoint.getX();
                            vy /= norm;
                            double py = vy * desiredDistance + prevPoint.getY();
                            if (tangent != null) {
                                tangent.set(vx, vy);
                            }
                            // Shift the point on the left or on the right depending on
                            // the sign of the shifting value
                            if (shifting != 0) {
                                final Vector2d perpend = new Vector2d(vx, vy);
                                perpend.makeOrthogonal();
                                perpend.scale(shifting);
                                px += perpend.getX();
                                py += perpend.getY();
                            }
                            geoLocation.set(px, py);
                            return;
                        }
                        // NOTE(review): this branch looks unreachable — the enclosing
                        // condition already guarantees norm != 0. Kept unchanged
                        // (documentation-only review); confirm before removing.
                        if (tangent != null) {
                            tangent.set(Double.NaN, Double.NaN);
                        }
                        geoLocation.set(thepoint);
                        return;
                    }
                    // pass to the next couple of points
                    desiredDistance -= norm;
                }
                prevPoint = thepoint;
            }
        }
        // The end of the segment was reached: extrapolate from the last segment.
        final int ptsCount = getPointCount();
        final Point2d p0 = getPointAt(ptsCount - 2);
        final Point2d p1 = getPointAt(ptsCount - 1);
        if (tangent != null) {
            // The tangent is colinear to the last segment
            tangent.sub(p1, p0);
            tangent.normalize();
        }
        if (shifting != 0.) {
            final Vector2d perpend = new Vector2d(p1.getX() - p0.getX(), p1.getY() - p0.getY());
            perpend.normalize();
            perpend.makeOrthogonal();
            perpend.scale(shifting);
            geoLocation.set(p1.getX() + perpend.getX(), p1.getY() + perpend.getY());
        } else {
            geoLocation.set(p1);
        }
    }
}
public class ApiOvhDedicatedserver { /** * Retrieve partition charts * REST : GET / dedicated / server / { serviceName } / statistics / partition / { partition } / chart * @ param period [ required ] chart period * @ param serviceName [ required ] The internal name of your dedicated server * @ param partition [ required ] Partition */ public OvhChartReturn serviceName_statistics_partition_partition_chart_GET ( String serviceName , String partition , OvhRtmChartPeriodEnum period ) throws IOException { } }
String qPath = "/dedicated/server/{serviceName}/statistics/partition/{partition}/chart" ; StringBuilder sb = path ( qPath , serviceName , partition ) ; query ( sb , "period" , period ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhChartReturn . class ) ;
public class SelfContainedContainer { /** * Stops the service container and cleans up all file system resources . * @ throws Exception */ public void stop ( ) throws Exception { } }
final CountDownLatch latch = new CountDownLatch ( 1 ) ; this . serviceContainer . addTerminateListener ( info -> latch . countDown ( ) ) ; this . serviceContainer . shutdown ( ) ; latch . await ( ) ; executor . submit ( new Runnable ( ) { @ Override public void run ( ) { TempFileManager . deleteRecursively ( tmpDir ) ; } } ) ; executor . shutdown ( ) ;
public class ServerDumpPackager { /** * Copy relevant data ( like service data , shared config , etc ) to the dump dir to be zipped up . * @ param dumpDir * @ param installDir */ private void captureEnvData ( File dumpDir , File installDir ) { } }
File versionsSource = new File ( installDir , "lib/versions" ) ; File fixesSource = new File ( installDir , "lib/fixes" ) ; String sharedConfigDir = AccessController . doPrivileged ( new PrivilegedAction < String > ( ) { @ Override public String run ( ) { return System . getProperty ( "shared.config.dir" ) ; } } ) ; File sharedConfigSource = sharedConfigDir != null ? new File ( sharedConfigDir ) : new File ( installDir , "usr/shared/config" ) ; File versionsTarget = new File ( dumpDir , "service/versions" ) ; File fixesTarget = new File ( dumpDir , "service/fixes" ) ; File sharedConfigTarget = new File ( dumpDir , "usr/shared/config" ) ; if ( versionsTarget . mkdirs ( ) ) { try { FileUtils . copyDir ( versionsSource , versionsTarget ) ; } catch ( IOException e ) { Debug . printStackTrace ( e ) ; } } if ( fixesTarget . mkdirs ( ) ) { try { FileUtils . copyDir ( fixesSource , fixesTarget ) ; } catch ( IOException e ) { Debug . printStackTrace ( e ) ; } } if ( sharedConfigTarget . mkdirs ( ) ) { try { FileUtils . copyDir ( sharedConfigSource , sharedConfigTarget ) ; } catch ( IOException e ) { Debug . printStackTrace ( e ) ; } }
public class GoogleCloudStorageFileSystem { /** * Convert { @ code CreateFileOptions } to { @ code CreateObjectOptions } . */ public static CreateObjectOptions objectOptionsFromFileOptions ( CreateFileOptions options ) { } }
return new CreateObjectOptions ( options . overwriteExisting ( ) , options . getContentType ( ) , options . getAttributes ( ) ) ;
public class DOMAttribute { /** * - - - - - interface DOMImportable - - - - - */ @ Override public Node doImport ( Page newPage ) throws DOMException { } }
Attr newAttr = newPage . createAttribute ( name ) ; newAttr . setValue ( value ) ; return newAttr ;
public class Vector3f { /** * / * ( non - Javadoc ) * @ see org . joml . Vector3fc # div ( org . joml . Vector3fc , org . joml . Vector3f ) */ public Vector3f div ( Vector3fc v , Vector3f dest ) { } }
dest . x = x / v . x ( ) ; dest . y = y / v . y ( ) ; dest . z = z / v . z ( ) ; return dest ;
public class Assembly { /** * Submits the configured assembly to Transloadit for processing . * @ param isResumable boolean value that tells the assembly whether or not to use tus . * @ return { @ link AssemblyResponse } the response received from the Transloadit server . * @ throws RequestException if request to Transloadit server fails . * @ throws LocalOperationException if something goes wrong while running non - http operations . */ public AssemblyResponse save ( boolean isResumable ) throws RequestException , LocalOperationException { } }
Request request = new Request ( getClient ( ) ) ; options . put ( "steps" , steps . toMap ( ) ) ; // only do tus uploads if files will be uploaded if ( isResumable && getFilesCount ( ) > 0 ) { Map < String , String > tusOptions = new HashMap < String , String > ( ) ; tusOptions . put ( "tus_num_expected_upload_files" , Integer . toString ( getFilesCount ( ) ) ) ; AssemblyResponse response = new AssemblyResponse ( request . post ( "/assemblies" , options , tusOptions , null , null ) , true ) ; // check if the assembly returned an error if ( response . hasError ( ) ) { throw new RequestException ( "Request to Assembly failed: " + response . json ( ) . getString ( "error" ) ) ; } try { handleTusUpload ( response ) ; } catch ( IOException e ) { throw new LocalOperationException ( e ) ; } catch ( ProtocolException e ) { throw new RequestException ( e ) ; } return response ; } else { return new AssemblyResponse ( request . post ( "/assemblies" , options , null , files , fileStreams ) ) ; }
public class SymmOptimizer {

    /**
     * Move all the block residues of one repeat one position to the left or
     * right and move the corresponding boundary residues from the freePool to
     * the block, and vice versa.
     * The boundaries are determined by any irregularity (either a gap or a
     * discontinuity) in the alignment.
     *
     * @return true when a shift (or pivot fill) was performed, false otherwise
     */
    private boolean shiftRow() {
        int su = rnd.nextInt(order); // Select the repeat
        int rl = rnd.nextInt(2); // Select between moving right (0) or left (1)
        int res = rnd.nextInt(length); // Residue as a pivot
        // When the pivot residue is null try to add a residue from the freePool
        if (block.get(su).get(res) == null) {
            int right = res;
            int left = res;
            // Find the boundary to the right and left (nearest non-null entries)
            while (block.get(su).get(right) == null && right < length - 1) {
                right++;
            }
            while (block.get(su).get(left) == null && left > 0) {
                left--;
            }
            // If they both are null the whole row is null: nothing to do
            if (block.get(su).get(left) == null && block.get(su).get(right) == null) {
                return false;
            } else if (block.get(su).get(left) == null) {
                // Choose the sequentially previous residue of the known one
                Integer residue = block.get(su).get(right) - 1;
                if (freePool.contains(residue)) {
                    block.get(su).set(res, residue);
                    freePool.remove(residue); // removes by object, not index
                } else
                    return false;
            } else if (block.get(su).get(right) == null) {
                // Choose the sequentially next residue of the known one
                Integer residue = block.get(su).get(left) + 1;
                if (freePool.contains(residue)) {
                    block.get(su).set(res, residue);
                    freePool.remove(residue);
                } else
                    return false;
            } else {
                // If boundaries are consecutive swap null and position (R or L)
                // (unboxed int comparison: RHS is get(left) + 1, an int)
                if (block.get(su).get(right) == block.get(su).get(left) + 1) {
                    switch (rl) {
                    case 0: // to the right
                        block.get(su).set(right - 1, block.get(su).get(right));
                        block.get(su).set(right, null);
                        break;
                    case 1: // to the left
                        block.get(su).set(left + 1, block.get(su).get(left));
                        block.get(su).set(left, null);
                        break;
                    }
                } else {
                    // Choose randomly a residue in between left and right to add
                    Integer residue = rnd.nextInt(block.get(su).get(right) - block.get(su).get(left) - 1) + block.get(su).get(left) + 1;
                    if (freePool.contains(residue)) {
                        block.get(su).set(res, residue);
                        freePool.remove(residue);
                    }
                }
            }
            return true;
        }
        // When the pivot residue is non-null: shift the contiguous run around it
        switch (rl) {
        case 0: // Move to the right
            // Walk left until a gap (null) or a sequence discontinuity
            int leftBoundary = res - 1;
            int leftPrevRes = res;
            while (true) {
                if (leftBoundary < 0)
                    break;
                else {
                    if (block.get(su).get(leftBoundary) == null) {
                        break; // gap
                    } else if (block.get(su).get(leftPrevRes) > block.get(su).get(leftBoundary) + 1) {
                        break; // discontinuity
                    }
                }
                leftPrevRes = leftBoundary;
                leftBoundary--;
            }
            leftBoundary++;
            // Walk right until a gap (null) or a sequence discontinuity
            int rightBoundary = res + 1;
            int rightPrevRes = res;
            while (true) {
                if (rightBoundary == length)
                    break;
                else {
                    if (block.get(su).get(rightBoundary) == null) {
                        break; // gap
                    } else if (block.get(su).get(rightPrevRes) + 1 < block.get(su).get(rightBoundary)) {
                        break; // discontinuity
                    }
                }
                rightPrevRes = rightBoundary;
                rightBoundary++;
            }
            rightBoundary--;
            // Residues at the boundary
            Integer residueR0 = block.get(su).get(rightBoundary);
            Integer residueL0 = block.get(su).get(leftBoundary);
            // Remove residue at the right of the block and add to the freePool
            block.get(su).remove(rightBoundary);
            if (residueR0 != null) {
                freePool.add(residueR0);
                Collections.sort(freePool);
            }
            // Add the residue at the left of the block
            residueL0 -= 1; // cannot be null, throw exception if it is
            if (freePool.contains(residueL0)) {
                block.get(su).add(leftBoundary, residueL0);
                freePool.remove(residueL0);
            } else {
                block.get(su).add(leftBoundary, null);
            }
            break;
        case 1: // Move to the left (mirror of case 0)
            int leftBoundary1 = res - 1;
            int leftPrevRes1 = res;
            while (true) {
                if (leftBoundary1 < 0)
                    break;
                else {
                    if (block.get(su).get(leftBoundary1) == null) {
                        break; // gap
                    } else if (block.get(su).get(leftPrevRes1) > block.get(su).get(leftBoundary1) + 1) {
                        break; // discontinuity
                    }
                }
                leftPrevRes1 = leftBoundary1;
                leftBoundary1--;
            }
            leftBoundary1++;
            int rightBoundary1 = res + 1;
            int rightPrevRes1 = res;
            while (true) {
                if (rightBoundary1 == length)
                    break;
                else {
                    if (block.get(su).get(rightBoundary1) == null) {
                        break; // gap
                    } else if (block.get(su).get(rightPrevRes1) + 1 < block.get(su).get(rightBoundary1)) {
                        break; // discontinuity
                    }
                }
                rightPrevRes1 = rightBoundary1;
                rightBoundary1++;
            }
            rightBoundary1--;
            // Residues at the boundary
            Integer residueR1 = block.get(su).get(rightBoundary1);
            Integer residueL1 = block.get(su).get(leftBoundary1);
            // Add the residue at the right of the block
            residueR1 += 1; // cannot be null
            if (freePool.contains(residueR1)) {
                if (rightBoundary1 == length - 1)
                    block.get(su).add(residueR1);
                else
                    block.get(su).add(rightBoundary1 + 1, residueR1);
                freePool.remove(residueR1);
            } else {
                block.get(su).add(rightBoundary1 + 1, null);
            }
            // Remove the residue at the left of the block
            block.get(su).remove(leftBoundary1);
            freePool.add(residueL1);
            Collections.sort(freePool);
            break;
        }
        checkGaps();
        return true;
    }
}
public class Utils {

    /**
     * Same as {@link java.lang.System#getProperty(java.lang.String)}, except that a
     * {@code null}, empty or whitespace-only property name yields an empty string
     * instead of throwing or returning {@code null}.
     *
     * @param name the name of the property; may be {@code null} or blank
     * @return the property value, {@code null} when the property is not set, or an
     *         empty string when {@code name} is blank
     */
    public static String getSystemProperty(String name) {
        // Blank check done with the JDK only; the previous implementation pulled in
        // commons-lang StringUtils.isBlank for this trivial test. chars().allMatch
        // over Character.isWhitespace matches isBlank's exact semantics (an empty
        // stream is vacuously all-whitespace).
        if (name == null || name.chars().allMatch(Character::isWhitespace)) {
            return "";
        }
        return System.getProperty(name);
    }
}
public class CmsSiteManagerImpl { /** * Returns the site for the given resources root path , * or < code > null < / code > if the resources root path does not match any site . < p > * @ param rootPath the root path of a resource * @ return the site for the given resources root path , * or < code > null < / code > if the resources root path does not match any site * @ see # getSiteForSiteRoot ( String ) * @ see # getSiteRoot ( String ) */ public CmsSite getSiteForRootPath ( String rootPath ) { } }
if ( ( rootPath . length ( ) > 0 ) && ! rootPath . endsWith ( "/" ) ) { rootPath = rootPath + "/" ; } // most sites will be below the " / sites / " folder , CmsSite result = lookupSitesFolder ( rootPath ) ; if ( result != null ) { return result ; } // look through all folders that are not below " / sites / " String siteRoot = lookupAdditionalSite ( rootPath ) ; return ( siteRoot != null ) ? getSiteForSiteRoot ( siteRoot ) : null ;
public class PropertyAccessorHelper { /** * Gets object from field . * @ param from * the from * @ param field * the field * @ return the object * @ throws PropertyAccessException * the property access exception */ public static Object getObject ( Object from , Field field ) { } }
if ( ! field . isAccessible ( ) ) { field . setAccessible ( true ) ; } try { return field . get ( from ) ; } catch ( IllegalArgumentException iarg ) { throw new PropertyAccessException ( iarg ) ; } catch ( IllegalAccessException iacc ) { throw new PropertyAccessException ( iacc ) ; }
public class LogbackReconfigure { /** * 重新配置当前logback * @ param config logback的xml配置文件 * @ param context 要配置的context */ public static void reconfigure ( InputStream config , LoggerContext context ) { } }
if ( context == null ) { throw new NullPointerException ( "LoggerContext must not be null" ) ; } LoggerContext lc = context ; JoranConfigurator jc = new JoranConfigurator ( ) ; jc . setContext ( lc ) ; StatusUtil statusUtil = new StatusUtil ( lc ) ; List < SaxEvent > eventList = jc . recallSafeConfiguration ( ) ; URL mainURL = ConfigurationWatchListUtil . getMainWatchURL ( lc ) ; lc . reset ( ) ; long threshold = System . currentTimeMillis ( ) ; try { jc . doConfigure ( config ) ; if ( statusUtil . hasXMLParsingErrors ( threshold ) ) { fallbackConfiguration ( lc , eventList , mainURL ) ; } } catch ( JoranException e ) { fallbackConfiguration ( lc , eventList , mainURL ) ; }
public class SnakGroupImpl { /** * Construct a list of { @ link SnakGroup } objects from a map from property * ids to snak lists as found in JSON . * @ param snaks * the map with the data * @ return the result list */ public static List < SnakGroup > makeSnakGroups ( Map < String , List < Snak > > snaks , List < String > propertyOrder ) { } }
List < SnakGroup > result = new ArrayList < > ( snaks . size ( ) ) ; for ( String propertyName : propertyOrder ) { result . add ( new SnakGroupImpl ( snaks . get ( propertyName ) ) ) ; } return result ;
public class EncryptKit { /** * HmacSHA1加密 * @ param data 明文字符串 * @ param key 秘钥 * @ return 16进制密文 */ public static String hmacSHA1 ( String data , String key ) { } }
return hmacSHA1 ( data . getBytes ( ) , key . getBytes ( ) ) ;
public class VirtualMediaPanel {

    /**
     * We overload this to translate mouse events into the proper coordinates before they are
     * dispatched to any of the mouse listeners.
     */
    @Override
    protected void processMouseMotionEvent(MouseEvent event) {
        // shift the event by the virtual-view offset first, then let the superclass
        // dispatch the (now translated) event to registered listeners
        event.translatePoint(_vbounds.x, _vbounds.y);
        super.processMouseMotionEvent(event);
    }
}
public class DataStream { /** * Applies the given { @ link ProcessFunction } on the input stream , thereby * creating a transformed output stream . * < p > The function will be called for every element in the input streams and can produce zero * or more output elements . * @ param processFunction The { @ link ProcessFunction } that is called for each element * in the stream . * @ param outputType { @ link TypeInformation } for the result type of the function . * @ param < R > The type of elements emitted by the { @ code ProcessFunction } . * @ return The transformed { @ link DataStream } . */ @ Internal public < R > SingleOutputStreamOperator < R > process ( ProcessFunction < T , R > processFunction , TypeInformation < R > outputType ) { } }
ProcessOperator < T , R > operator = new ProcessOperator < > ( clean ( processFunction ) ) ; return transform ( "Process" , outputType , operator ) ;
public class OntopMappingSQLAllConfigurationImpl { /** * Please overload isMappingDefined ( ) instead . */ @ Override boolean isInputMappingDefined ( ) { } }
return super . isInputMappingDefined ( ) || options . mappingFile . isPresent ( ) || options . mappingGraph . isPresent ( ) || options . mappingReader . isPresent ( ) ;
public class ImageExtensions {

    /**
     * Convenience method to write the given {@link BufferedImage} object to the given
     * {@link File} object.
     *
     * @param bufferedImage the {@link BufferedImage} object to be written
     * @param formatName    the informal format name, e.g. "png"
     * @param outputfile    the output file
     * @return the output file
     * @throws IOException if an I/O error occurs, or if no registered image writer
     *                     supports {@code formatName}
     */
    public static File write(final BufferedImage bufferedImage, final String formatName,
        final File outputfile) throws IOException {
        // ImageIO.write returns false (without throwing) when no writer handles the
        // requested format; the previous code ignored that result and silently
        // produced no file. Surface the failure instead.
        if (!ImageIO.write(bufferedImage, formatName, outputfile)) {
            throw new IOException(
                "No appropriate ImageIO writer found for format '" + formatName + "'");
        }
        return outputfile;
    }
}
public class XbaseFormatter2 {

    /**
     * Checks whether the given lambda should be formatted as a block.
     * That includes newlines after and before the brackets, and a fresh line for each
     * expression.
     *
     * <p>Note: this is Xtend-generated Java — the {@code _tripleNotEquals} /
     * {@code _switchResult} locals are the compiler's lowering of Xtend operators and
     * switch expressions.
     */
    protected boolean isMultilineLambda(final XClosure closure) {
        // locate the closing "]" of the lambda in the node model, if present
        final ILeafNode closingBracket = this._nodeModelAccess.nodeForKeyword(closure, "]");
        HiddenLeafs _hiddenLeafsBefore = null;
        if (closingBracket != null) {
            _hiddenLeafsBefore = this._hiddenLeafAccess.getHiddenLeafsBefore(closingBracket);
        }
        boolean _tripleNotEquals = (_hiddenLeafsBefore != null);
        if (_tripleNotEquals) {
            // a newline directly before "]" means the author already wrote it multiline
            int _newLines = this._hiddenLeafAccess.getHiddenLeafsBefore(closingBracket).getNewLines();
            return (_newLines > 0);
        }
        // fall back to inspecting the body: a block with more than one expression,
        // each on its own line, is treated as multiline
        boolean _switchResult = false;
        XExpression _expression = closure.getExpression();
        final XExpression block = _expression;
        boolean _matched = false;
        if (block instanceof XBlockExpression) {
            _matched = true;
            _switchResult = ((((XBlockExpression) block).getExpressions().size() > 1)
                && this.isEachExpressionInOwnLine(((XBlockExpression) block).getExpressions()));
        }
        if (!_matched) {
            // any non-block body is formatted inline
            _switchResult = false;
        }
        return _switchResult;
    }
}
public class FNMRGImpl {

    /**
     * Reflective EMF feature accessor: returns the value of the structural feature
     * identified by {@code featureID}, delegating unknown ids to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            // each case maps a feature id constant from the generated package
            // to the corresponding getter
            case AfplibPackage.FNMRG__CHAR_BOX_WD:
                return getCharBoxWd();
            case AfplibPackage.FNMRG__CHAR_BOX_HT:
                return getCharBoxHt();
            case AfplibPackage.FNMRG__PAT_DOSET:
                return getPatDOset();
        }
        return super.eGet(featureID, resolve, coreType);
    }
}
public class SignatureBinder { /** * Example of not allowed literal variable usages across typeSignatures : * < p > < ul > * < li > x used in different base types : char ( x ) and varchar ( x ) * < li > x used in different positions of the same base type : decimal ( x , y ) and decimal ( z , x ) * < li > p used in combination with different literals , types , or literal variables : decimal ( p , s1 ) and decimal ( p , s2) * < / ul > */ private static void checkNoLiteralVariableUsageAcrossTypes ( Signature declaredSignature ) { } }
Map < String , TypeSignature > existingUsages = new HashMap < > ( ) ; for ( TypeSignature parameter : declaredSignature . getArgumentTypes ( ) ) { checkNoLiteralVariableUsageAcrossTypes ( parameter , existingUsages ) ; }
public class JSON {

    /**
     * Write a reconfiguration plan.
     *
     * @param plan the reconfiguration plan to write
     * @param f the output file. If it ends with '.gz' it will be gzipped
     * @throws IllegalArgumentException if an error occurred while writing the json
     */
    public static void write(ReconfigurationPlan plan, File f) {
        // makeOut picks a plain or gzip stream based on the file name; the
        // try-with-resources guarantees the writer is closed either way
        try (OutputStreamWriter out = makeOut(f)) {
            write(plan, out);
        } catch (IOException e) {
            // documented contract: I/O failures surface as IllegalArgumentException
            throw new IllegalArgumentException(e);
        }
    }
}
public class PositionDecoder {

    /**
     * Decides whether a covered distance is realistic for the time elapsed between two
     * messages. Some tolerance is induced for messages received shortly after one
     * another (Internet jitter in timestamps); the constants are derived empirically.
     *
     * @param timeDifference the time between the reception of both messages
     *        (presumably seconds, given 51.44 m/s = 100 knots — confirm with callers)
     * @param distance distance covered (presumably meters, see above)
     * @param surface is the airplane e.g. taxiing?
     * @return whether the distance is realistic
     */
    public static boolean withinThreshold(double timeDifference, double distance, boolean surface) {
        final double elapsed = Math.abs(timeDifference);
        final double covered = Math.abs(distance);
        // very short gaps with modest distances may just be timestamp jitter
        if (elapsed < 0.7 && covered < 2000) {
            return true;
        }
        // speed limit with a 2.5x tolerance factor:
        // ~100 knots (51.44 m/s) on the surface, ~1000 knots (514.4 m/s) airborne
        final double speedLimit = (surface ? 51.44 : 514.4) * 2.5;
        return covered / elapsed < speedLimit;
    }
}
public class HiveAvroORCQueryGenerator {

    /**
     * Generate DML mapping query to populate output schema table by selecting from
     * input schema table. This method assumes that each output schema field has a
     * corresponding source input table's field reference in form of the
     * 'flatten_source' property.
     *
     * @param inputAvroSchema Input schema that was used to obtain output schema (next argument)
     * @param outputOrcSchema Output schema (flattened or nested) that was generated using
     *        input schema and has lineage information compatible with input schema
     * @param inputTblName Input table name
     * @param outputTblName Output table name
     * @param optionalInputDbName Optional input DB name, defaults to 'default'
     * @param optionalOutputDbName Optional output DB name, defaults to 'default'
     * @param optionalPartitionDMLInfo Optional partition info as a map of partition
     *        key / partition value pairs
     * @param optionalOverwriteTable Optional overwrite table flag, defaults to true
     * @param optionalCreateIfNotExists Optional create-if-not-exists flag, defaults to false
     * @param isEvolutionEnabled If schema evolution is turned on
     * @param destinationTableMeta Optional destination table metadata
     * @param rowLimit Optional row limit
     * @return DML query
     */
    public static String generateTableMappingDML(Schema inputAvroSchema, Schema outputOrcSchema,
        String inputTblName, String outputTblName, Optional<String> optionalInputDbName,
        Optional<String> optionalOutputDbName, Optional<Map<String, String>> optionalPartitionDMLInfo,
        Optional<Boolean> optionalOverwriteTable, Optional<Boolean> optionalCreateIfNotExists,
        boolean isEvolutionEnabled, Optional<Table> destinationTableMeta, Optional<Integer> rowLimit) {
        Preconditions.checkNotNull(inputAvroSchema);
        Preconditions.checkNotNull(outputOrcSchema);
        Preconditions.checkArgument(StringUtils.isNotBlank(inputTblName));
        Preconditions.checkArgument(StringUtils.isNotBlank(outputTblName));

        // resolve optional arguments to their documented defaults
        String inputDbName = optionalInputDbName.isPresent() ? optionalInputDbName.get() : DEFAULT_DB_NAME;
        String outputDbName = optionalOutputDbName.isPresent() ? optionalOutputDbName.get() : DEFAULT_DB_NAME;
        boolean shouldOverwriteTable = optionalOverwriteTable.isPresent() ? optionalOverwriteTable.get() : true;
        boolean shouldCreateIfNotExists = optionalCreateIfNotExists.isPresent() ? optionalCreateIfNotExists.get() : false;

        log.debug("Input Schema: " + inputAvroSchema.toString());
        log.debug("Output Schema: " + outputOrcSchema.toString());

        // Start building Hive DML
        // Refer to the Hive DML manual for explanation of clauses:
        // https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DML#LanguageManualDML-InsertingdataintoHiveTablesfromqueries
        StringBuilder dmlQuery = new StringBuilder();

        // Insert query (overwrite vs. append)
        if (shouldOverwriteTable) {
            dmlQuery.append(String.format("INSERT OVERWRITE TABLE `%s`.`%s` %n", outputDbName, outputTblName));
        } else {
            dmlQuery.append(String.format("INSERT INTO TABLE `%s`.`%s` %n", outputDbName, outputTblName));
        }

        // Static partition spec: PARTITION (`k1`='v1', `k2`='v2', ...)
        if (optionalPartitionDMLInfo.isPresent()) {
            if (optionalPartitionDMLInfo.get().size() > 0) {
                dmlQuery.append("PARTITION (");
                boolean isFirstPartitionSpec = true;
                for (Map.Entry<String, String> partition : optionalPartitionDMLInfo.get().entrySet()) {
                    if (isFirstPartitionSpec) {
                        isFirstPartitionSpec = false;
                    } else {
                        dmlQuery.append(", ");
                    }
                    dmlQuery.append(String.format("`%s`='%s'", partition.getKey(), partition.getValue()));
                }
                dmlQuery.append(") \n");
            }
        }

        // If not exists
        if (shouldCreateIfNotExists) {
            dmlQuery.append(" IF NOT EXISTS \n");
        }

        // Select query
        dmlQuery.append("SELECT \n");

        // Column selection strategy:
        // 1. Evolution enabled, destination table absent  -> new schema (fresh table)
        // 2. Evolution enabled, destination table present -> new schema (ALTER TABLE
        //    runs before staging data moves to the final table)
        // 3. Evolution disabled, destination table absent -> new schema (fresh table)
        // 4. Evolution disabled, destination table present -> destination schema
        if (isEvolutionEnabled || !destinationTableMeta.isPresent()) {
            log.info("Generating DML using source schema");
            boolean isFirst = true;
            List<Schema.Field> fieldList = outputOrcSchema.getFields();
            for (Schema.Field field : fieldList) {
                String flattenSource = field.getProp("flatten_source");
                String colName;
                if (StringUtils.isNotBlank(flattenSource)) {
                    colName = flattenSource;
                } else {
                    colName = field.name();
                }
                // Escape the column name (dots separate nested fields)
                colName = colName.replaceAll("\\.", "`.`");
                if (isFirst) {
                    isFirst = false;
                } else {
                    dmlQuery.append(", \n");
                }
                dmlQuery.append(String.format(" `%s`", colName));
            }
        } else {
            log.info("Generating DML using destination schema");
            boolean isFirst = true;
            List<FieldSchema> fieldList = destinationTableMeta.get().getSd().getCols();
            for (FieldSchema field : fieldList) {
                String colName = StringUtils.EMPTY;
                if (field.isSetComment() && field.getComment().startsWith("from flatten_source ")) {
                    // Retrieve the source column (flatten_source) from the comment
                    colName = field.getComment().replaceAll("from flatten_source ", "").trim();
                } else {
                    // Or else find the field in the flattened schema by name
                    List<Schema.Field> evolvedFieldList = outputOrcSchema.getFields();
                    for (Schema.Field evolvedField : evolvedFieldList) {
                        if (evolvedField.name().equalsIgnoreCase(field.getName())) {
                            String flattenSource = evolvedField.getProp("flatten_source");
                            if (StringUtils.isNotBlank(flattenSource)) {
                                colName = flattenSource;
                            } else {
                                colName = evolvedField.name();
                            }
                            break;
                        }
                    }
                }
                // Escape the column name
                colName = colName.replaceAll("\\.", "`.`");
                // colName can be blank if it was deleted in the new evolved schema,
                // so we shouldn't try to fetch it
                if (StringUtils.isNotBlank(colName)) {
                    if (isFirst) {
                        isFirst = false;
                    } else {
                        dmlQuery.append(", \n");
                    }
                    dmlQuery.append(String.format(" `%s`", colName));
                }
            }
        }

        dmlQuery.append(String.format(" %n FROM `%s`.`%s` ", inputDbName, inputTblName));

        // Partition filter mirroring the PARTITION spec above, as a WHERE clause
        if (optionalPartitionDMLInfo.isPresent()) {
            if (optionalPartitionDMLInfo.get().size() > 0) {
                dmlQuery.append("WHERE ");
                boolean isFirstPartitionSpec = true;
                for (Map.Entry<String, String> partition : optionalPartitionDMLInfo.get().entrySet()) {
                    if (isFirstPartitionSpec) {
                        isFirstPartitionSpec = false;
                    } else {
                        dmlQuery.append(" AND ");
                    }
                    dmlQuery.append(String.format("`%s`='%s'", partition.getKey(), partition.getValue()));
                }
                dmlQuery.append(" \n");
            }
        }

        // Limit clause
        if (rowLimit.isPresent()) {
            dmlQuery.append(String.format("LIMIT %s", rowLimit.get()));
        }

        return dmlQuery.toString();
    }
}
public class Pageable { /** * Specifies a specific page to jump to . * @ param page the page to jump to */ private void setCurrentPage ( int page ) { } }
if ( page >= totalPages ) { this . currentPage = totalPages ; } else if ( page <= 1 ) { this . currentPage = 1 ; } else { this . currentPage = page ; } // now work out where the sub - list should start and end startingIndex = pageSize * ( currentPage - 1 ) ; if ( startingIndex < 0 ) { startingIndex = 0 ; } endingIndex = startingIndex + pageSize ; if ( endingIndex > list . size ( ) ) { endingIndex = list . size ( ) ; }
public class Query { /** * Executes this { @ link Query } generating a new { @ link View } from the result set . * @ return a new { @ link View } based on the result set of this { @ link Query } . * @ throws IllegalStateException if the { @ link View } is { @ literal null } . * @ throws IllegalArgumentException if the { @ link View } does not contain all the { @ link Column Columns } * in the selected projection defined by this { @ link Query } . * @ see org . cp . elements . data . struct . tabular . View * @ see # run ( View , Predicate ) * @ see # results ( ) * @ see # run ( ) */ public synchronized View execute ( ) { } }
this . resultSet . set ( null ) ; View from = getFrom ( ) ; List < Column > projection = resolveProjection ( from ) ; Predicate < Row > predicate = resolvePredicate ( ) ; List < Row > resultSet = sort ( run ( from , predicate ) ) ; View view = AbstractView . of ( projection , resultSet ) ; this . resultSet . set ( view ) ; return view ;
public class FileInfo { /** * Indicates whether { @ code itemInfo } is a directory ; static version of { @ link # isDirectory ( ) } * to avoid having to create a FileInfo object just to use this logic . */ static boolean isDirectory ( GoogleCloudStorageItemInfo itemInfo ) { } }
return isGlobalRoot ( itemInfo ) || itemInfo . isBucket ( ) || objectHasDirectoryPath ( itemInfo . getObjectName ( ) ) ;
public class AbstractExtraLanguageValidator { /** * Collect the check methods . * @ param clazz the type to explore . * @ param visitedClasses the visited classes . * @ param result the collected methods . */ protected void collectMethods ( Class < ? extends AbstractExtraLanguageValidator > clazz , Collection < Class < ? > > visitedClasses , Collection < MethodWrapper > result ) { } }
if ( ! visitedClasses . add ( clazz ) ) { return ; } for ( final Method method : clazz . getDeclaredMethods ( ) ) { if ( method . getAnnotation ( Check . class ) != null && method . getParameterTypes ( ) . length == 1 ) { result . add ( createMethodWrapper ( method ) ) ; } } final Class < ? extends AbstractExtraLanguageValidator > superClass = getSuperClass ( clazz ) ; if ( superClass != null ) { collectMethods ( superClass , visitedClasses , result ) ; }
public class AbstractPutObjectRequest {

    /**
     * Sets the optional customer-provided server-side encryption key to use to
     * encrypt the uploaded object, and returns the updated request object so that
     * additional method calls can be chained together.
     *
     * @param sseKey
     *            The optional customer-provided server-side encryption key to use to
     *            encrypt the uploaded object.
     * @return This updated request object so that additional method calls can be
     *         chained together.
     */
    public <T extends AbstractPutObjectRequest> T withSSECustomerKey(SSECustomerKey sseKey) {
        setSSECustomerKey(sseKey);
        // Unchecked by design: T is inferred at the call site, and callers are
        // expected to chain on the concrete request subtype they started with.
        @SuppressWarnings("unchecked")
        T t = (T) this;
        return t;
    }
}
public class MessageDigest { /** * Returns the length of the digest in bytes , or 0 if this operation is * not supported by the provider and the implementation is not cloneable . * @ return the digest length in bytes , or 0 if this operation is not * supported by the provider and the implementation is not cloneable . * @ since 1.2 */ public final int getDigestLength ( ) { } }
int digestLen = engineGetDigestLength ( ) ; if ( digestLen == 0 ) { try { MessageDigest md = ( MessageDigest ) clone ( ) ; byte [ ] digest = md . digest ( ) ; return digest . length ; } catch ( CloneNotSupportedException e ) { return digestLen ; } } return digestLen ;
public class CmsTabDialog {

    /**
     * Builds the html for the tab row of the tab dialog.<p>
     *
     * With fewer than two tabs, only a plain border line is rendered. Otherwise one
     * table cell per tab is emitted (active tab styled differently), followed by a
     * second row ({@code lineRow}) that draws the border line under the inactive tabs.
     *
     * @return the html for the tab row
     */
    public String dialogTabRow() {
        StringBuffer result = new StringBuffer(512);
        // collects the cells of the second (border line) row while tabs are rendered
        StringBuffer lineRow = new StringBuffer(256);
        List<String> tabNames = getTabs();
        if (tabNames.size() < 2) {
            // less than 2 tabs present, do not show them and create a border line
            result.append("<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" class=\"maxwidth\" style=\"empty-cells: show;\">\n");
            result.append("<tr>\n");
            result.append("\t<td class=\"dialogtabrow\"></td>\n");
            result.append("</tr>\n");
            result.append("</table>\n");
            return result.toString();
        }
        Iterator<String> i = tabNames.iterator();
        // 1-based tab index, compared against the active tab number
        int counter = 1;
        int activeTab = getActiveTab();
        result.append("<table border=\"0\" cellpadding=\"0\" cellspacing=\"0\" class=\"maxwidth\" style=\"empty-cells: show;\">\n");
        result.append("<tr>\n");
        while (i.hasNext()) {
            // build a tab entry
            String curTab = i.next();
            String curTabLink = "javascript:openTab('" + counter + "');";
            if (counter == activeTab) {
                // create the currently active tab
                int addDelta = 0;
                result.append("\t<td class=\"dialogtabactive\"");
                if (counter == 1) {
                    // for the first tab, add special html for correct layout
                    result.append(" style=\"border-left-width: 1px;\"");
                    addDelta = 1;
                }
                result.append(">");
                result.append("<span class=\"tabactive\" unselectable=\"on\"");
                // width heuristic: 8px per character (plus the first-tab border delta)
                result.append(" style=\"width: " + ((curTab.length() * 8) + addDelta) + "px;\"");
                result.append(">");
                result.append(curTab);
                result.append("</span></td>\n");
                // no border line under the active tab
                lineRow.append("\t<td></td>\n");
            } else {
                // create an inactive tab
                result.append("\t<td class=\"dialogtab\" unselectable=\"on\">");
                result.append("<a class=\"tab\" href=\"" + curTabLink + "\"");
                result.append(" style=\"width: " + (curTab.length() * 8) + "px;\"");
                result.append(">");
                result.append(curTab);
                result.append("</a></td>\n");
                lineRow.append("\t<td class=\"dialogtabrow\"></td>\n");
            }
            counter++;
        }
        result.append("\t<td class=\"maxwidth\"></td>\n");
        result.append("</tr>\n");
        result.append("<tr>\n");
        // append the collected border line row below the tab row
        result.append(lineRow);
        result.append("\t<td class=\"dialogtabrow\"></td>\n");
        result.append("</tr>\n");
        result.append("</table>\n");
        return result.toString();
    }
}
public class GhprbPullRequest { /** * Get the PullRequest object for this PR * @ param force If true , forces retrieval of the PR info from the github API . Use sparingly . * @ return a copy of the pull request * @ throws IOException if unable to connect to GitHub */ public GHPullRequest getPullRequest ( boolean force ) throws IOException { } }
if ( this . pr == null || force ) { setPullRequest ( repo . getActualPullRequest ( this . id ) ) ; } return pr ;
public class ChangesetInfo { /** * A shortcut to getTags ( ) . get ( " source " ) . split ( " ; " ) * @ return the source of the data entered . Common values include " bing " , " survey " , " local * knowledge " , " common knowledge " , " extrapolation " , " photograph " ( and more ) . null if * none supplied . */ public String [ ] getSources ( ) { } }
String source = tags != null ? tags . get ( "source" ) : null ; return source != null ? source . split ( "(\\s)?;(\\s)?" ) : null ;
public class AmazonApiGatewayV2Client {

    /**
     * Gets an Authorizer.
     *
     * @param getAuthorizerRequest
     * @return Result of the GetAuthorizer operation returned by the service.
     * @throws NotFoundException
     *         The resource specified in the request was not found.
     * @throws TooManyRequestsException
     *         The client is sending more than the allowed number of requests per unit of time.
     * @sample AmazonApiGatewayV2.GetAuthorizer
     */
    @Override
    public GetAuthorizerResult getAuthorizer(GetAuthorizerRequest request) {
        // run registered pre-execution handlers (they may return a substitute request)
        request = beforeClientExecution(request);
        return executeGetAuthorizer(request);
    }
}
public class PushMetricRegistryInstance { /** * Set the history module that the push processor is to use . */ public synchronized void setHistory ( CollectHistory history ) { } }
history_ = Optional . of ( history ) ; history_ . ifPresent ( getApi ( ) :: setHistory ) ; data_ . initWithHistoricalData ( history , getDecoratorLookBack ( ) ) ;
public class SeaGlassTextFieldUI { /** * DOCUMENT ME ! * @ param c DOCUMENT ME ! * @ param region DOCUMENT ME ! * @ return DOCUMENT ME ! */ private int getComponentState ( JComponent c , Region region ) { } }
if ( region == SeaGlassRegion . SEARCH_FIELD_CANCEL_BUTTON && c . isEnabled ( ) ) { if ( ( ( JTextComponent ) c ) . getText ( ) . length ( ) == 0 ) { return DISABLED ; } else if ( isCancelArmed ) { return PRESSED ; } return ENABLED ; } return SeaGlassLookAndFeel . getComponentState ( c ) ;
public class ComplexMetricSerde { /** * Converts intermediate representation of aggregate to byte [ ] . * @ param val intermediate representation of aggregate * @ return serialized intermediate representation of aggregate in byte [ ] */ public byte [ ] toBytes ( @ Nullable Object val ) { } }
if ( val != null ) { byte [ ] bytes = getObjectStrategy ( ) . toBytes ( val ) ; return bytes != null ? bytes : ByteArrays . EMPTY_ARRAY ; } else { return ByteArrays . EMPTY_ARRAY ; }
public class IndirectBigQueryOutputCommitter { /** * Runs an import job on BigQuery for the data in the output path in addition to calling the * delegate ' s commitJob . */ @ Override public void commitJob ( JobContext context ) throws IOException { } }
super . commitJob ( context ) ; // Get the destination configuration information . Configuration conf = context . getConfiguration ( ) ; TableReference destTable = BigQueryOutputConfiguration . getTableReference ( conf ) ; String destProjectId = BigQueryOutputConfiguration . getProjectId ( conf ) ; String writeDisposition = BigQueryOutputConfiguration . getWriteDisposition ( conf ) ; Optional < BigQueryTableSchema > destSchema = BigQueryOutputConfiguration . getTableSchema ( conf ) ; String kmsKeyName = BigQueryOutputConfiguration . getKmsKeyName ( conf ) ; BigQueryFileFormat outputFileFormat = BigQueryOutputConfiguration . getFileFormat ( conf ) ; List < String > sourceUris = getOutputFileURIs ( ) ; try { getBigQueryHelper ( ) . importFromGcs ( destProjectId , destTable , destSchema . isPresent ( ) ? destSchema . get ( ) . get ( ) : null , kmsKeyName , outputFileFormat , writeDisposition , sourceUris , true ) ; } catch ( InterruptedException e ) { throw new IOException ( "Failed to import GCS into BigQuery" , e ) ; } cleanup ( context ) ;
public class TextUtils { /** * Returns a CharSequence concatenating the specified CharSequences , * retaining their spans if any . */ public static CharSequence concat ( CharSequence ... text ) { } }
if ( text . length == 0 ) { return "" ; } if ( text . length == 1 ) { return text [ 0 ] ; } boolean spanned = false ; for ( int i = 0 ; i < text . length ; i ++ ) { if ( text [ i ] instanceof Spanned ) { spanned = true ; break ; } } StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < text . length ; i ++ ) { sb . append ( text [ i ] ) ; } if ( ! spanned ) { return sb . toString ( ) ; } SpannableString ss = new SpannableString ( sb ) ; int off = 0 ; for ( int i = 0 ; i < text . length ; i ++ ) { int len = text [ i ] . length ( ) ; if ( text [ i ] instanceof Spanned ) { copySpansFrom ( ( Spanned ) text [ i ] , 0 , len , Object . class , ss , off ) ; } off += len ; } return new SpannedString ( ss ) ;
public class Code {

    /**
     * Returns the local for {@code this} of type {@code type}. It is an error to call
     * {@code getThis()} if this is a static method.
     */
    public <T> Local<T> getThis(TypeId<T> type) {
        // thisLocal is only populated for instance methods
        if (thisLocal == null) {
            throw new IllegalStateException("static methods cannot access 'this'");
        }
        return coerce(thisLocal, type);
    }
}
public class CmsSearchIndex { /** * Removes the given backup folder of this index . < p > * @ param path the backup folder to remove */ protected void removeIndexBackup ( String path ) { } }
if ( ! isBackupReindexing ( ) ) { // if no backup is generated we don ' t need to do anything return ; } // check if the target directory already exists File file = new File ( path ) ; if ( ! file . exists ( ) ) { // index does not exist yet return ; } try { FSDirectory dir = FSDirectory . open ( file . toPath ( ) ) ; dir . close ( ) ; CmsFileUtil . purgeDirectory ( file ) ; } catch ( Exception e ) { LOG . error ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_IO_INDEX_BACKUP_REMOVE_2 , getName ( ) , path ) , e ) ; }
public class CmsXmlGroupContainerFactory {

    /**
     * Factory method to unmarshal (generate) a group container instance from a String
     * that contains XML data.<p>
     *
     * The given encoding is used when marshalling the XML again later.<p>
     *
     * <b>Warning:</b><br/>
     * This method does not support requested historic versions, it always loads the
     * most recent version. Use <code>{@link #unmarshal(CmsObject, CmsResource, ServletRequest)}</code>
     * for history support.<p>
     *
     * @param cms the cms context, if <code>null</code> no link validation is performed
     * @param xmlData the XML data in a String
     * @param encoding the encoding to use when marshalling the group container later
     * @param resolver the XML entity resolver to use
     * @return a group container instance unmarshalled from the String
     * @throws CmsXmlException if something goes wrong
     */
    public static CmsXmlGroupContainer unmarshal(CmsObject cms, String xmlData, String encoding,
        EntityResolver resolver) throws CmsXmlException {

        // parse the String into an XML document first, then delegate to the
        // document-based unmarshal overload
        return unmarshal(cms, CmsXmlUtils.unmarshalHelper(xmlData, resolver), encoding, resolver);
    }
}
public class SendInvitationImpl { /** * / * ( non - Javadoc ) * @ see com . tvd12 . ezyfox . core . command . SendInvitation # invitees ( com . tvd12 . ezyfox . core . entities . ApiBaseUser [ ] ) */ @ Override public SendInvitation invitees ( ApiBaseUser ... users ) { } }
this . invitees . addAll ( Arrays . asList ( users ) ) ; return this ;
public class FeatureSourceRetriever { /** * Retrieve the FeatureSource object from the data store . * @ return An OpenGIS FeatureSource object ; * @ throws LayerException * oops */ public SimpleFeatureSource getFeatureSource ( ) throws LayerException { } }
try { if ( dataStore instanceof WFSDataStore ) { return dataStore . getFeatureSource ( featureSourceName . replace ( ":" , "_" ) ) ; } else { return dataStore . getFeatureSource ( featureSourceName ) ; } } catch ( IOException e ) { throw new LayerException ( e , ExceptionCode . FEATURE_MODEL_PROBLEM , "Cannot find feature source " + featureSourceName ) ; } catch ( NullPointerException e ) { throw new LayerException ( e , ExceptionCode . FEATURE_MODEL_PROBLEM , "Cannot find feature source " + featureSourceName ) ; }
public class DecodedBitStreamParser {

    /**
     * Byte Compaction mode (see 5.4.3) permits all 256 possible 8-bit byte values to be encoded.
     * This includes all ASCII characters value 0 to 127 inclusive and provides for international
     * character set support.
     *
     * <p>Codewords are accumulated in groups of 5 as a base-900 number and expanded
     * to 6 bytes (base 256). Mode 901 additionally emits any trailing partial group
     * one byte per codeword; mode 924 assumes the data length is a multiple of 6.
     *
     * @param mode The byte compaction mode i.e. 901 or 924
     * @param codewords The array of codewords (data + error); codewords[0] holds the length
     * @param encoding Currently active character encoding
     * @param codeIndex The current index into the codeword array.
     * @param result The decoded data is appended to the result.
     * @return The next index into the codeword array.
     */
    private static int byteCompaction(int mode, int[] codewords, Charset encoding, int codeIndex, StringBuilder result) {
        ByteArrayOutputStream decodedBytes = new ByteArrayOutputStream();
        int count = 0;       // codewords accumulated in the current group (max 5)
        long value = 0;      // base-900 accumulator for the current group
        boolean end = false; // set when a mode-switch codeword terminates the run
        switch (mode) {
            case BYTE_COMPACTION_MODE_LATCH:
                // Total number of Byte Compaction characters to be encoded
                // is not a multiple of 6
                int[] byteCompactedCodewords = new int[6];
                int nextCode = codewords[codeIndex++];
                while ((codeIndex < codewords[0]) && !end) {
                    byteCompactedCodewords[count++] = nextCode;
                    // Base 900
                    value = 900 * value + nextCode;
                    nextCode = codewords[codeIndex++];
                    // perhaps it should be ok to check only nextCode >= TEXT_COMPACTION_MODE_LATCH
                    switch (nextCode) {
                        // Any mode-latch/macro codeword ends byte compaction; push it
                        // back (codeIndex--) so the caller can process it.
                        case TEXT_COMPACTION_MODE_LATCH:
                        case BYTE_COMPACTION_MODE_LATCH:
                        case NUMERIC_COMPACTION_MODE_LATCH:
                        case BYTE_COMPACTION_MODE_LATCH_6:
                        case BEGIN_MACRO_PDF417_CONTROL_BLOCK:
                        case BEGIN_MACRO_PDF417_OPTIONAL_FIELD:
                        case MACRO_PDF417_TERMINATOR:
                            codeIndex--;
                            end = true;
                            break;
                        default:
                            if ((count % 5 == 0) && (count > 0)) {
                                // Decode every 5 codewords
                                // Convert to Base 256
                                for (int j = 0; j < 6; ++j) {
                                    decodedBytes.write((byte) (value >> (8 * (5 - j))));
                                }
                                value = 0;
                                count = 0;
                            }
                            break;
                    }
                }
                // if the end of all codewords is reached the last codeword needs to be added
                if (codeIndex == codewords[0] && nextCode < TEXT_COMPACTION_MODE_LATCH) {
                    byteCompactedCodewords[count++] = nextCode;
                }
                // If Byte Compaction mode is invoked with codeword 901,
                // the last group of codewords is interpreted directly
                // as one byte per codeword, without compaction.
                for (int i = 0; i < count; i++) {
                    decodedBytes.write((byte) byteCompactedCodewords[i]);
                }
                break;
            case BYTE_COMPACTION_MODE_LATCH_6:
                // Total number of Byte Compaction characters to be encoded
                // is an integer multiple of 6
                while (codeIndex < codewords[0] && !end) {
                    int code = codewords[codeIndex++];
                    if (code < TEXT_COMPACTION_MODE_LATCH) {
                        count++;
                        // Base 900
                        value = 900 * value + code;
                    } else {
                        switch (code) {
                            // Mode-switch codeword: push back and stop.
                            case TEXT_COMPACTION_MODE_LATCH:
                            case BYTE_COMPACTION_MODE_LATCH:
                            case NUMERIC_COMPACTION_MODE_LATCH:
                            case BYTE_COMPACTION_MODE_LATCH_6:
                            case BEGIN_MACRO_PDF417_CONTROL_BLOCK:
                            case BEGIN_MACRO_PDF417_OPTIONAL_FIELD:
                            case MACRO_PDF417_TERMINATOR:
                                codeIndex--;
                                end = true;
                                break;
                        }
                    }
                    if ((count % 5 == 0) && (count > 0)) {
                        // Decode every 5 codewords
                        // Convert to Base 256
                        for (int j = 0; j < 6; ++j) {
                            decodedBytes.write((byte) (value >> (8 * (5 - j))));
                        }
                        value = 0;
                        count = 0;
                    }
                }
                break;
        }
        // Decode the accumulated bytes with the currently active character set.
        result.append(new String(decodedBytes.toByteArray(), encoding));
        return codeIndex;
    }
}
public class GobblinMultiTaskAttempt { /** * A method that shuts down all running tasks managed by this instance . * TODO : Call this from the right place . */ public void shutdownTasks ( ) throws InterruptedException { } }
log . info ( "Shutting down tasks" ) ; for ( Task task : this . tasks ) { task . shutdown ( ) ; } for ( Task task : this . tasks ) { task . awaitShutdown ( 1000 ) ; } for ( Task task : this . tasks ) { if ( task . cancel ( ) ) { log . info ( "Task {} cancelled." , task . getTaskId ( ) ) ; } else { log . info ( "Task {} could not be cancelled." , task . getTaskId ( ) ) ; } }
public class AbstractBpmnActivityBehavior { /** * Decides how to propagate the exception properly , e . g . as bpmn error or " normal " error . * @ param execution the current execution * @ param ex the exception to propagate * @ throws Exception if no error handler could be found */ protected void propagateException ( ActivityExecution execution , Exception ex ) throws Exception { } }
BpmnError bpmnError = checkIfCauseOfExceptionIsBpmnError ( ex ) ; if ( bpmnError != null ) { propagateBpmnError ( bpmnError , execution ) ; } else { propagateExceptionAsError ( ex , execution ) ; }
public class ModelContext { /** * Convenience method to provide an new context for an return parameter * @ param groupName - group name of the docket * @ param type - type * @ param documentationType - for documentation type * @ param alternateTypeProvider - alternate type provider * @ param genericNamingStrategy - how generic types should be named * @ param ignorableTypes - types that can be ignored * @ return new context */ public static ModelContext returnValue ( String groupName , Type type , DocumentationType documentationType , AlternateTypeProvider alternateTypeProvider , GenericTypeNamingStrategy genericNamingStrategy , Set < Class > ignorableTypes ) { } }
return new ModelContext ( groupName , type , true , documentationType , alternateTypeProvider , genericNamingStrategy , ignorableTypes ) ;
public class MCF1RGImpl {

    /**
     * Sets the code page name and, when adapters are attached, emits a SET
     * notification carrying the old and new values (standard EMF generated
     * setter pattern - do not hand-edit).
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setCPName(String newCPName) {
        String oldCPName = cpName;
        cpName = newCPName;
        // Only build and dispatch the notification when someone is listening.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, AfplibPackage.MCF1RG__CP_NAME, oldCPName, cpName));
    }
}
public class WorkflowsInner {

    /**
     * Validates the workflow definition.
     *
     * @param resourceGroupName The resource group name.
     * @param location The workflow location.
     * @param workflowName The workflow name.
     * @param workflow The workflow definition.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> validateAsync(String resourceGroupName, String location, String workflowName, WorkflowInner workflow, final ServiceCallback<Void> serviceCallback) {
        // Bridge the observable-based implementation to the callback-style
        // ServiceFuture API; parameter validation happens in the delegate.
        return ServiceFuture.fromResponse(validateWithServiceResponseAsync(resourceGroupName, location, workflowName, workflow), serviceCallback);
    }
}
public class GravityUtils { /** * Calculates movement area position within viewport area with gravity applied . * @ param settings Image settings * @ param out Output rectangle */ public static void getMovementAreaPosition ( Settings settings , Rect out ) { } }
tmpRect1 . set ( 0 , 0 , settings . getViewportW ( ) , settings . getViewportH ( ) ) ; Gravity . apply ( settings . getGravity ( ) , settings . getMovementAreaW ( ) , settings . getMovementAreaH ( ) , tmpRect1 , out ) ;
public class ClassDescriptorDef {

    /**
     * Returns all base types (direct and transitive).
     *
     * @return An iterator of the base types
     */
    public Iterator getAllBaseTypes() {
        // Seed with the direct base types ...
        ArrayList baseTypes = new ArrayList();
        baseTypes.addAll(_directBaseTypes.values());
        // ... then walk the list back-to-front, prepending each newly discovered
        // transitive base type at index 0. Because insertions happen before the
        // cursor, idx is incremented to keep pointing at the same element; the
        // loop terminates once every listed type's bases are already present.
        for (int idx = baseTypes.size() - 1; idx >= 0; idx--) {
            ClassDescriptorDef curClassDef = (ClassDescriptorDef) baseTypes.get(idx);
            for (Iterator it = curClassDef.getDirectBaseTypes(); it.hasNext();) {
                ClassDescriptorDef curBaseTypeDef = (ClassDescriptorDef) it.next();
                if (!baseTypes.contains(curBaseTypeDef)) {
                    baseTypes.add(0, curBaseTypeDef);
                    idx++;
                }
            }
        }
        return baseTypes.iterator();
    }
}
public class SystemClock { /** * Utility method to convert a value in the given unit to the internal time * @ param unit * The unit of the time parameter * @ param time * The time to convert * @ return The internal time representation of the parameter */ public static long timeToInternal ( TimeUnit unit , Double time ) { } }
return Math . round ( time * unit . getValue ( ) / TimeUnit . nanosecond . getValue ( ) ) ;
public class CmsGalleryService {

    /**
     * Returns the VFS root entries.<p>
     *
     * @return the VFS root entries
     * @throws CmsRpcException if something goes wrong
     */
    private List<CmsVfsEntryBean> getRootEntries() throws CmsRpcException {
        List<CmsVfsEntryBean> rootFolders = new ArrayList<CmsVfsEntryBean>();
        CmsObject cms = getCmsObject();
        try {
            // Prefer the site root "/"; if it is not visible to the current
            // user, fall back to the user's configured start folder.
            String path = "/";
            if (!cms.existsResource(path, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED)) {
                String startFolder = getWorkplaceSettings().getUserSettings().getStartFolder();
                if (cms.existsResource(startFolder, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED)) {
                    path = startFolder;
                } else {
                    // Neither folder is accessible: return an empty result.
                    path = null;
                }
            }
            if (path != null) {
                CmsResource rootFolderResource = getCmsObject().readResource(path, CmsResourceFilter.ONLY_VISIBLE_NO_DELETED);
                String title = cms.readPropertyObject(path, CmsPropertyDefinition.PROPERTY_TITLE, false).getValue();
                // The entry is marked as a root (true) and its editability is
                // computed for the current user.
                rootFolders.add(internalCreateVfsEntryBean(getCmsObject(), rootFolderResource, title, true, isEditable(getCmsObject(), rootFolderResource), null, false));
            }
        } catch (CmsException e) {
            // Delegate wrapping/reporting to the service's RPC error handler.
            error(e);
        }
        return rootFolders;
    }
}
public class EmailGlobalSettings { /** * Use SSL by default ? * @ param bUseSSL * < code > true < / code > to use it by default , < code > false < / code > if not . * @ return { @ link EChange } */ @ Nonnull public static EChange setUseSSL ( final boolean bUseSSL ) { } }
return s_aRWLock . writeLocked ( ( ) -> { if ( s_bUseSSL == bUseSSL ) return EChange . UNCHANGED ; s_bUseSSL = bUseSSL ; return EChange . CHANGED ; } ) ;
public class GeometryFactory {

    /**
     * Create a new geometry from an existing geometry. This will basically create a clone.
     *
     * @param geometry
     *        The original geometry.
     * @return Returns a clone, or null when the geometry type is not recognized.
     */
    public Geometry createGeometry(Geometry geometry) {
        // NOTE: LinearRing must be tested before LineString - presumably
        // LinearRing is a LineString subtype here, so the order of these
        // instanceof checks matters (verify against the type hierarchy).
        if (geometry instanceof Point) {
            return createPoint(geometry.getCoordinate());
        } else if (geometry instanceof LinearRing) {
            return createLinearRing(geometry.getCoordinates());
        } else if (geometry instanceof LineString) {
            return createLineString(geometry.getCoordinates());
        } else if (geometry instanceof Polygon) {
            // Clone the shell and every hole, then rebuild the polygon with
            // this factory's srid and precision.
            Polygon polygon = (Polygon) geometry;
            LinearRing exteriorRing = createLinearRing(polygon.getExteriorRing().getCoordinates());
            LinearRing[] interiorRings = new LinearRing[polygon.getNumInteriorRing()];
            for (int n = 0; n < polygon.getNumInteriorRing(); n++) {
                interiorRings[n] = createLinearRing(polygon.getInteriorRingN(n).getCoordinates());
            }
            return new Polygon(srid, precision, exteriorRing, interiorRings);
        } else if (geometry instanceof MultiPoint) {
            Point[] clones = new Point[geometry.getNumGeometries()];
            for (int n = 0; n < geometry.getNumGeometries(); n++) {
                clones[n] = createPoint(geometry.getGeometryN(n).getCoordinate());
            }
            return new MultiPoint(srid, precision, clones);
        } else if (geometry instanceof MultiLineString) {
            LineString[] clones = new LineString[geometry.getNumGeometries()];
            for (int n = 0; n < geometry.getNumGeometries(); n++) {
                clones[n] = createLineString(geometry.getGeometryN(n).getCoordinates());
            }
            return new MultiLineString(srid, precision, clones);
        } else if (geometry instanceof MultiPolygon) {
            // Recursive call clones each member polygon.
            Polygon[] clones = new Polygon[geometry.getNumGeometries()];
            for (int n = 0; n < geometry.getNumGeometries(); n++) {
                clones[n] = (Polygon) createGeometry(geometry.getGeometryN(n));
            }
            return new MultiPolygon(srid, precision, clones);
        }
        // Unknown geometry type: nothing to clone.
        return null;
    }
}
public class MilestonesApi { /** * Get the list of issues associated with the specified group milestone . * @ param groupIdOrPath the group in the form of an Integer ( ID ) , String ( path ) , or Group instance * @ param milestoneId the milestone ID to get the issues for * @ return a List of Issue for the milestone * @ throws GitLabApiException if any exception occurs */ public List < Issue > getGroupIssues ( Object groupIdOrPath , Integer milestoneId ) throws GitLabApiException { } }
Response response = get ( Response . Status . OK , getDefaultPerPageParam ( ) , "groups" , getGroupIdOrPath ( groupIdOrPath ) , "milestones" , milestoneId , "issues" ) ; return ( response . readEntity ( new GenericType < List < Issue > > ( ) { } ) ) ;
public class Nested { /** * If { @ code o } is a { @ link PrecedencedSelfDescribing } , * calls { @ link PrecedencedSelfDescribing # getDescriptionPrecedence ( ) } , * otherwise returns { @ link PrecedencedSelfDescribing # P _ ATOMIC } . * @ param o * @ return precedence value */ public static int precedenceOf ( Object o ) { } }
if ( o instanceof PrecedencedSelfDescribing ) { return ( ( PrecedencedSelfDescribing ) o ) . getDescriptionPrecedence ( ) ; } else { return PrecedencedSelfDescribing . P_ATOMIC ; }
public class FuncFlatRate { /** * { @ inheritDoc } */ @ Override public void resolve ( final ValueStack values ) throws Exception { } }
if ( values . size ( ) < getParameterCount ( ) ) throw new Exception ( "missing operands for " + toString ( ) ) ; try { final String tableName = values . popString ( ) ; final String [ ] keys = new String [ 5 ] ; for ( int a = 0 ; a < getParameterCount ( ) - 1 ; a ++ ) keys [ a ] = values . popString ( ) ; final EquationSupport model = getEqu ( ) . getSupport ( ) ; double rate = 0D ; rate = model . resolveRate ( tableName , getEqu ( ) . getBaseDate ( ) , keys [ 0 ] , keys [ 1 ] , keys [ 2 ] , keys [ 3 ] , keys [ 4 ] ) ; values . push ( new Double ( rate ) ) ; } catch ( final ParseException e ) { e . fillInStackTrace ( ) ; throw new Exception ( toString ( ) + "; " + e . getMessage ( ) , e ) ; }
public class GenXml {

    /**
     * Loads the bean properties of the given class into the table model.
     * (Original comment: "读入文件夹列表" / "read in folder list" - the wording
     * seems historical; the code introspects bean properties, not folders.)
     *
     * @param clasz fully qualified name of the class to introspect
     */
    private void ReadInTable(String clasz) {
        this.clasz = clasz;
        DefaultTableModel tableModel = (DefaultTableModel) table.getModel();
        // Clear any rows left over from a previous class.
        while (tableModel.getRowCount() > 0) {
            tableModel.removeRow(0);
        }
        try {
            // Introspect only properties declared below Object.
            BeanInfo sourceBean = Introspector.getBeanInfo(Class.forName(clasz), Object.class);
            PropertyDescriptor[] ps = sourceBean.getPropertyDescriptors();
            for (int i = 0; i < ps.length; i++) {
                // Skip the synthetic "class" property every bean exposes.
                if (ps[i].getPropertyType().equals(Class.class)) {
                    continue;
                }
                // Row columns: property name, property type name, mapped name
                // (defaults to the property name).
                tableModel.addRow(new Object[] { ps[i].getName(), ps[i].getPropertyType().getName(), ps[i].getName() });
            }
            hasRead = true;
        } catch (Exception e) {
            // User-facing error dialog (message text intentionally unchanged).
            JOptionPane.showMessageDialog(GenXml.this, "发生错误:" + e.getMessage());
        }
    }
}
public class MessageParserFactory { /** * Creates an instance of a concrete < code > AbstractMessageParser < / code > object . * @ param protocolDataUnit The reference < code > ProtocolDataUnit < / code > instance , which contains this * < code > AbstractMessageParser < / code > object . * @ param operationCode The operation code of the requested < code > AbstractMessageParser < / code > . * @ return The instance of the requested < code > AbstractMessageParser < / code > . * @ see org . jscsi . parser . OperationCode */ private static final AbstractMessageParser createParser ( final ProtocolDataUnit protocolDataUnit , final OperationCode operationCode ) { } }
switch ( operationCode ) { case LOGIN_REQUEST : return new LoginRequestParser ( protocolDataUnit ) ; case LOGIN_RESPONSE : return new LoginResponseParser ( protocolDataUnit ) ; case LOGOUT_REQUEST : return new LogoutRequestParser ( protocolDataUnit ) ; case LOGOUT_RESPONSE : return new LogoutResponseParser ( protocolDataUnit ) ; case TEXT_REQUEST : return new TextRequestParser ( protocolDataUnit ) ; case TEXT_RESPONSE : return new TextResponseParser ( protocolDataUnit ) ; case SCSI_DATA_OUT : return new DataOutParser ( protocolDataUnit ) ; case SCSI_DATA_IN : return new DataInParser ( protocolDataUnit ) ; case NOP_OUT : return new NOPOutParser ( protocolDataUnit ) ; case NOP_IN : return new NOPInParser ( protocolDataUnit ) ; case R2T : return new Ready2TransferParser ( protocolDataUnit ) ; case REJECT : return new RejectParser ( protocolDataUnit ) ; case SNACK_REQUEST : return new SNACKRequestParser ( protocolDataUnit ) ; case SCSI_TM_REQUEST : return new TaskManagementFunctionRequestParser ( protocolDataUnit ) ; case SCSI_TM_RESPONSE : return new TaskManagementFunctionResponseParser ( protocolDataUnit ) ; case SCSI_COMMAND : return new SCSICommandParser ( protocolDataUnit ) ; case SCSI_RESPONSE : return new SCSIResponseParser ( protocolDataUnit ) ; default : if ( LOGGER . isErrorEnabled ( ) ) { LOGGER . error ( "Parser not supported with this operation code " + operationCode ) ; } throw new NoSuchElementException ( ) ; }
public class Handlers { /** * Handler that appends the JVM route to the session cookie * @ param sessionCookieName The session cookie name * @ param jvmRoute The JVM route to append * @ param next The next handler * @ return The handler */ public static JvmRouteHandler jvmRoute ( final String sessionCookieName , final String jvmRoute , HttpHandler next ) { } }
return new JvmRouteHandler ( next , sessionCookieName , jvmRoute ) ;
public class PhysicsUtil {

    /**
     * Replies the new velocity according to a previous velocity and
     * a mouvement during a given time.
     *
     * <p><code>velocity = movement / dt</code>
     *
     * @param movement is the movement distance.
     * @param dt is the time
     * @return a new speed
     */
    @Pure
    @Inline(value = "PhysicsUtil.getPhysicsEngine().speed(($1), ($2))", imported = {PhysicsUtil.class})
    public static double speed(double movement, double dt) {
        // Delegate to the currently installed physics engine; the @Inline
        // annotation lets callers be compiled to the direct engine call.
        return engine.speed(movement, dt);
    }
}
public class Token {

    /**
     * Setter for wordForms - sets "A Token is related to one or more WordForm".
     * Generated UIMA JCas accessor: stores the FSArray reference into this
     * annotation's feature slot in the CAS.
     *
     * @generated
     * @param v value to set into the feature
     */
    public void setWordForms(FSArray v) {
        // Type-system consistency check (active only when featOkTst is enabled).
        if (Token_Type.featOkTst && ((Token_Type) jcasType).casFeat_wordForms == null)
            jcasType.jcas.throwFeatMissing("wordForms", "com.digitalpebble.rasp.Token");
        // Write the low-level feature-structure reference for wordForms.
        jcasType.ll_cas.ll_setRefValue(addr, ((Token_Type) jcasType).casFeatCode_wordForms, jcasType.ll_cas.ll_getFSRef(v));
    }
}
public class GPXPoint { /** * Set the vertical dilution of precision of a point . * @ param contentBuffer Contains the information to put in the table */ public final void setVdop ( StringBuilder contentBuffer ) { } }
ptValues [ GpxMetadata . PTVDOP ] = Double . parseDouble ( contentBuffer . toString ( ) ) ;
public class sslservicegroup { /** * Use this API to update sslservicegroup resources . */ public static base_responses update ( nitro_service client , sslservicegroup resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { sslservicegroup updateresources [ ] = new sslservicegroup [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new sslservicegroup ( ) ; updateresources [ i ] . servicegroupname = resources [ i ] . servicegroupname ; updateresources [ i ] . sessreuse = resources [ i ] . sessreuse ; updateresources [ i ] . sesstimeout = resources [ i ] . sesstimeout ; updateresources [ i ] . nonfipsciphers = resources [ i ] . nonfipsciphers ; updateresources [ i ] . ssl3 = resources [ i ] . ssl3 ; updateresources [ i ] . tls1 = resources [ i ] . tls1 ; updateresources [ i ] . serverauth = resources [ i ] . serverauth ; updateresources [ i ] . commonname = resources [ i ] . commonname ; updateresources [ i ] . sendclosenotify = resources [ i ] . sendclosenotify ; } result = update_bulk_request ( client , updateresources ) ; } return result ;
public class HttpOutboundServiceContextImpl { /** * Retrieve the next buffer of the body asynchronously . This will avoid any * body modifications , such as decompression or removal of chunked - encoding * markers . * If the read can be performed immediately , then a VirtualConnection will be returned and the provided callback will not be used . If the read is being done asychronously , then * null will be returned and the callback used when complete . The force input flag allows the caller to force the asynchronous read to always occur , and thus the callback to * always be used . * The caller is responsible for releasing these buffers when finished with them as the HTTP Channel keeps no reference to them . * @ param callback * @ param bForce * @ return VirtualConnection * @ throws BodyCompleteException * - - if the entire body has already been read */ @ Override public VirtualConnection getRawResponseBodyBuffer ( InterChannelCallback callback , boolean bForce ) throws BodyCompleteException { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . entry ( tc , "getRawResponseBodyBuffer(async)" ) ; } setRawBody ( true ) ; VirtualConnection vc = getResponseBodyBuffer ( callback , bForce ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Tr . exit ( tc , "getRawResponseBodyBuffer(async): " + vc ) ; } return vc ;
public class IOUtils { /** * Instead of reading the file at once , reads the file by reading 2K blocks . Useful for reading special files , where the size of the file isn ' t * determinable , for example / proc / xxx files on linux . * @ param filename a { @ link java . lang . String } object . * @ return file content . * @ throws java . io . IOException if any . */ public static String readFileBufferedAsString ( final String filename ) throws IOException { } }
FileReader in = null ; try { StringBuilder result = new StringBuilder ( ) ; char [ ] buffer = new char [ 2048 ] ; in = new FileReader ( filename ) ; int len = 0 ; do { len = in . read ( buffer ) ; if ( len > 0 ) result . append ( buffer , 0 , len ) ; } while ( len > 0 ) ; return result . toString ( ) ; } finally { closeIgnoringException ( in ) ; }
public class ByteUtils { /** * Calculates the bit length of a given byte array * @ param bytes The byte array * @ return The number of bit */ public static int bitLength ( byte [ ] bytes ) { } }
Objects . requireNonNull ( bytes , Required . BYTES . toString ( ) ) ; int byteLength = bytes . length ; int length = 0 ; if ( byteLength <= MAX_BYTE_LENGTH && byteLength > 0 ) { length = byteLength * BYTES ; } return length ;
public class Event {

    /**
     * Copies the given map into a map of serializable values: nulls are kept,
     * Serializable values are passed through, and anything else is converted
     * via {@code toString()}.
     *
     * CHECKSTYLE.OFF: IllegalType
     */
    private static HashMap<String, ? super Serializable> convertToSerializable(Map<String, Object> objectMap) {
        HashMap<String, ? super Serializable> serializable = new HashMap<>(objectMap.size());
        for (Map.Entry<String, Object> entry : objectMap.entrySet()) {
            Object value = entry.getValue();
            if (value == null) {
                serializable.put(entry.getKey(), (String) null);
            } else if (value instanceof Serializable) {
                serializable.put(entry.getKey(), (Serializable) value);
            } else {
                // Non-serializable values are stored as their string form.
                serializable.put(entry.getKey(), value.toString());
            }
        }
        return serializable;
    }
}
public class HttpUtil {

    /**
     * Requests data via HTTP GET.
     *
     * @param host server host [http(s)://ip:port]
     * @param path server virtual path
     * @param headers request header settings (Map&lt;String, String&gt;)
     * @param param parameters (URL-encoded)
     * @return response data (String), or null when the request fails
     */
    public static String get(String host, String path, Map<String, String> headers, String param) {
        CloseableHttpClient httpClient = null;
        CloseableHttpResponse response = null;
        try {
            // Validate host: it must never be null, otherwise no request can be made.
            ExceptionUtil.notNull(host);
            // Create the HttpClient (presumably wrapClient configures SSL for
            // https hosts - verify against its implementation).
            httpClient = wrapClient(host);
            // Create the HttpGet and apply the default headers.
            HttpGet request = new HttpGet(buildUrl(host, path, param));
            defaultHeader(request);
            // Apply caller-supplied headers, if any.
            if (MapUtil.isNotEmpty(headers)) {
                for (Map.Entry<String, String> e : headers.entrySet()) {
                    request.addHeader(e.getKey(), e.getValue());
                }
            }
            // Execute the request.
            response = httpClient.execute(request);
            // Read the response entity as a string, when present.
            String result = null;
            if (response != null) {
                HttpEntity resEntity = response.getEntity();
                if (resEntity != null) {
                    result = EntityUtils.toString(resEntity, Charset.UTF_8);
                }
            }
            return result;
        } catch (IOException | ParseException e) {
            // NOTE(review): failures are only printed to stderr and null is
            // returned; consider a proper logger and/or propagating the error.
            e.printStackTrace();
        } finally {
            // Always release client and response resources.
            close(httpClient, response);
        }
        return null;
    }
}
public class DRL6Expressions {

    /**
     * ANTLR-generated rule (do not hand-edit logic):
     * src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:213:1:
     * annotationValue[AnnotatedDescrBuilder inDescrBuilder] returns [Object result] :
     * ( exp=expression | annos=annotationArray[inDescrBuilder] | anno=fullAnnotation[inDescrBuilder] );
     *
     * Dispatches on the next token to one of three alternatives and, when not
     * backtracking, stores the parsed value (expression text, annotation array,
     * or annotation descriptor) in {@code result}.
     */
    public final Object annotationValue(AnnotatedDescrBuilder inDescrBuilder) throws RecognitionException {
        Object result = null;
        ParserRuleReturnScope exp = null;
        java.util.List annos = null;
        AnnotationDescr anno = null;
        try {
            // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:214:3:
            // ( exp=expression | annos=annotationArray[inDescrBuilder] | anno=fullAnnotation[inDescrBuilder] )
            // Predict the alternative from a single token of lookahead.
            int alt21 = 3;
            switch (input.LA(1)) {
                case BOOL:
                case DECIMAL:
                case DECR:
                case DIV:
                case DOT:
                case FLOAT:
                case HEX:
                case ID:
                case INCR:
                case LEFT_PAREN:
                case LEFT_SQUARE:
                case LESS:
                case MINUS:
                case NEGATION:
                case NULL:
                case PLUS:
                case QUESTION_DIV:
                case STAR:
                case STRING:
                case TILDE:
                case TIME_INTERVAL: {
                    alt21 = 1;
                }
                    break;
                case LEFT_CURLY: {
                    alt21 = 2;
                }
                    break;
                case AT: {
                    alt21 = 3;
                }
                    break;
                default:
                    // No viable alternative: fail silently when backtracking,
                    // otherwise report the syntax error.
                    if (state.backtracking > 0) {
                        state.failed = true;
                        return result;
                    }
                    NoViableAltException nvae = new NoViableAltException("", 21, 0, input);
                    throw nvae;
            }
            switch (alt21) {
                case 1:
                    // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:214:5: exp=expression
                    {
                        pushFollow(FOLLOW_expression_in_annotationValue1070);
                        exp = expression();
                        state._fsp--;
                        if (state.failed) return result;
                        if (state.backtracking == 0) {
                            // Capture the raw source text of the expression.
                            if (buildDescr) result = (exp != null ? input.toString(exp.start, exp.stop) : null);
                        }
                    }
                    break;
                case 2:
                    // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:215:7: annos=annotationArray[inDescrBuilder]
                    {
                        pushFollow(FOLLOW_annotationArray_in_annotationValue1082);
                        annos = annotationArray(inDescrBuilder);
                        state._fsp--;
                        if (state.failed) return result;
                        if (state.backtracking == 0) {
                            if (buildDescr) result = annos.toArray();
                        }
                    }
                    break;
                case 3:
                    // src/main/resources/org/drools/compiler/lang/DRL6Expressions.g:216:7: anno=fullAnnotation[inDescrBuilder]
                    {
                        pushFollow(FOLLOW_fullAnnotation_in_annotationValue1095);
                        anno = fullAnnotation(inDescrBuilder);
                        state._fsp--;
                        if (state.failed) return result;
                        if (state.backtracking == 0) {
                            if (buildDescr) result = anno;
                        }
                    }
                    break;
            }
        } catch (RecognitionException re) {
            throw re;
        } finally {
            // do for sure before leaving
        }
        return result;
    }
}
public class ListDeploymentInstancesResult {

    /**
     * A list of instance IDs.
     * Lazily initializes the backing list on first access so the getter never
     * returns null (standard AWS SDK collection-accessor pattern).
     *
     * @return A list of instance IDs.
     */
    public java.util.List<String> getInstancesList() {
        if (instancesList == null) {
            instancesList = new com.amazonaws.internal.SdkInternalList<String>();
        }
        return instancesList;
    }
}
public class Gauge { /** * Defines if the LED is on ( if available ) * @ param ON */ public void setLedOn ( final boolean ON ) { } }
if ( null == ledOn ) { _ledOn = ON ; fireUpdateEvent ( LED_EVENT ) ; } else { ledOn . set ( ON ) ; }
public class TemplateUtil { /** * Replace each component tag with the key so it can be used in the replace writer . * @ param context the context to modify . * @ param taggedComponents the tagged components * @ return the keyed components */ public static Map < String , WComponent > mapTaggedComponents ( final Map < String , Object > context , final Map < String , WComponent > taggedComponents ) { } }
Map < String , WComponent > componentsByKey = new HashMap < > ( ) ; // Replace each component tag with the key so it can be used in the replace writer for ( Map . Entry < String , WComponent > tagged : taggedComponents . entrySet ( ) ) { String tag = tagged . getKey ( ) ; WComponent comp = tagged . getValue ( ) ; // The key needs to be something which would never be output by a Template . String key = "[WC-TemplateLayout-" + tag + "]" ; componentsByKey . put ( key , comp ) ; // Map the tag to the key in the context context . put ( tag , key ) ; } return componentsByKey ;
public class AbstractBufferedOutputStream {

    /**
     * {@inheritDoc}
     *
     * Flushes the filled portion of the internal buffer by handing a copy to
     * {@code writeBuffer}, then resets the fill counter. A no-op when the
     * buffer is empty.
     */
    @Override
    public void flush() throws IOException {
        if (curr > 0) {
            // Copy only the filled prefix; the callee owns the copy, so the
            // internal buffer can be reused immediately.
            writeBuffer(Arrays.copyOf(buf, curr));
            curr = 0;
        }
    }
}
public class Query { /** * Checks whether the provided object is NULL or NaN . */ private static boolean isUnaryComparison ( @ Nullable Object value ) { } }
return value == null || value . equals ( Double . NaN ) || value . equals ( Float . NaN ) ;
public class Reflections { /** * get all constructors annotated with a given annotation , including annotation member values matching * < p / > depends on MethodAnnotationsScanner configured */ public Set < Constructor > getConstructorsAnnotatedWith ( final Annotation annotation ) { } }
return filter ( getConstructorsAnnotatedWith ( annotation . annotationType ( ) ) , withAnnotation ( annotation ) ) ;