signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class KAFDocument { /** * Creates a new chunk . It assigns an appropriate ID to it . The Chunk is added to the document object . * @ param head the chunk head . * @ param phrase type of the phrase . * @ param terms the list of the terms in the chunk . * @ return a new chunk . */ public Chunk newChunk ( String phrase , Span < Term > span ) { } }
String newId = idManager . getNextId ( AnnotationType . CHUNK ) ; Chunk newChunk = new Chunk ( newId , span ) ; newChunk . setPhrase ( phrase ) ; annotationContainer . add ( newChunk , Layer . CHUNKS , AnnotationType . CHUNK ) ; return newChunk ;
public class RuntimeView { /** * Returns a new view schema based from an existing one . */ public static < T > Schema < T > createFrom ( RuntimeSchema < T > ms , Instantiator < T > instantiator , Factory vf , Predicate . Factory pf , String ... args ) { } }
return vf . create ( ms , instantiator , pf , args ) ;
public class DependencyNodeUtil { /** * Returns the first { @ link DependencyNode } object found that satisfy the filter . * @ param nodeIterator A tree iterator * @ param filter the { @ link DependencyNodeFilter } being used * @ return the first element that matches the filter . null if nothing is found * @ see # breadthFirstIterator ( DependencyNode ) * @ see # depthFirstIterator ( DependencyNode ) * @ see # preorderIterator ( DependencyNode ) */ public static < T > T selectFirst ( Iterator < T > nodeIterator , Predicate < T > filter ) { } }
while ( nodeIterator . hasNext ( ) ) { T element = nodeIterator . next ( ) ; if ( filter . accept ( element ) ) { return element ; } } return null ;
public class BaseBo { /** * Get a BO ' s attribute . * @ param attrName * @ param clazz * @ return */ public < T > T getAttribute ( String attrName , Class < T > clazz ) { } }
Lock lock = lockForRead ( ) ; try { return MapUtils . getValue ( attributes , attrName , clazz ) ; } finally { lock . unlock ( ) ; }
public class SonarResultParser { /** * Converts a Sonar severity to a Sputnik severity . * @ param severityName severity to convert . */ static Severity getSeverity ( String severityName ) { } }
switch ( severityName ) { case "BLOCKER" : case "CRITICAL" : case "MAJOR" : return Severity . ERROR ; case "MINOR" : return Severity . WARNING ; case "INFO" : return Severity . INFO ; default : log . warn ( "Unknown severity: " + severityName ) ; } return Severity . WARNING ;
public class TriangularSolver_DSCC { /** * Solves for the transpose of a lower triangular matrix against a dense matrix . L < sup > T < / sup > * x = b * @ param L Lower triangular matrix . Diagonal elements are assumed to be non - zero * @ param x ( Input ) Solution matrix ' b ' . ( Output ) matrix ' x ' */ public static void solveTranL ( DMatrixSparseCSC L , double [ ] x ) { } }
final int N = L . numCols ; for ( int j = N - 1 ; j >= 0 ; j -- ) { int idx0 = L . col_idx [ j ] ; int idx1 = L . col_idx [ j + 1 ] ; for ( int p = idx0 + 1 ; p < idx1 ; p ++ ) { x [ j ] -= L . nz_values [ p ] * x [ L . nz_rows [ p ] ] ; } x [ j ] /= L . nz_values [ idx0 ] ; }
public class CmsLog { /** * Render throwable using Throwable . printStackTrace . * < p > This code copy from " org . apache . log4j . DefaultThrowableRenderer . render ( Throwable throwable ) " < / p > * @ param throwable throwable , may not be null . * @ return string representation . */ public static String [ ] render ( final Throwable throwable ) { } }
StringWriter sw = new StringWriter ( ) ; PrintWriter pw = new PrintWriter ( sw ) ; try { throwable . printStackTrace ( pw ) ; } catch ( RuntimeException ex ) { // nothing to do } pw . flush ( ) ; LineNumberReader reader = new LineNumberReader ( new StringReader ( sw . toString ( ) ) ) ; ArrayList < String > lines = new ArrayList < > ( ) ; try { String line = reader . readLine ( ) ; while ( line != null ) { lines . add ( line ) ; line = reader . readLine ( ) ; } } catch ( IOException ex ) { if ( ex instanceof InterruptedIOException ) { Thread . currentThread ( ) . interrupt ( ) ; } lines . add ( ex . toString ( ) ) ; } // String [ ] tempRep = new String [ lines . size ( ) ] ; return lines . toArray ( new String [ 0 ] ) ;
public class SchemaHelper { /** * - - - - - private methods - - - - - */ private static PropertySourceGenerator getSourceGenerator ( final ErrorBuffer errorBuffer , final String className , final PropertyDefinition propertyDefinition ) throws FrameworkException { } }
final String propertyName = propertyDefinition . getPropertyName ( ) ; final Type propertyType = propertyDefinition . getPropertyType ( ) ; final Class < ? extends PropertySourceGenerator > parserClass = parserMap . get ( propertyType ) ; try { return parserClass . getConstructor ( ErrorBuffer . class , String . class , PropertyDefinition . class ) . newInstance ( errorBuffer , className , propertyDefinition ) ; } catch ( Throwable t ) { logger . warn ( "" , t ) ; } errorBuffer . add ( new InvalidPropertySchemaToken ( SchemaProperty . class . getSimpleName ( ) , propertyName , propertyName , "invalid_property_definition" , "Unknow value type " + source + ", options are " + Arrays . asList ( Type . values ( ) ) + "." ) ) ; throw new FrameworkException ( 422 , "Invalid property definition for property " + propertyDefinition . getPropertyName ( ) , errorBuffer ) ;
public class GcloudStructuredLog { /** * Parses a JSON string representing { @ code gcloud } structured log output . * @ return parsed JSON * @ throws JsonParseException if { @ code jsonString } has syntax errors or incompatible JSON element * type */ public static GcloudStructuredLog parse ( String jsonString ) throws JsonParseException { } }
Preconditions . checkNotNull ( jsonString ) ; try { GcloudStructuredLog log = new Gson ( ) . fromJson ( jsonString , GcloudStructuredLog . class ) ; if ( log == null ) { throw new JsonParseException ( "Empty input: \"" + jsonString + "\"" ) ; } return log ; } catch ( JsonSyntaxException e ) { throw new JsonParseException ( e ) ; }
public class Analytics { /** * Retrieve settings from the cache or the network : 1 . If the cache is empty , fetch new settings . * 2 . If the cache is not stale , use it . 2 . If the cache is stale , try to get new settings . */ @ Private ProjectSettings getSettings ( ) { } }
ProjectSettings cachedSettings = projectSettingsCache . get ( ) ; if ( isNullOrEmpty ( cachedSettings ) ) { return downloadSettings ( ) ; } long expirationTime = cachedSettings . timestamp ( ) + SETTINGS_REFRESH_INTERVAL ; if ( expirationTime > System . currentTimeMillis ( ) ) { return cachedSettings ; } ProjectSettings downloadedSettings = downloadSettings ( ) ; if ( isNullOrEmpty ( downloadedSettings ) ) { return cachedSettings ; } return downloadedSettings ;
public class GeoParser { /** * Takes an unstructured text document ( as a String ) , extracts the * location names contained therein , and resolves them into * geographic entities representing the best match for those * location names . * @ param inputText unstructured text to be processed * @ return list of geo entities resolved from text * @ throws Exception */ public List < ResolvedLocation > parse ( String inputText ) throws Exception { } }
return parse ( inputText , ClavinLocationResolver . DEFAULT_ANCESTRY_MODE ) ;
public class JDBCResultSet { /** * < ! - - start generic documentation - - > * Retrieves the value of the designated column in the current row * of this < code > ResultSet < / code > object as * a stream of ASCII characters . The value can then be read in chunks from the * stream . This method is particularly * suitable for retrieving large < char > LONGVARCHAR < / char > values . * The JDBC driver will * do any necessary conversion from the database format into ASCII . * < P > < B > Note : < / B > All the data in the returned stream must be * read prior to getting the value of any other column . The next * call to a getter method implicitly closes the stream . Also , a * stream may return < code > 0 < / code > when the method * < code > InputStream . available < / code > * is called whether there is data available or not . * < ! - - end generic documentation - - > * < ! - - start release - specific documentation - - > * < div class = " ReleaseSpecificDocumentation " > * < h3 > HSQLDB - Specific Information : < / h3 > < p > * The limitation noted above does not apply to HSQLDB . < p > * When the column is of type CHAR and its variations , it requires no * conversion since it is represented internally already as a * Java String object . When the column is not of type CHAR and its * variations , the returned stream is based on a conversion to the * Java < code > String < / code > representation of the value . In either case , * the obtained stream is always equivalent to a stream of the low order * bytes from the value ' s String representation . 
< p > * HSQLDB SQL < code > CHAR < / code > and its variations are all Unicode strings * internally , so the recommended alternatives to this method are * { @ link # getString ( int ) getString } , * { @ link # getUnicodeStream ( int ) getUnicodeStream } ( < b > deprecated < / b > ) * and new to 1.7.0 : { @ link # getCharacterStream ( int ) getCharacterStream } * ( now prefered over the deprecated getUnicodeStream alternative ) . * < / div > * < ! - - end release - specific documentation - - > * @ param columnIndex the first column is 1 , the second is 2 , . . . * @ return a Java input stream that delivers the database column value * as a stream of one - byte ASCII characters ; * if the value is SQL < code > NULL < / code > , the * value returned is < code > null < / code > * @ exception SQLException if a database access error occurs or this method is * called on a closed result set */ public java . io . InputStream getAsciiStream ( int columnIndex ) throws SQLException { } }
String s = getString ( columnIndex ) ; if ( s == null ) { return null ; } try { return new ByteArrayInputStream ( s . getBytes ( "US-ASCII" ) ) ; } catch ( IOException e ) { return null ; }
public class ModelRegistry { /** * A mapping might apply to any type somewhere within a resource ' s * { @ link MappableTypeHierarchy } . This cache saves the registrar from searching * this entire hierarchy each time the model is resolved by remembering a found * resource type - & gt ; model relationship . */ private void cache ( final Key key , final Collection < LookupResult > sources , final int stateId ) { } }
synchronized ( this ) { if ( stateId == this . state . get ( ) ) { this . lookupCache . put ( key , sources ) ; } }
public class HttpSimulator { /** * { @ inheritDoc } */ @ Override public void init ( ServletConfig config ) throws ServletException { } }
super . init ( config ) ; Injector injector = Guice . createInjector ( new CoreModule ( ) , new HttpModule ( ) ) ; injector . injectMembers ( this ) ;
public class BatchMethodHandler { /** * Returns request type for the given odata uri . * @ param oDataUri the odata uri * @ return the request type * @ throws ODataTargetTypeException if unable to determine request type */ private Type getRequestType ( ODataRequest oDataRequest , ODataUri oDataUri ) throws ODataTargetTypeException { } }
TargetType targetType = WriteMethodUtil . getTargetType ( oDataRequest , entityDataModel , oDataUri ) ; return entityDataModel . getType ( targetType . typeName ( ) ) ;
public class StringColumn { /** * Returns a List & lt ; String & gt ; representation of all the values in this column * NOTE : Unless you really need a string consider using the column itself for large datasets as it uses much less memory * @ return values as a list of String . */ public List < String > asList ( ) { } }
List < String > strings = new ArrayList < > ( ) ; for ( String category : this ) { strings . add ( category ) ; } return strings ;
public class ChannelBuilder { /** * Creates ChannelBuilders . * @ param classLoader classLoader * @ param serviceDomain serviceDomain * @ param implementationModel implementationModel * @ return ChannelBuilders */ public static List < ChannelBuilder > builders ( ClassLoader classLoader , ServiceDomain serviceDomain , KnowledgeComponentImplementationModel implementationModel ) { } }
List < ChannelBuilder > builders = new ArrayList < ChannelBuilder > ( ) ; if ( implementationModel != null ) { ChannelsModel channelsModel = implementationModel . getChannels ( ) ; if ( channelsModel != null ) { for ( ChannelModel channelModel : channelsModel . getChannels ( ) ) { if ( channelModel != null ) { builders . add ( new ChannelBuilder ( classLoader , serviceDomain , channelModel ) ) ; } } } } return builders ;
public class IndexAliasMap { /** * Put alias . * @ param alias the alias * @ param index the index */ synchronized public void putAlias ( String alias , int index ) { } }
JMLambda . runByBoolean ( index < size ( ) , ( ) -> aliasIndexMap . put ( alias , index ) , ( ) -> JMExceptionManager . logRuntimeException ( log , "Wrong Index !!! - " + "dataList Size = " + dataList , "setKeyIndexMap" , index ) ) ;
public class ImplSurfDescribeOps { /** * Computes the gradient for a using the derivX kernel found in { @ link boofcv . alg . transform . ii . DerivativeIntegralImage } . * Assumes that the entire region , including the surrounding pixels , are inside the image . */ public static void gradientInner ( GrayF32 ii , double tl_x , double tl_y , double samplePeriod , int regionSize , double kernelSize , float [ ] derivX , float derivY [ ] ) { } }
// add 0.5 to c _ x and c _ y to have it round when converted to an integer pixel // this is faster than the straight forward method tl_x += 0.5 ; tl_y += 0.5 ; // round the kernel size int w = ( int ) ( kernelSize + 0.5 ) ; int r = w / 2 ; if ( r <= 0 ) r = 1 ; w = r * 2 + 1 ; int i = 0 ; for ( int y = 0 ; y < regionSize ; y ++ ) { int pixelsY = ( int ) ( tl_y + y * samplePeriod ) ; int indexRow1 = ii . startIndex + ( pixelsY - r - 1 ) * ii . stride - r - 1 ; int indexRow2 = indexRow1 + r * ii . stride ; int indexRow3 = indexRow2 + ii . stride ; int indexRow4 = indexRow3 + r * ii . stride ; for ( int x = 0 ; x < regionSize ; x ++ , i ++ ) { int pixelsX = ( int ) ( tl_x + x * samplePeriod ) ; final int indexSrc1 = indexRow1 + pixelsX ; final int indexSrc2 = indexRow2 + pixelsX ; final int indexSrc3 = indexRow3 + pixelsX ; final int indexSrc4 = indexRow4 + pixelsX ; final float p0 = ii . data [ indexSrc1 ] ; final float p1 = ii . data [ indexSrc1 + r ] ; final float p2 = ii . data [ indexSrc1 + r + 1 ] ; final float p3 = ii . data [ indexSrc1 + w ] ; final float p11 = ii . data [ indexSrc2 ] ; final float p4 = ii . data [ indexSrc2 + w ] ; final float p10 = ii . data [ indexSrc3 ] ; final float p5 = ii . data [ indexSrc3 + w ] ; final float p9 = ii . data [ indexSrc4 ] ; final float p8 = ii . data [ indexSrc4 + r ] ; final float p7 = ii . data [ indexSrc4 + r + 1 ] ; final float p6 = ii . data [ indexSrc4 + w ] ; final float left = p8 - p9 - p1 + p0 ; final float right = p6 - p7 - p3 + p2 ; final float top = p4 - p11 - p3 + p0 ; final float bottom = p6 - p9 - p5 + p10 ; derivX [ i ] = right - left ; derivY [ i ] = bottom - top ; // System . out . printf ( " % 2d % 2d % 2d % 2d dx = % 6.2f dy = % 6.2f \ n " , x , y , pixelsX , pixelsY , derivX [ i ] , derivY [ i ] ) ; } }
public class TransliteratorRegistry { /** * Returns an enumeration over visible variant names for the given * source and target . * @ return An < code > Enumeration < / code > over < code > String < / code > objects */ public Enumeration < String > getAvailableVariants ( String source , String target ) { } }
CaseInsensitiveString cisrc = new CaseInsensitiveString ( source ) ; CaseInsensitiveString citrg = new CaseInsensitiveString ( target ) ; Map < CaseInsensitiveString , List < CaseInsensitiveString > > targets = specDAG . get ( cisrc ) ; if ( targets == null ) { return new IDEnumeration ( null ) ; } List < CaseInsensitiveString > variants = targets . get ( citrg ) ; if ( variants == null ) { return new IDEnumeration ( null ) ; } return new IDEnumeration ( Collections . enumeration ( variants ) ) ;
public class ChannelSelector { /** * @ see com . ibm . ws . ffdc . FFDCSelfIntrospectable # introspectSelf ( ) */ @ Override public String [ ] introspectSelf ( ) { } }
List < String > rc = new ArrayList < String > ( ) ; rc . add ( Thread . currentThread ( ) . getName ( ) ) ; rc . add ( "quit: " + this . quit ) ; rc . add ( "waitingToQuit: " + this . waitingToQuit ) ; rc . add ( "# of keys=" + this . selector . keys ( ) . size ( ) ) ; try { for ( SelectionKey key : this . selector . keys ( ) ) { rc . add ( "key: " + key . hashCode ( ) + " valid=" + key . isValid ( ) + " ops=" + key . interestOps ( ) + " " + key . channel ( ) ) ; } } catch ( Throwable x ) { // If we get any exception , just return what we have so far . // This routine is not thread safe , and could get an exception , but // since it is only invoked when we are dumping data , due to a problem or // during debug this should be acceptable . rc . add ( "Exception Occurred Gathering Dump Data: " + x ) ; } return rc . toArray ( new String [ rc . size ( ) ] ) ;
public class GetFindingsStatisticsRequest { /** * Types of finding statistics to retrieve . * @ param findingStatisticTypes * Types of finding statistics to retrieve . * @ return Returns a reference to this object so that method calls can be chained together . * @ see FindingStatisticType */ public GetFindingsStatisticsRequest withFindingStatisticTypes ( FindingStatisticType ... findingStatisticTypes ) { } }
java . util . ArrayList < String > findingStatisticTypesCopy = new java . util . ArrayList < String > ( findingStatisticTypes . length ) ; for ( FindingStatisticType value : findingStatisticTypes ) { findingStatisticTypesCopy . add ( value . toString ( ) ) ; } if ( getFindingStatisticTypes ( ) == null ) { setFindingStatisticTypes ( findingStatisticTypesCopy ) ; } else { getFindingStatisticTypes ( ) . addAll ( findingStatisticTypesCopy ) ; } return this ;
public class ImageResolutionImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public void setYResol ( Integer newYResol ) { } }
Integer oldYResol = yResol ; yResol = newYResol ; if ( eNotificationRequired ( ) ) eNotify ( new ENotificationImpl ( this , Notification . SET , AfplibPackage . IMAGE_RESOLUTION__YRESOL , oldYResol , yResol ) ) ;
public class JSDefinedClass { /** * Adds a field to the list of field members of this defined class . * @ param sName * Name of this field . * @ param aInit * Initial value of this field . * @ return Newly generated field */ @ Nonnull public JSFieldVar field ( @ Nonnull @ Nonempty final String sName , @ Nullable final IJSExpression aInit ) { } }
final JSFieldVar aField = new JSFieldVar ( this , sName , aInit ) ; return addField ( aField ) ;
public class JspTranslatorUtil { /** * PI12939 */ private static void deleteClassFiles ( JspResources [ ] compileFiles ) { } }
final boolean isAnyTraceEnabled = com . ibm . ejs . ras . TraceComponent . isAnyTracingEnabled ( ) ; if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . entering ( CLASS_NAME , "deleteClassFiles" , "deleteClassFilesBeforeRecompile is set to true" ) ; } String classfileName = null ; if ( compileFiles != null && compileFiles . length > 0 ) { // delete classfiles we will produce for ( int i = 0 ; i < compileFiles . length ; i ++ ) { classfileName = compileFiles [ i ] . getGeneratedSourceFile ( ) . toString ( ) ; int end = classfileName . lastIndexOf ( "." ) ; classfileName = classfileName . substring ( 0 , end ) ; classfileName += ".class" ; if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . logp ( Level . FINE , CLASS_NAME , "deleteClassFiles" , "removing classfile " + ( i + 1 ) + " [" + classfileName + "]" ) ; } File clFile = new File ( classfileName ) ; if ( clFile . exists ( ) && clFile . isFile ( ) ) { boolean retval = clFile . delete ( ) ; if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . logp ( Level . FINE , CLASS_NAME , "deleteClassFiles" , "Removed file " + compileFiles [ i ] . getClassName ( ) + "? : [" + retval + "]" ) ; } } else { if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . logp ( Level . FINE , CLASS_NAME , "deleteClassFiles" , "File [" + clFile . toString ( ) + "] does not exist." ) ; } } // delete inner classes for this class end = classfileName . lastIndexOf ( File . separatorChar ) ; String directoryName = classfileName . substring ( 0 , end ) ; File directory = new File ( directoryName ) ; File [ ] icList = directory . listFiles ( new InnerclassFilenameFilter ( compileFiles [ i ] . getClassName ( ) ) ) ; for ( int j = 0 ; j < icList . length ; j ++ ) { if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . logp ( Level . 
FINE , CLASS_NAME , "deleteClassFiles" , "removing innerclassfile " + ( j + 1 ) + " [" + icList [ j ] + "]" ) ; } if ( icList [ j ] . exists ( ) && icList [ j ] . isFile ( ) ) { boolean retval = icList [ j ] . delete ( ) ; end = icList [ j ] . toString ( ) . lastIndexOf ( File . separatorChar ) ; String innerClassName = icList [ j ] . toString ( ) . substring ( end + 1 ) ; if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . logp ( Level . FINE , CLASS_NAME , "deleteClassFiles" , "Removed innerclassfile " + innerClassName + "? : [" + retval + "]" ) ; } } else { if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . logp ( Level . FINE , CLASS_NAME , "deleteClassFiles" , "Innerclassfile [" + icList [ j ] . toString ( ) + "] does not exist." ) ; } } } } } if ( isAnyTraceEnabled && logger . isLoggable ( Level . FINER ) ) { logger . exiting ( CLASS_NAME , "deleteClassFiles" , "Exiting" ) ; }
public class SftpFile { /** * Determine whether the file is pointing to a pipe . * @ return boolean * @ throws SshException * @ throws SftpStatusException */ public boolean isFifo ( ) throws SftpStatusException , SshException { } }
// This is long hand because gcj chokes when it is not ? Investigate why if ( ( getAttributes ( ) . getPermissions ( ) . longValue ( ) & SftpFileAttributes . S_IFIFO ) == SftpFileAttributes . S_IFIFO ) return true ; return false ;
public class BigtableAsyncBufferedMutator { /** * { @ inheritDoc } */ @ Override public List < CompletableFuture < Void > > mutate ( List < ? extends Mutation > mutations ) { } }
return helper . mutate ( mutations ) . stream ( ) . map ( listenableFuture -> toCompletableFuture ( listenableFuture ) . thenApply ( r -> ( Void ) null ) ) . collect ( Collectors . toList ( ) ) ;
public class DateFormat { /** * Returns the date / time formatter with the default formatting style * for the default < code > FORMAT < / code > locale . * @ return a date / time formatter . * @ see Category # FORMAT */ public final static DateFormat getDateTimeInstance ( ) { } }
return get ( DEFAULT , DEFAULT , ULocale . getDefault ( Category . FORMAT ) , null ) ;
public class ByteArrayPersistedValueData { /** * { @ inheritDoc } */ public void writeExternal ( ObjectOutput out ) throws IOException { } }
out . writeInt ( orderNumber ) ; out . writeInt ( value . length ) ; if ( value . length > 0 ) { out . write ( value ) ; }
public class GSCPImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public boolean eIsSet ( int featureID ) { } }
switch ( featureID ) { case AfplibPackage . GSCP__XPOS : return XPOS_EDEFAULT == null ? xpos != null : ! XPOS_EDEFAULT . equals ( xpos ) ; case AfplibPackage . GSCP__YPOS : return YPOS_EDEFAULT == null ? ypos != null : ! YPOS_EDEFAULT . equals ( ypos ) ; } return super . eIsSet ( featureID ) ;
public class CmsGalleryController { /** * Opens the preview for the given resource by the given resource type . < p > * @ param resourcePath the resource path * @ param resourceType the resource type name */ public void openPreview ( String resourcePath , String resourceType ) { } }
if ( m_currentPreview != null ) { m_currentPreview . removePreview ( ) ; } String provider = getProviderName ( resourceType ) ; if ( m_previewFactoryRegistration . containsKey ( provider ) ) { m_handler . m_galleryDialog . useMaxDimensions ( ) ; m_currentPreview = m_previewFactoryRegistration . get ( provider ) . getPreview ( m_handler . m_galleryDialog ) ; m_currentPreview . openPreview ( resourcePath , ! m_resultsSelectable ) ; m_handler . hideShowPreviewButton ( false ) ; } else { CmsDebugLog . getInstance ( ) . printLine ( "Preview provider \"" + provider + "\" has not been registered properly." ) ; }
public class ListBuffer { /** * Return first element in this buffer and remove */ public A next ( ) { } }
A x = elems . head ; if ( ! elems . isEmpty ( ) ) { elems = elems . tail ; if ( elems . isEmpty ( ) ) last = null ; count -- ; } return x ;
public class MatchParserImpl { /** * Prime a MatchParser object with a String form selector so that its QueryExpr method * will return the corresponding Selector tree . * @ param parser an existing MatchParser object to be reused , or null if a new one is to * be created . * @ param selector the String - form selector to be parsed * @ param strict true if only the JMS standard syntax is to be accepted , false if the * extended syntax for identifiers , and set expressions , is to be accepted . * @ return a parser , primed with the supplied selector . This is the same parser object * that was supplied as an argument , if one was supplied . */ public static MatchParser prime ( MatchParser parser , String selector , boolean strict ) { } }
CharStream inStream = new IBMUnicodeCharStream ( new StringReader ( selector ) , 1 , 1 ) ; if ( parser == null ) parser = new MatchParserImpl ( inStream ) ; else ( ( MatchParserImpl ) parser ) . ReInit ( inStream ) ; ( ( MatchParserImpl ) parser ) . strict = strict ; return parser ;
public class PatreonAPI { /** * Get a list of campaigns the current creator is running - also contains other related data like Goals * Note : The first campaign data object is located at index 0 in the data list * @ return the list of campaigns * @ throws IOException Thrown when the GET request failed */ public JSONAPIDocument < List < Campaign > > fetchCampaigns ( ) throws IOException { } }
String path = new URIBuilder ( ) . setPath ( "current_user/campaigns" ) . addParameter ( "include" , "rewards,creator,goals" ) . toString ( ) ; return converter . readDocumentCollection ( getDataStream ( path ) , Campaign . class ) ;
public class PersonNameHelper { /** * Get the display name of the person consisting of titles , first name , middle * name and last name . { @ link # isFirstNameFirst ( ) } is considered ! * @ param aName * The name to be converted . May not be < code > null < / code > . * @ return The non - < code > null < / code > display name */ @ Nonnull public static String getAsCompleteDisplayName ( @ Nonnull final IPersonName aName ) { } }
if ( isFirstNameFirst ( ) ) return getAsCompleteDisplayNameFirstNameFirst ( aName ) ; return getAsCompleteDisplayNameLastNameFirst ( aName ) ;
public class Threshold { /** * Evaluates this threshold against the passed in metric . The returned status * is computed this way : * < ol > * < li > If at least one ok range is specified , if the value falls inside one * of the ok ranges , { @ link Status # OK } is returned . * < li > If at lease one critical range is specified , if the value falls * inside one of the critical ranges , { @ link Status # CRITICAL } is returned . * < li > If at lease one warning range is specified , if the value falls inside * one of the warning ranges , { @ link Status # WARNING } is returned . * < li > If neither of the previous match , but at least an OK range has been * specified , return { @ link Status # CRITICAL } . * < li > Otherwise return { @ link Status # OK } * < / ol > * @ param value * The value to be evaluated . * @ return The computes status . * @ see it . jnrpe . utils . thresholds . IThreshold # evaluate ( Metric ) */ public final Status evaluate ( final Metric metric ) { } }
if ( okThresholdList . isEmpty ( ) && warningThresholdList . isEmpty ( ) && criticalThresholdList . isEmpty ( ) ) { return Status . OK ; } // Perform evaluation escalation for ( Range range : okThresholdList ) { if ( range . isValueInside ( metric , prefix ) ) { return Status . OK ; } } for ( Range range : criticalThresholdList ) { if ( range . isValueInside ( metric , prefix ) ) { return Status . CRITICAL ; } } for ( Range range : warningThresholdList ) { if ( range . isValueInside ( metric , prefix ) ) { return Status . WARNING ; } } if ( ! okThresholdList . isEmpty ( ) ) { return Status . CRITICAL ; } return Status . OK ;
public class FastAdapterDiffUtil { /** * convenient function for { @ link # calculateDiff ( ModelAdapter , List , DiffCallback , boolean ) } * @ return the { @ link androidx . recyclerview . widget . DiffUtil . DiffResult } computed . */ public static < A extends ModelAdapter < Model , Item > , Model , Item extends IItem > DiffUtil . DiffResult calculateDiff ( final A adapter , final List < Item > items , final DiffCallback < Item > callback ) { } }
return calculateDiff ( adapter , items , callback , true ) ;
public class Solo { /** * Sets the status of the NavigationDrawer . Examples of status are : { @ code Solo . CLOSED } and { @ code Solo . OPENED } . * @ param status the status that the NavigationDrawer should be set to */ public void setNavigationDrawer ( final int status ) { } }
if ( config . commandLogging ) { Log . d ( config . commandLoggingTag , "setNavigationDrawer(" + status + ")" ) ; } setter . setNavigationDrawer ( status ) ;
public class EmxDataProvider { /** * Create an entity from the EMX entity * @ param entityType entity meta data * @ param emxEntity EMX entity * @ return MOLGENIS entity */ private Entity toEntity ( EntityType entityType , Entity emxEntity ) { } }
Entity entity = entityManager . create ( entityType , POPULATE ) ; for ( Attribute attr : entityType . getAtomicAttributes ( ) ) { if ( attr . getExpression ( ) == null && ! attr . isMappedBy ( ) ) { String attrName = attr . getName ( ) ; Object emxValue = emxEntity . get ( attrName ) ; AttributeType attrType = attr . getDataType ( ) ; switch ( attrType ) { case BOOL : case DATE : case DATE_TIME : case DECIMAL : case EMAIL : case ENUM : case HTML : case HYPERLINK : case INT : case LONG : case SCRIPT : case STRING : case TEXT : Object value = emxValue != null ? DataConverter . convert ( emxValue , attr ) : null ; if ( ( ! attr . isAuto ( ) || value != null ) && ( ! attr . hasDefaultValue ( ) || value != null ) ) { entity . set ( attrName , value ) ; } break ; case CATEGORICAL : case FILE : case XREF : // DataConverter . convert performs no conversion for reference types Entity refEntity = toRefEntity ( attr , emxValue ) ; // do not set generated auto refEntities to null if ( ( ! attr . isAuto ( ) || refEntity != null ) && ( ! attr . hasDefaultValue ( ) || refEntity != null ) ) { entity . set ( attrName , refEntity ) ; } break ; case CATEGORICAL_MREF : case MREF : // DataConverter . convert performs no conversion for reference types List < Entity > refEntities = toRefEntities ( attr , emxValue ) ; // do not set generated auto refEntities to null if ( ! refEntities . isEmpty ( ) ) { entity . set ( attrName , refEntities ) ; } break ; case COMPOUND : throw new IllegalAttributeTypeException ( attrType ) ; default : throw new UnexpectedEnumException ( attrType ) ; } } } return entity ;
public class ReduceOps { /** * Constructs a { @ code TerminalOp } that implements a functional reduce on * { @ code double } values , producing an optional double result . * @ param operator the combining function * @ return a { @ code TerminalOp } implementing the reduction */ public static TerminalOp < Double , OptionalDouble > makeDouble ( DoubleBinaryOperator operator ) { } }
Objects . requireNonNull ( operator ) ; class ReducingSink implements AccumulatingSink < Double , OptionalDouble , ReducingSink > , Sink . OfDouble { private boolean empty ; private double state ; public void begin ( long size ) { empty = true ; state = 0 ; } @ Override public void accept ( double t ) { if ( empty ) { empty = false ; state = t ; } else { state = operator . applyAsDouble ( state , t ) ; } } @ Override public OptionalDouble get ( ) { return empty ? OptionalDouble . empty ( ) : OptionalDouble . of ( state ) ; } @ Override public void combine ( ReducingSink other ) { if ( ! other . empty ) accept ( other . state ) ; } } return new ReduceOp < Double , OptionalDouble , ReducingSink > ( StreamShape . DOUBLE_VALUE ) { @ Override public ReducingSink makeSink ( ) { return new ReducingSink ( ) ; } } ;
public class AttributeCriterionPane {
    /**
     * Return the actual search criterion object, or null if not all fields
     * have been properly filled in (no attribute/operator selected, or an
     * empty value where one is required).
     *
     * @return search criterion, or null when the form is incomplete
     */
    public SearchCriterion getSearchCriterion() {
        Object operator = operatorSelect.getValue();
        Object value = valueItem.getValue();
        if (selectedAttribute != null && operator != null) {
            String operatorString = getOperatorCodeFromLabel(operator.toString());
            String valueString = "";
            String nameString = selectedAttribute.getName();
            if (value != null) {
                valueString = value.toString();
            }
            // CQL does not recognize "contains", so change to "like" and wrap
            // the value in wildcards:
            if ("contains".equals(operatorString)) {
                operatorString = "LIKE";
                valueString = CQL_WILDCARD + valueString + CQL_WILDCARD;
            }
            // If value was null, and no "contains" operator, return null:
            // (the null half of this check is unreachable — valueString is
            // always assigned above — but kept as written)
            if (valueString == null || valueString.length() == 0) {
                return null;
            }
            if (selectedAttribute instanceof PrimitiveAttributeInfo) {
                PrimitiveAttributeInfo attr = (PrimitiveAttributeInfo) selectedAttribute;
                if (attr.getType().equals(PrimitiveType.STRING) || attr.getType().equals(PrimitiveType.IMGURL)
                        || attr.getType().equals(PrimitiveType.URL)) {
                    // In case of a string, add quotes:
                    valueString = "'" + valueString + "'";
                } else if (attr.getType().equals(PrimitiveType.DATE)) {
                    if (value instanceof Date) {
                        // In case of a date, parse correctly for CQL: 2006-11-30T01:30:00Z
                        DateTimeFormat format = DateTimeFormat.getFormat(CQL_TIME_FORMAT);
                        if ("=".equals(operatorString)) {
                            // Date equals not supported by CQL, so we use the
                            // DURING operator over a one-day interval instead:
                            operatorString = "DURING";
                            Date date1 = (Date) value;
                            Date date2 = new Date(date1.getTime() + 86400000); // total milliseconds in a day
                            valueString = format.format(date1) + "/" + format.format(date2);
                        } else {
                            // format the date:
                            valueString = format.format((Date) value);
                        }
                    }
                }
            } else if (selectedAttribute instanceof AssociationAttributeInfo) {
                AssociationAttributeInfo assInfo = (AssociationAttributeInfo) selectedAttribute;
                // Many-to-one associations are filtered on their identifier field.
                if (AssociationType.MANY_TO_ONE == assInfo.getType()) {
                    nameString = nameString + ID_SUFFIX;
                }
            }
            // Now create the criterion:
            SearchCriterion criterion = new SearchCriterion();
            criterion.setAttributeName(nameString);
            criterion.setOperator(operatorString);
            criterion.setValue(valueString);
            return criterion;
        }
        return null;
    }
}
public class LicenseClient { /** * Deletes the specified license . * < p > Sample code : * < pre > < code > * try ( LicenseClient licenseClient = LicenseClient . create ( ) ) { * ProjectGlobalLicenseName license = ProjectGlobalLicenseName . of ( " [ PROJECT ] " , " [ LICENSE ] " ) ; * Operation response = licenseClient . deleteLicense ( license . toString ( ) ) ; * < / code > < / pre > * @ param license Name of the license resource to delete . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation deleteLicense ( String license ) { } }
DeleteLicenseHttpRequest request = DeleteLicenseHttpRequest . newBuilder ( ) . setLicense ( license ) . build ( ) ; return deleteLicense ( request ) ;
public class BiconjugateGradient { /** * Returns a simple preconditioner matrix that is the * trivial diagonal part of A in some cases . */ private static Preconditioner diagonalPreconditioner ( Matrix A ) { } }
return new Preconditioner ( ) { public void asolve ( double [ ] b , double [ ] x ) { double [ ] diag = A . diag ( ) ; int n = diag . length ; for ( int i = 0 ; i < n ; i ++ ) { x [ i ] = diag [ i ] != 0.0 ? b [ i ] / diag [ i ] : b [ i ] ; } } } ;
public class PropertyImpl {
    /**
     * {@inheritDoc}
     *
     * Loads this property's state from the given item data. Rejects node data,
     * then caches the typed property data, its type and path, and finally
     * resolves the property definitions against the parent node.
     */
    void loadData(ItemData data, NodeData parent) throws RepositoryException, ConstraintViolationException {
        // A property cannot be backed by node data.
        if (data.isNode()) {
            throw new RepositoryException("Load data failed: Property expected");
        }
        this.data = data;
        this.propertyData = (PropertyData) data;
        this.type = propertyData.getType();
        this.qpath = data.getQPath();
        // Invalidate any previously cached location; it will be recomputed lazily.
        this.location = null;
        // Must run after the fields above are set: definitions depend on multi-valuedness.
        initDefinitions(this.propertyData.isMultiValued(), parent);
    }
}
public class Client {
    /**
     * Connects to a JVM identified either by "host:port" (remote JMX over RMI)
     * or by a process id (local attach).
     *
     * @param hostportOrPid either "host:port" or a local JVM pid
     * @param login JMX user name, may be null
     * @param password JMX password, may be null
     * @return an open JMX connector
     * @throws IOException if the connection cannot be established
     */
    public static JMXConnector connect(final String hostportOrPid, final String login, final String password)
            throws IOException {
        if (!hostportOrPid.contains(":")) {
            // pid form, e.g. "./vjmxcli.sh - 112222 gcutil": attach locally.
            String localAddress = getLocalConnectorAddress(hostportOrPid);
            return JMXConnectorFactory.connect(new JMXServiceURL(localAddress));
        }
        // host:port form, e.g. "./vjmxcli.sh - 127.0.0.1:8060 gcutil"
        String url = "service:jmx:rmi://" + hostportOrPid + "/jndi/rmi://" + hostportOrPid + "/jmxrmi";
        JMXServiceURL rmiurl = new JMXServiceURL(url);
        return JMXConnectorFactory.connect(rmiurl, formatCredentials(login, password));
    }
}
public class LocaleFilter { /** * Gets a string representation of the given locale . * This default implementation only supports language , country , and variant . * Country will only be added when language present . * Variant will only be added when both language and country are present . */ protected String toLocaleString ( Locale locale ) { } }
String language = locale . getLanguage ( ) ; if ( language . isEmpty ( ) ) return "" ; String country = locale . getCountry ( ) ; if ( country . isEmpty ( ) ) return language ; String variant = locale . getVariant ( ) ; if ( variant . isEmpty ( ) ) { return language + '-' + country ; } else { return language + '-' + country + '-' + variant ; }
public class CipherInputStream { /** * Skips < code > n < / code > bytes of input from the bytes that can be read * from this input stream without blocking . * < p > Fewer bytes than requested might be skipped . * The actual number of bytes skipped is equal to < code > n < / code > or * the result of a call to * { @ link # available ( ) < code > available < / code > } , * whichever is smaller . * If < code > n < / code > is less than zero , no bytes are skipped . * < p > The actual number of bytes skipped is returned . * @ param n the number of bytes to be skipped . * @ return the actual number of bytes skipped . * @ exception IOException if an I / O error occurs . * @ since JCE1.2 */ public long skip ( long n ) throws IOException { } }
int available = ofinish - ostart ; if ( n > available ) { n = available ; } if ( n < 0 ) { return 0 ; } ostart += n ; return n ;
public class AtomicBitflags { /** * Atomically add the given flags to the current set * @ param flags to add * @ return the previous value */ public int set ( final int flags ) { } }
for ( ; ; ) { int current = _flags . get ( ) ; int newValue = current | flags ; if ( _flags . compareAndSet ( current , newValue ) ) { return current ; } }
public class S3ProxyHandler {
    /**
     * Writes a fixed {@code <Initiator>} XML stanza with placeholder identity
     * values (the proxy does not track real initiators).
     * TODO: bogus values
     *
     * @param xml the stream writer to emit into
     * @throws XMLStreamException if writing fails
     */
    private static void writeInitiatorStanza(XMLStreamWriter xml) throws XMLStreamException {
        xml.writeStartElement("Initiator");
        writeSimpleElement(xml, "ID", FAKE_INITIATOR_ID);
        writeSimpleElement(xml, "DisplayName", FAKE_INITIATOR_DISPLAY_NAME);
        xml.writeEndElement();
    }
}
public class ExtensionsConfigFileReader { /** * / * Roughly corresponds to pbx _ config . c : 2276 */ private static String [ ] harvestApplicationWithArguments ( String arg ) { } }
List < String > args = new ArrayList < > ( ) ; if ( arg . trim ( ) . length ( ) >= 0 ) { String appl = "" , data = "" ; /* Find the first occurrence of either ' ( ' or ' , ' */ int firstc = arg . indexOf ( ',' ) ; int firstp = arg . indexOf ( '(' ) ; if ( firstc != - 1 && ( firstp == - 1 || firstc < firstp ) ) { /* comma found , no parenthesis */ /* or both found , but comma found first */ String [ ] split = arg . split ( "," ) ; appl = split [ 0 ] ; for ( int i = 1 ; i < split . length ; i ++ ) data += split [ i ] + ( i + 1 < split . length ? "," : "" ) ; } else if ( firstc == - 1 && firstp == - 1 ) { /* Neither found */ data = "" ; } else { /* Final remaining case is parenthesis found first */ String [ ] split = arg . split ( "\\(" ) ; appl = split [ 0 ] ; for ( int i = 1 ; i < split . length ; i ++ ) data += split [ i ] + ( i + 1 < split . length ? "(" : "" ) ; int end = data . lastIndexOf ( ')' ) ; if ( end == - 1 ) { // ast _ log ( LOG _ WARNING , " No closing parenthesis found ? // ' % s ( % s ' \ n " , appl , data ) ; } else if ( end == data . length ( ) - 1 ) { data = data . substring ( 0 , end ) ; } data = processQuotesAndSlashes ( data , ',' , '|' ) ; } if ( ! appl . trim ( ) . equals ( "" ) ) { args . add ( appl . trim ( ) ) ; if ( ! data . trim ( ) . equals ( "" ) ) { String [ ] dataSplit = data . split ( "\\|" ) ; for ( String aDataSplit : dataSplit ) { args . add ( aDataSplit . trim ( ) ) ; } } } } return args . toArray ( new String [ args . size ( ) ] ) ;
public class ReservationReader {
    /**
     * Make the request to the Twilio API to perform the read.
     *
     * @param client TwilioRestClient with which to make the request
     * @return Reservation ResourceSet, lazily paging from the first page
     */
    @Override
    public ResourceSet<Reservation> read(final TwilioRestClient client) {
        // Fetch only the first page eagerly; subsequent pages are loaded on demand.
        return new ResourceSet<>(this, client, firstPage(client));
    }
}
public class App { /** * An array of < code > EnvironmentVariable < / code > objects that specify environment variables to be associated with the * app . After you deploy the app , these variables are defined on the associated app server instances . For more * information , see < a href = * " http : / / docs . aws . amazon . com / opsworks / latest / userguide / workingapps - creating . html # workingapps - creating - environment " * > Environment Variables < / a > . * < note > * There is no specific limit on the number of environment variables . However , the size of the associated data * structure - which includes the variable names , values , and protected flag values - cannot exceed 10 KB ( 10240 * Bytes ) . This limit should accommodate most if not all use cases , but if you do exceed it , you will cause an * exception ( API ) with an " Environment : is too large ( maximum is 10KB ) " message . * < / note > * @ param environment * An array of < code > EnvironmentVariable < / code > objects that specify environment variables to be associated * with the app . After you deploy the app , these variables are defined on the associated app server * instances . For more information , see < a href = * " http : / / docs . aws . amazon . com / opsworks / latest / userguide / workingapps - creating . html # workingapps - creating - environment " * > Environment Variables < / a > . < / p > < note > * There is no specific limit on the number of environment variables . However , the size of the associated * data structure - which includes the variable names , values , and protected flag values - cannot exceed 10 * KB ( 10240 Bytes ) . This limit should accommodate most if not all use cases , but if you do exceed it , you * will cause an exception ( API ) with an " Environment : is too large ( maximum is 10KB ) " message . */ public void setEnvironment ( java . util . Collection < EnvironmentVariable > environment ) { } }
if ( environment == null ) { this . environment = null ; return ; } this . environment = new com . amazonaws . internal . SdkInternalList < EnvironmentVariable > ( environment ) ;
public class ArrayUtils { /** * needed because Arrays . asList ( ) won ' t to autoboxing , * so if you give it a primitive array you get a * singleton list back with just that array as an element . */ public static List < Integer > asList ( int [ ] array ) { } }
List < Integer > l = new ArrayList < Integer > ( ) ; for ( int i : array ) { l . add ( i ) ; } return l ;
public class EntityClassReader { /** * Will retrieve the value of the geometryfield in the given object . If no geometryfield * exists , null is returned . If more than one exist , a random one is returned . * @ param objectToGet the object from which the geometry is to be fetched . * @ return the geometry of the given object * @ throws InvalidObjectReaderException If the class of objectToGet does not correspond with the entityclass of this reader . * @ throws IllegalArgumentException the given object may not be null */ public Geometry getGeometry ( Object objectToGet ) throws InvalidObjectReaderException { } }
if ( objectToGet == null ) { throw new IllegalArgumentException ( "The given object may not be null" ) ; } if ( objectToGet . getClass ( ) != entityClass ) { throw new InvalidObjectReaderException ( "Class of target object does not correspond with entityclass of this reader." ) ; } if ( geometryAccessor == null ) { return null ; } else { return ( Geometry ) geometryAccessor . getValueFrom ( objectToGet ) ; }
public class SaneSession {
    /**
     * Establishes a connection to the SANE daemon running on the given host on
     * the given port. If the connection cannot be established within the given
     * timeout, {@link java.net.SocketTimeoutException} is thrown.
     *
     * @param saneAddress address of the SANE host
     * @param port SANE daemon port
     * @param timeout connection timeout
     * @param timeUnit connection timeout unit
     * @param soTimeout socket read timeout (0 or negative disables it)
     * @param soTimeUnit socket read timeout unit
     * @return an initialized session over the connected socket
     * @throws IOException on connection or handshake failure
     */
    public static SaneSession withRemoteSane(InetAddress saneAddress, int port, long timeout, TimeUnit timeUnit,
            long soTimeout, TimeUnit soTimeUnit) throws IOException {
        long millis = timeUnit.toMillis(timeout);
        Preconditions.checkArgument(millis >= 0 && millis <= Integer.MAX_VALUE,
                "Timeout must be between 0 and Integer.MAX_VALUE milliseconds");
        // If the user specifies a non-zero timeout that rounds to 0 milliseconds,
        // set the timeout to 1 millisecond instead.
        // NOTE(review): despite the message, no clamping actually happens here —
        // millis stays 0 and connect() blocks indefinitely; confirm intent.
        if (timeout > 0 && millis == 0) {
            Logger.getLogger(SaneSession.class.getName()).log(Level.WARNING,
                    "Specified timeout of {0} {1} rounds to 0ms and was clamped to 1ms",
                    new Object[] { timeout, timeUnit });
        }
        Socket socket = new Socket();
        // Disable Nagle: the SANE wire protocol exchanges many small messages.
        socket.setTcpNoDelay(true);
        if (soTimeUnit != null && soTimeout > 0) {
            long soTimeoutMillis = soTimeUnit.toMillis(soTimeout);
            Preconditions.checkArgument(soTimeoutMillis >= 0 && soTimeoutMillis <= Integer.MAX_VALUE,
                    "Socket timeout must be between 0 and Integer.MAX_VALUE milliseconds");
            socket.setSoTimeout((int) soTimeoutMillis);
        }
        socket.connect(new InetSocketAddress(saneAddress, port), (int) millis);
        SaneSession session = new SaneSession(socket);
        // Perform the SANE protocol handshake before handing the session out.
        session.initSane();
        return session;
    }
}
public class SleepingTimer {
    /**
     * Enforces that the calling thread waits for the configured interval
     * between consecutive ticks: if the work since the previous tick finished
     * early, sleeps the remainder; if it overran the interval, logs a warning
     * instead of sleeping.
     *
     * @throws InterruptedException if the thread is interrupted while waiting
     */
    public void tick() throws InterruptedException {
        // mPreviousTickMs == 0 means this is the first tick: nothing to pace.
        if (mPreviousTickMs != 0) {
            long executionTimeMs = mClock.millis() - mPreviousTickMs;
            if (executionTimeMs > mIntervalMs) {
                mLogger.warn("{} last execution took {} ms. Longer than the interval {}", mThreadName,
                        executionTimeMs, mIntervalMs);
            } else {
                mSleeper.sleep(Duration.ofMillis(mIntervalMs - executionTimeMs));
            }
        }
        // Re-read the clock so the sleep itself is not counted as execution time.
        mPreviousTickMs = mClock.millis();
    }
}
public class LazySocketFactory { /** * Returns a socket that will lazily connect . */ public CheckedSocket getSocket ( Object session ) throws ConnectException , SocketException { } }
return CheckedSocket . check ( new LazySocket ( mFactory , session ) ) ;
public class ManagementLocksInner {
    /**
     * Deletes the management lock at the subscription level.
     * To delete management locks, you must have access to Microsoft.Authorization/*
     * or Microsoft.Authorization/locks/* actions. Of the built-in roles, only
     * Owner and User Access Administrator are granted those actions.
     *
     * @param lockName The name of lock to delete.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return an Observable emitting a single (Void) item on completion
     */
    public Observable<Void> deleteAtSubscriptionLevelAsync(String lockName) {
        // Unwrap the ServiceResponse envelope, leaving only the (empty) body.
        return deleteAtSubscriptionLevelWithServiceResponseAsync(lockName)
                .map(new Func1<ServiceResponse<Void>, Void>() {
                    @Override
                    public Void call(ServiceResponse<Void> response) {
                        return response.body();
                    }
                });
    }
}
public class Ec2MachineConfigurator { /** * Creates volume for EBS . * @ return volume ID of newly created volume */ private String createVolume ( String storageId , String snapshotId , int size ) { } }
String volumeType = Ec2IaasHandler . findStorageProperty ( this . targetProperties , storageId , VOLUME_TYPE_PREFIX ) ; if ( volumeType == null ) volumeType = "standard" ; CreateVolumeRequest createVolumeRequest = new CreateVolumeRequest ( ) . withAvailabilityZone ( this . availabilityZone ) . withVolumeType ( volumeType ) . withSize ( size ) ; // The size of the volume , in gigabytes . // EC2 snapshot IDs start with " snap - " . . . if ( ! Utils . isEmptyOrWhitespaces ( snapshotId ) && snapshotId . startsWith ( "snap-" ) ) createVolumeRequest . withSnapshotId ( snapshotId ) ; CreateVolumeResult createVolumeResult = this . ec2Api . createVolume ( createVolumeRequest ) ; return createVolumeResult . getVolume ( ) . getVolumeId ( ) ;
public class KeyVaultClientBaseImpl {
    /**
     * List secrets in a specified key vault.
     * The Get Secrets operation is applicable to the entire vault. However,
     * only the base secret identifier and its attributes are provided in the
     * response; individual secret versions are not listed. This operation
     * requires the secrets/list permission.
     *
     * @param vaultBaseUrl The vault name, for example https://myvault.vault.azure.net.
     * @param maxresults Maximum number of results to return in a page. If not specified, the service will return up to 25 results.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws KeyVaultErrorException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the PagedList&lt;SecretItem&gt; object if successful.
     */
    public PagedList<SecretItem> getSecrets(final String vaultBaseUrl, final Integer maxresults) {
        // Fetch the first page synchronously by blocking on the async call.
        ServiceResponse<Page<SecretItem>> response =
                getSecretsSinglePageAsync(vaultBaseUrl, maxresults).toBlocking().single();
        // Wrap in a PagedList that lazily (and blockingly) fetches follow-up pages.
        return new PagedList<SecretItem>(response.body()) {
            @Override
            public Page<SecretItem> nextPage(String nextPageLink) {
                return getSecretsNextSinglePageAsync(nextPageLink).toBlocking().single().body();
            }
        };
    }
}
public class ThriftClient {
    /**
     * (non-Javadoc)
     *
     * @see com.impetus.client.cassandra.CassandraClientBase#find(java.util.List,
     *      com.impetus.kundera.metadata.model.EntityMetadata, boolean,
     *      java.util.List, int, java.util.List)
     *
     * Executes a Thrift range/indexed slice query and materializes the rows
     * into entity objects. With no index clauses a full range scan (up to
     * maxResult keys) is performed; otherwise each IndexClause is executed via
     * get_indexed_slices.
     */
    @Override
    public List find(List<IndexClause> ixClause, EntityMetadata m, boolean isRelation, List<String> relations,
            int maxResult, List<String> columns) {
        List<Object> entities = new ArrayList<Object>();
        Connection conn = null;
        try {
            // ixClause can be 0, 1 or more!
            // Restrict the slice to the requested columns, or all columns
            // (empty-to-empty range) when none were specified.
            SlicePredicate slicePredicate = new SlicePredicate();
            if (columns != null && !columns.isEmpty()) {
                List asList = new ArrayList(32);
                for (String colName : columns) {
                    if (colName != null) {
                        asList.add(UTF8Type.instance.decompose(colName));
                    }
                }
                slicePredicate.setColumn_names(asList);
            } else {
                SliceRange sliceRange = new SliceRange();
                sliceRange.setStart(ByteBufferUtil.EMPTY_BYTE_BUFFER);
                sliceRange.setFinish(ByteBufferUtil.EMPTY_BYTE_BUFFER);
                slicePredicate.setSlice_range(sliceRange);
            }
            conn = getConnection();
            if (ixClause.isEmpty()) {
                // No index clauses: scan the whole key range, capped at maxResult.
                KeyRange keyRange = new KeyRange(maxResult);
                keyRange.setStart_key(ByteBufferUtil.EMPTY_BYTE_BUFFER);
                keyRange.setEnd_key(ByteBufferUtil.EMPTY_BYTE_BUFFER);
                if (m.isCounterColumnType()) {
                    List<KeySlice> ks = conn.getClient().get_range_slices(new ColumnParent(m.getTableName()),
                            slicePredicate, keyRange, getConsistencyLevel());
                    entities = onCounterColumn(m, isRelation, relations, ks);
                } else {
                    List<KeySlice> keySlices = conn.getClient().get_range_slices(new ColumnParent(m.getTableName()),
                            slicePredicate, keyRange, getConsistencyLevel());
                    if (m.getType().isSuperColumnFamilyMetadata()) {
                        Map<ByteBuffer, List<SuperColumn>> qResults = ThriftDataResultHelper
                                .transformThriftResult(ColumnFamilyType.SUPER_COLUMN, keySlices, null);
                        entities = new ArrayList<Object>(qResults.size());
                        computeEntityViaSuperColumns(m, isRelation, relations, entities, qResults);
                    } else {
                        Map<ByteBuffer, List<Column>> qResults = ThriftDataResultHelper
                                .transformThriftResult(ColumnFamilyType.COLUMN, keySlices, null);
                        entities = new ArrayList<Object>(qResults.size());
                        computeEntityViaColumns(m, isRelation, relations, entities, qResults);
                    }
                }
            } else {
                entities = new ArrayList<Object>();
                for (IndexClause ix : ixClause) {
                    List<KeySlice> keySlices = conn.getClient().get_indexed_slices(new ColumnParent(m.getTableName()),
                            ix, slicePredicate, getConsistencyLevel());
                    Map<ByteBuffer, List<Column>> qResults = ThriftDataResultHelper
                            .transformThriftResult(ColumnFamilyType.COLUMN, keySlices, null);
                    // iterate through complete map and populate.
                    // NOTE(review): 'entities' is re-created on every iteration,
                    // so only the last IndexClause's results survive when more
                    // than one clause is supplied — confirm whether clauses > 1
                    // ever occur or whether results should accumulate.
                    entities = new ArrayList<Object>(qResults.size());
                    computeEntityViaColumns(m, isRelation, relations, entities, qResults);
                }
            }
        } catch (InvalidRequestException irex) {
            log.error("Error during executing find of column family {}, Caused by: .", m.getTableName(), irex);
            throw new PersistenceException(irex);
        } catch (UnavailableException uex) {
            log.error("Error during executing find of column family {}, Caused by: .", m.getTableName(), uex);
            throw new PersistenceException(uex);
        } catch (TimedOutException tex) {
            log.error("Error during executing find of column family {}, Caused by: .", m.getTableName(), tex);
            throw new PersistenceException(tex);
        } catch (TException tex) {
            log.error("Error during executing find of column family {}, Caused by: .", m.getTableName(), tex);
            throw new PersistenceException(tex);
        } finally {
            // Always return the pooled connection, even on failure.
            releaseConnection(conn);
        }
        return entities;
    }
}
public class TinyAES {
    /**
     * Calculates the AES round keys (key expansion). The number of rounds
     * depends on key size; AES fixes the block size at 128 bits and allows
     * key sizes of 128/192/256 bits, and this code assumes exactly those.
     *
     * @param key the raw key bytes (16, 24, or 32 bytes)
     * @param forEncryption when false, round keys are additionally passed
     *        through the inverse MixColumns transform for equivalent-inverse
     *        decryption
     * @return the expanded key schedule, one 4-word row per round
     * @throws IllegalArgumentException if the key length is not 128/192/256 bits
     */
    private int[][] generateWorkingKey(byte[] key, boolean forEncryption) {
        int KC = key.length / 4; // key length in words
        int t;
        boolean c1 = KC != 4;
        boolean c2 = KC != 6;
        boolean c3 = KC != 8;
        boolean c4 = (KC * 4) != key.length; // rejects lengths not divisible by 4
        if ((c1 && c2 && c3) || c4) {
            throw new IllegalArgumentException("Key length not 128/192/256 bits, " + key.length * 8 + " instead.");
        }
        ROUNDS = KC + 6; // This is not always true for the generalized
                         // Rijndael that allows larger block sizes
        int[][] W = new int[ROUNDS + 1][4]; // 4 words in a block
        // copy the key into the round key array, little-endian word packing
        t = 0;
        for (int i = 0; i < key.length; t++) {
            W[t >> 2][t & 3] = (key[i] & 0xff) | ((key[i + 1] & 0xff) << 8) | ((key[i + 2] & 0xff) << 16)
                    | (key[i + 3] << 24);
            i += 4;
        }
        // while not enough round key material calculated, calculate new values
        int k = (ROUNDS + 1) << 2;
        for (int i = KC; (i < k); i++) {
            int temp = W[(i - 1) >> 2][(i - 1) & 3];
            if ((i % KC) == 0) {
                // start of a key-length block: rotate, substitute, add round constant
                temp = subWord(shift(temp, 8)) ^ rcon[(i / KC) - 1];
            } else if ((KC > 6) && ((i % KC) == 4)) {
                // extra SubWord step for 256-bit keys
                temp = subWord(temp);
            }
            W[i >> 2][i & 3] = W[(i - KC) >> 2][(i - KC) & 3] ^ temp;
        }
        if (!forEncryption) {
            // Transform the inner round keys for the equivalent inverse cipher.
            for (int j = 1; j < ROUNDS; j++) {
                for (int i = 0; i < 4; i++) {
                    W[j][i] = inv_mcol(W[j][i]);
                }
            }
        }
        return W;
    }
}
public class ThreadSafety { /** * ( 3 ) require the super - class to also be a container of its corresponding type parameter . */ private boolean containerOfSubtyping ( Set < String > containerTypeParameters , AnnotationInfo annotation , TypeVariableSymbol typaram , Type tyargument ) { } }
if ( ! tyargument . hasTag ( TypeTag . TYPEVAR ) ) { return false ; } if ( ! containerTypeParameters . contains ( tyargument . asElement ( ) . getSimpleName ( ) . toString ( ) ) || isTypeParameterThreadSafe ( ( TypeVariableSymbol ) tyargument . asElement ( ) , containerTypeParameters ) ) { return false ; } if ( annotation . containerOf ( ) . contains ( typaram . getSimpleName ( ) . toString ( ) ) ) { return false ; } return true ;
public class _HoloActivity {
    /**
     * One-time initialization of the activity: resolves the config, validates
     * the Application instance and theme, and applies action-bar/action-mode
     * overlay window features.
     *
     * Do not override this method. Use {@link #onPreInit(Holo, Bundle)} and
     * {@link #onPostInit(Holo, Bundle)}.
     */
    protected void onInit(Holo config, Bundle savedInstanceState) {
        // Guard against double initialization.
        if (mInited) {
            throw new IllegalStateException("This instance was already inited");
        }
        mInited = true;
        // Resolve the config: explicit arg, then subclass-created, then default.
        if (config == null) {
            config = createConfig(savedInstanceState);
        }
        if (config == null) {
            config = Holo.defaultConfig();
        }
        onPreInit(config, savedInstanceState);
        // Verify the app uses the HoloEverywhere Application class (or an
        // allowed mock), unless the config opts out of the check.
        if (!config.ignoreApplicationInstanceCheck && !(getApplication() instanceof Application)) {
            boolean throwError = true;
            if (config.allowMockApplicationInstance) {
                try {
                    throwError = !(getApplication() instanceof MockApplication);
                    if (!throwError) {
                        Log.w("HoloEverywhere", "Application instance is MockApplication. Wow. Let's begin tests...");
                    }
                } catch (Exception e) {
                    // MockApplication may be unavailable at runtime; fall through to the error.
                }
            }
            if (throwError) {
                String text = "Application instance isn't HoloEverywhere.\n";
                if (getApplication().getClass() == android.app.Application.class) {
                    text += "Put attr 'android:name=\"org.holoeverywhere.app.Application\"'"
                            + " in <application> tag of AndroidManifest.xml";
                } else {
                    text += "Please sure that you extend " + getApplication().getClass()
                            + " from a org.holoeverywhere.app.Application";
                }
                throw new IllegalStateException(text);
            }
        }
        getLayoutInflater().setFragmentActivity(this);
        if (this instanceof Activity) {
            final Activity activity = (Activity) this;
            // Apply the theme only when none was set yet.
            ThemeManager.applyTheme(activity, mLastThemeResourceId == 0);
            if (!config.ignoreThemeCheck && ThemeManager.getThemeType(this) == ThemeManager.INVALID) {
                throw new HoloThemeException(activity);
            }
            // Honor the overlay flags from either the framework or the library attr set.
            TypedArray a = obtainStyledAttributes(new int[] { android.R.attr.windowActionBarOverlay,
                    R.attr.windowActionBarOverlay });
            if (a.getBoolean(0, false) || a.getBoolean(1, false)) {
                supportRequestWindowFeature(Window.FEATURE_ACTION_BAR_OVERLAY);
            }
            a.recycle();
            // NOTE(review): the second entry below is R.attr.windowActionBarOverlay,
            // while the feature requested is ACTION_MODE_OVERLAY — presumably it
            // should be R.attr.windowActionModeOverlay; confirm against R.attr.
            a = obtainStyledAttributes(new int[] { android.R.attr.windowActionModeOverlay,
                    R.attr.windowActionBarOverlay });
            if (a.getBoolean(0, false) || a.getBoolean(1, false)) {
                supportRequestWindowFeature(Window.FEATURE_ACTION_MODE_OVERLAY);
            }
            a.recycle();
        }
        onPostInit(config, savedInstanceState);
        lockAttaching();
    }
}
public class DateConverter { /** * Parse a @ see org . joda . time . DateTime from a String . * @ param dateTimeString timestamp to parse * @ return parsed @ see org . joda . time . DateTime if parseable , null otherwise */ public static DateTime iso8601DateTimeFromString ( String dateTimeString ) { } }
try { return DateTime . parse ( dateTimeString , ISO8601_DATE_TIME_FORMATTER ) ; } catch ( Exception e ) { return null ; }
public class DeepLearningTask {
    /**
     * Performs one forward pass (and, when training, the matching backward
     * pass) through the network for a single row.
     * Assumption: layer 0 has _a filled with (horizontalized categoricals)
     * double values.
     *
     * @param seed RNG seed for dropout-style randomness in fprop
     * @param neurons the layer array; index 0 is input, last is output
     * @param minfo model info; flagged unstable on numerical failure
     * @param training when true, also back-propagate errors
     * @param responses the target value(s); responses[0] may be NaN for missing
     */
    public static void step(long seed, Neurons[] neurons, DeepLearningModel.DeepLearningModelInfo minfo,
            boolean training, double[] responses) {
        try {
            // Forward-propagate through the hidden layers (input layer is pre-filled).
            for (int i = 1; i < neurons.length - 1; ++i) {
                neurons[i].fprop(seed, training);
            }
            if (minfo.get_params().autoencoder) {
                // Autoencoder: output layer is a plain layer reconstructing the input.
                neurons[neurons.length - 1].fprop(seed, training);
                if (training) {
                    for (int i = neurons.length - 1; i > 0; --i) {
                        neurons[i].bprop();
                    }
                }
            } else {
                if (minfo.get_params().classification) {
                    ((Neurons.Softmax) neurons[neurons.length - 1]).fprop();
                    if (training) {
                        // Clear accumulated errors before back-propagating this row.
                        for (int i = 1; i < neurons.length - 1; i++)
                            Arrays.fill(neurons[i]._e.raw(), 0);
                        int target_label;
                        if (Double.isNaN(responses[0])) { // missing response
                            target_label = Neurons.missing_int_value;
                        } else {
                            assert ((double) (int) responses[0] == responses[0]); // classification -> integer labels expected
                            target_label = (int) responses[0];
                        }
                        ((Neurons.Softmax) neurons[neurons.length - 1]).bprop(target_label);
                    }
                } else {
                    // Regression: linear output unit.
                    ((Neurons.Linear) neurons[neurons.length - 1]).fprop();
                    if (training) {
                        for (int i = 1; i < neurons.length - 1; i++)
                            Arrays.fill(neurons[i]._e.raw(), 0);
                        float target_value;
                        if (Double.isNaN(responses[0])) { // missing response
                            target_value = Neurons.missing_real_value;
                        } else {
                            target_value = (float) responses[0];
                        }
                        ((Neurons.Linear) neurons[neurons.length - 1]).bprop(target_value);
                    }
                }
                if (training) {
                    // Back-propagate through the hidden layers (output already done above).
                    for (int i = neurons.length - 2; i > 0; --i)
                        neurons[i].bprop();
                }
            }
        } catch (RuntimeException ex) {
            // Numerical blow-ups surface as runtime exceptions: mark the model
            // unstable and cancel the job.
            Log.warn(ex.getMessage());
            minfo.set_unstable();
            throw new Job.JobCancelledException("Canceling job due to numerical instability.");
        }
    }
}
public class ConsoleKnownHostsKeyVerification { /** * Prompts the user through the console to verify the host key . * @ param host * the name of the host * @ param pk * the current public key of the host * @ param actual * the actual public key supplied by the host * @ since 0.2.0 */ public void onHostKeyMismatch ( String host , SshPublicKey pk , SshPublicKey actual ) { } }
try { System . out . println ( "The host key supplied by " + host + "(" + pk . getAlgorithm ( ) + ")" + " is: " + actual . getFingerprint ( ) ) ; System . out . println ( "The current allowed key for " + host + " is: " + pk . getFingerprint ( ) ) ; getResponse ( host , actual ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; }
public class AmazonAlexaForBusinessClient { /** * Deletes a room profile by the profile ARN . * @ param deleteProfileRequest * @ return Result of the DeleteProfile operation returned by the service . * @ throws NotFoundException * The resource is not found . * @ throws ConcurrentModificationException * There is a concurrent modification of resources . * @ sample AmazonAlexaForBusiness . DeleteProfile * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / alexaforbusiness - 2017-11-09 / DeleteProfile " target = " _ top " > AWS * API Documentation < / a > */ @ Override public DeleteProfileResult deleteProfile ( DeleteProfileRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteProfile ( request ) ;
public class XNSerializables { /** * Create an XNSerializable object through the { @ code creator } function * and load it from the { @ code item } . * @ param < T > the XNSerializable object * @ param item the item to load from * @ param creator the function to create Ts * @ return the created and loaded object */ public static < T extends XNSerializable > T parseItem ( XNElement item , Supplier < T > creator ) { } }
T result = creator . get ( ) ; result . load ( item ) ; return result ;
public class BeanExtractor { private Object exctractJsonifiedValue ( ObjectToJsonConverter pConverter , Object pValue , Stack < String > pPathParts ) throws AttributeNotFoundException { } }
if ( pValue . getClass ( ) . isPrimitive ( ) || FINAL_CLASSES . contains ( pValue . getClass ( ) ) || pValue instanceof JSONAware ) { // No further diving , use these directly return pValue ; } else { // For the rest we build up a JSON map with the attributes as keys and the value are List < String > attributes = extractBeanAttributes ( pValue ) ; if ( attributes . size ( ) > 0 ) { return extractBeanValues ( pConverter , pValue , pPathParts , attributes ) ; } else { // No further attributes , return string representation return pValue . toString ( ) ; } }
public class XLog { /** * Log a message and a throwable with specific log level . * @ param logLevel the specific log level * @ param msg the message to log * @ param tr the throwable to be log * @ since 1.4.0 */ public static void log ( int logLevel , String msg , Throwable tr ) { } }
assertInitialization ( ) ; sLogger . log ( logLevel , msg , tr ) ;
public class MetatypeUtils { /** * Parse a boolean from the provided config value : checks for whether or not * the object read from the Service / Component configuration is a String * or a Metatype converted boolean . * If an exception occurs converting the object parameter : * A translated warning message will be issued using the provided propertyKey and object * as parameters . FFDC for the exception is suppressed : Callers should handle the thrown * IllegalArgumentException as appropriate . * @ param configAlias * Name of config ( pid or alias ) associated with a registered service * or DS component . * @ param propertyKey * The key used to retrieve the property value from the map . * Used in the warning message if the value is badly formed . * @ param obj * The object retrieved from the configuration property map / dictionary . * @ return boolean parsed from obj , the default value if obj is null . * @ throws IllegalArgumentException If value is not a String / Boolean , or if the String * boolean is not " true " or " false " ( ignoring case ) */ public static boolean parseBoolean ( Object configAlias , String propertyKey , Object obj , boolean defaultValue ) { } }
if ( obj != null ) { if ( obj instanceof String ) { String value = ( String ) obj ; if ( value . equalsIgnoreCase ( "true" ) ) { return true ; } else if ( value . equalsIgnoreCase ( "false" ) ) { return false ; } else { Tr . warning ( tc , "invalidBoolean" , configAlias , propertyKey , obj ) ; throw new IllegalArgumentException ( "Boolean value could not be parsed: key=" + propertyKey + ", value=" + obj ) ; } } else if ( obj instanceof Boolean ) { return ( Boolean ) obj ; } // unknown type Tr . warning ( tc , "invalidBoolean" , configAlias , propertyKey , obj ) ; throw new IllegalArgumentException ( "Boolean value could not be parsed: key=" + propertyKey + ", value=" + obj ) ; } return defaultValue ;
public class Prefser { /** * Gets value from SharedPreferences with a given key and type . * If value is not found , we can return defaultValue . * @ param key key of the preference * @ param typeTokenOfT type token of T ( e . g . { @ code new TypeToken < List < String > > { } ) * @ param defaultValue default value of the preference ( e . g . " " or " undefined " ) * @ param < T > return type of the preference ( e . g . String ) * @ return value from SharedPreferences associated with given key or default value */ public < T > T get ( @ NonNull String key , @ NonNull TypeToken < T > typeTokenOfT , T defaultValue ) { } }
Preconditions . checkNotNull ( key , KEY_IS_NULL ) ; Preconditions . checkNotNull ( typeTokenOfT , TYPE_TOKEN_OF_T_IS_NULL ) ; Type typeOfT = typeTokenOfT . getType ( ) ; for ( Map . Entry < Class < ? > , Accessor < ? > > entry : accessorProvider . getAccessors ( ) . entrySet ( ) ) { if ( typeOfT . equals ( entry . getKey ( ) ) ) { @ SuppressWarnings ( "unchecked" ) Accessor < T > accessor = ( Accessor < T > ) entry . getValue ( ) ; return accessor . get ( key , defaultValue ) ; } } if ( contains ( key ) ) { return jsonConverter . fromJson ( preferences . getString ( key , null ) , typeOfT ) ; } else { return defaultValue ; }
public class Span { /** * Concatenates two span lists adding the spans of the second parameter to the * list in first parameter . * @ param allSpans * the spans to which the other spans are added * @ param neSpans * the spans to be added to allSpans */ public static final void concatenateSpans ( final List < Span > allSpans , final Span [ ] neSpans ) { } }
for ( final Span span : neSpans ) { allSpans . add ( span ) ; }
public class CmsRelationType { /** * Returns a localized name for the given relation type . < p > * @ param messages the message bundle to use to resolve the name * @ return a localized name */ public String getLocalizedName ( CmsMessages messages ) { } }
String nameKey = "GUI_RELATION_TYPE_" + getName ( ) + "_0" ; return messages . key ( nameKey ) ;
public class CoroutineReader { /** * Deserializes a { @ link CoroutineRunner } object from a byte array . * If you ' re handling your own deserialization and you simply want to reconstruct the deserialized object to the * { @ link CoroutineRunner } , use { @ link # reconstruct ( com . offbynull . coroutines . user . SerializedState ) } . * @ param data byte array to deserialize * @ return { @ code data } deserialized to a { @ link CoroutineRunner } object * @ throws NullPointerException if any argument is { @ code null } * @ throws IllegalArgumentException if failed to deserialize or deserialized to a state for an unrecognized method ( e . g . a method that ' s * state is being deserialized for was changed but no { @ link FrameUpdatePoint } was provided to this class ' s constructor to * handle the changes ) */ public CoroutineRunner read ( byte [ ] data ) { } }
if ( data == null ) { throw new NullPointerException ( ) ; } SerializedState serializedState = deserializer . deserialize ( data ) ; return reconstruct ( serializedState ) ;
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcEllipseProfileDef ( ) { } }
if ( ifcEllipseProfileDefEClass == null ) { ifcEllipseProfileDefEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 204 ) ; } return ifcEllipseProfileDefEClass ;
public class AWSDeviceFarmClient { /** * Creates a device pool . * @ param createDevicePoolRequest * Represents a request to the create device pool operation . * @ return Result of the CreateDevicePool operation returned by the service . * @ throws ArgumentException * An invalid argument was specified . * @ throws NotFoundException * The specified entity was not found . * @ throws LimitExceededException * A limit was exceeded . * @ throws ServiceAccountException * There was a problem with the service account . * @ sample AWSDeviceFarm . CreateDevicePool * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / devicefarm - 2015-06-23 / CreateDevicePool " target = " _ top " > AWS * API Documentation < / a > */ @ Override public CreateDevicePoolResult createDevicePool ( CreateDevicePoolRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateDevicePool ( request ) ;
public class SwapFile { /** * Mark the file ready for read . */ public void spoolDone ( ) { } }
final CountDownLatch sl = this . spoolLatch . get ( ) ; this . spoolLatch . set ( null ) ; sl . countDown ( ) ;
public class ComplexExpressionExtractor { /** * If an equality ( = = ) expression has double - parentheses , remove one set . * This avoids clang ' s - Wparentheses - equality warning . */ @ Override public void endVisit ( ParenthesizedExpression node ) { } }
Expression expr = node . getExpression ( ) ; if ( expr instanceof ParenthesizedExpression ) { Expression inner = ( ( ParenthesizedExpression ) expr ) . getExpression ( ) ; if ( isEqualityExpression ( inner ) ) { node . replaceWith ( TreeUtil . remove ( expr ) ) ; } } else if ( ! ( node . getParent ( ) instanceof Expression ) && isEqualityExpression ( expr ) ) { node . replaceWith ( TreeUtil . remove ( expr ) ) ; }
public class FacetUrl { /** * Get Resource Url for DeleteFacetById * @ param facetId Unique identifier of the facet to retrieve . * @ return String Resource Url */ public static MozuUrl deleteFacetByIdUrl ( Integer facetId ) { } }
UrlFormatter formatter = new UrlFormatter ( "/api/commerce/catalog/admin/facets/{facetId}" ) ; formatter . formatUrl ( "facetId" , facetId ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ;
public class ImplEdgeNonMaxSuppression_MT { /** * Only processes the inner image . Ignoring the border . */ static public void inner4 ( GrayF32 intensity , GrayS8 direction , GrayF32 output ) { } }
final int w = intensity . width ; final int h = intensity . height - 1 ; BoofConcurrency . loopFor ( 1 , h , y -> { int indexI = intensity . startIndex + y * intensity . stride + 1 ; int indexD = direction . startIndex + y * direction . stride + 1 ; int indexO = output . startIndex + y * output . stride + 1 ; int end = indexI + w - 2 ; for ( ; indexI < end ; indexI ++ , indexD ++ , indexO ++ ) { int dir = direction . data [ indexD ] ; int dx , dy ; if ( dir == 0 ) { dx = 1 ; dy = 0 ; } else if ( dir == 1 ) { dx = 1 ; dy = 1 ; } else if ( dir == 2 ) { dx = 0 ; dy = 1 ; } else { dx = 1 ; dy = - 1 ; } float middle = intensity . data [ indexI ] ; // suppress the value if either of its neighboring values are more than or equal to it if ( intensity . data [ indexI - dx - dy * intensity . stride ] > middle || intensity . data [ indexI + dx + dy * intensity . stride ] > middle ) { output . data [ indexO ] = 0 ; } else { output . data [ indexO ] = middle ; } } } ) ;
public class FileHdr { /** * Get the table name . */ public String getTableNames ( boolean bAddQuotes ) { } }
return ( m_tableName == null ) ? Record . formatTableNames ( FILE_HDR_FILE , bAddQuotes ) : super . getTableNames ( bAddQuotes ) ;
public class ServerHistory { /** * Clean up the Tree by DFS traversal . * Remove the node that has no children and no notifications associated * with them . * @ param node */ private void cleanUpHistoryTree ( HistoryNode node ) { } }
if ( node == null || node . children == null ) { return ; } Iterator < HistoryNode > iterator = node . children . iterator ( ) ; while ( iterator . hasNext ( ) ) { HistoryNode child = iterator . next ( ) ; // clean up child cleanUpHistoryTree ( child ) ; // clean up current node ; if ( shouldRemoveNode ( child ) ) { iterator . remove ( ) ; } }
public class FileDownloader { /** * Start the download queue by the same listener . * @ param listener Used to assemble tasks which is bound by the same { @ code listener } * @ param isSerial Whether start tasks one by one rather than parallel . * @ return { @ code true } if start tasks successfully . */ public boolean start ( final FileDownloadListener listener , final boolean isSerial ) { } }
if ( listener == null ) { Util . w ( TAG , "Tasks with the listener can't start, because the listener " + "provided is null: [null, " + isSerial + "]" ) ; return false ; } List < DownloadTaskAdapter > originalTasks = FileDownloadList . getImpl ( ) . assembleTasksToStart ( listener ) ; if ( originalTasks . isEmpty ( ) ) { Util . w ( TAG , "no task for listener: " + listener + " to start" ) ; return false ; } ArrayList < DownloadTask > downloadTasks = new ArrayList < > ( ) ; for ( DownloadTaskAdapter task : originalTasks ) { downloadTasks . add ( task . getDownloadTask ( ) ) ; } final DownloadContext downloadContext = new DownloadContext . Builder ( new DownloadContext . QueueSet ( ) , downloadTasks ) . setListener ( new DownloadContextListener ( ) { @ Override public void taskEnd ( @ NonNull DownloadContext context , @ NonNull DownloadTask task , @ NonNull EndCause cause , @ Nullable Exception realCause , int remainCount ) { Util . d ( TAG , "task " + task . getId ( ) + "end" ) ; final DownloadTaskAdapter downloadTaskAdapter = FileDownloadUtils . findDownloadTaskAdapter ( task ) ; if ( downloadTaskAdapter != null ) { FileDownloadList . getImpl ( ) . remove ( downloadTaskAdapter ) ; } } @ Override public void queueEnd ( @ NonNull DownloadContext context ) { Util . d ( TAG , "queue end" ) ; } } ) . build ( ) ; final CompatListenerAdapter compatListenerAdapter = CompatListenerAdapter . create ( listener ) ; downloadContext . start ( compatListenerAdapter , isSerial ) ; return true ;
public class ApiOvhOrder { /** * Get prices and contracts information * REST : GET / order / email / exchange / { organizationName } / service / { exchangeService } / upgrade * @ param organizationName [ required ] The internal name of your exchange organization * @ param exchangeService [ required ] The internal name of your exchange service */ public OvhOrder email_exchange_organizationName_service_exchangeService_upgrade_GET ( String organizationName , String exchangeService ) throws IOException { } }
String qPath = "/order/email/exchange/{organizationName}/service/{exchangeService}/upgrade" ; StringBuilder sb = path ( qPath , organizationName , exchangeService ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOrder . class ) ;
public class RetryPolicies { /** * Set a default policy with some explicit handlers for specific exceptions . */ public static final RetryPolicy retryByException ( RetryPolicy defaultPolicy , Map < Class < ? extends Exception > , RetryPolicy > exceptionToPolicyMap ) { } }
return new ExceptionDependentRetry ( defaultPolicy , exceptionToPolicyMap ) ;
public class ProductSegmentation { /** * Sets the bandwidthSegment value for this ProductSegmentation . * @ param bandwidthSegment * The bandwidth segmentation . { @ link BandwidthGroupTargeting # isTargeted } * must be { @ code true } . * < p > This attribute is optional . */ public void setBandwidthSegment ( com . google . api . ads . admanager . axis . v201902 . BandwidthGroupTargeting bandwidthSegment ) { } }
this . bandwidthSegment = bandwidthSegment ;
public class QuartzScheduler { /** * Halts the < code > QuartzScheduler < / code > ' s firing of < code > { @ link org . quartz . triggers . Trigger } s * < / code > , and cleans up all resources associated with the QuartzScheduler . * < p > The scheduler cannot be re - started . */ @ Override public void shutdown ( ) { } }
// delay a little bit in case an added job is still taking it ' s time getting started right // before shutdown is called . try { // System . out . println ( " waiting . . . " ) ; Thread . sleep ( 100 ) ; } catch ( Exception ignore ) { } if ( shuttingDown || closed ) { return ; } shuttingDown = true ; logger . info ( "Scheduler shutting down..." ) ; standby ( ) ; this . quartzSchedulerThread . halt ( ) ; notifySchedulerListenersShuttingdown ( ) ; // notify Jobs , so they can gracefully shutdown List < JobExecutionContext > jobs = getCurrentlyExecutingJobs ( ) ; for ( JobExecutionContext job : jobs ) { if ( job . getJobInstance ( ) instanceof InterruptableJob ) { try { ( ( InterruptableJob ) job . getJobInstance ( ) ) . interrupt ( ) ; } catch ( Throwable e ) { // do nothing , this was just a courtesy effort logger . warn ( "Encountered error when interrupting job {} during shutdown: {}" , job . getJobDetail ( ) . getName ( ) , e ) ; } } } logger . info ( "Threadpool shutting down..." ) ; quartzSchedulerResources . getThreadPool ( ) . shutdown ( ) ; // Scheduler thread may have be waiting for the fire time of an acquired // trigger and need time to release the trigger once halted , so make sure // the thread is dead before continuing to shutdown the job store . try { this . quartzSchedulerThread . join ( ) ; } catch ( InterruptedException ignore ) { } closed = true ; shutdownPlugins ( ) ; notifySchedulerListenersShutdown ( ) ; logger . info ( "Scheduler shutdown complete." ) ;
public class HandlerFactory { /** * get a numeric with separators key press handler . * @ return NumericWithSeparatorsKeyPressHandler */ public static final KeyPressHandler getNumericWithSeparatorsKeyPressHandler ( ) { } }
// NOPMD if ( HandlerFactory . numericWsKeyPressHandler == null ) { synchronized ( NumericWithSeparatorsKeyPressHandler . class ) { if ( HandlerFactory . numericWsKeyPressHandler == null ) { HandlerFactory . numericWsKeyPressHandler = new NumericWithSeparatorsKeyPressHandler ( ) ; } } } return HandlerFactory . numericWsKeyPressHandler ;
public class AmazonChimeClient { /** * Retrieves details for the specified Amazon Chime account , such as account type and supported licenses . * @ param getAccountRequest * @ return Result of the GetAccount operation returned by the service . * @ throws UnauthorizedClientException * The client is not currently authorized to make the request . * @ throws NotFoundException * One or more of the resources in the request does not exist in the system . * @ throws ForbiddenException * The client is permanently forbidden from making the request . For example , when a user tries to create an * account from an unsupported region . * @ throws BadRequestException * The input parameters don ' t match the service ' s restrictions . * @ throws ThrottledClientException * The client exceeded its request rate limit . * @ throws ServiceUnavailableException * The service is currently unavailable . * @ throws ServiceFailureException * The service encountered an unexpected error . * @ sample AmazonChime . GetAccount * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / chime - 2018-05-01 / GetAccount " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetAccountResult getAccount ( GetAccountRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetAccount ( request ) ;
public class ContractsApi { /** * Get public contract items ( asynchronously ) Lists items of a public * contract - - - This route is cached for up to 3600 seconds * @ param contractId * ID of a contract ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param page * Which page of results to return ( optional , default to 1) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call getContractsPublicItemsContractIdAsync ( Integer contractId , String datasource , String ifNoneMatch , Integer page , final ApiCallback < List < PublicContractsItemsResponse > > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = getContractsPublicItemsContractIdValidateBeforeCall ( contractId , datasource , ifNoneMatch , page , callback ) ; Type localVarReturnType = new TypeToken < List < PublicContractsItemsResponse > > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class SeaGlassTabbedPaneUI { /** * Make sure we have laid out the pane with the current layout . */ private void ensureCurrentLayout ( ) { } }
if ( ! tabPane . isValid ( ) ) { tabPane . validate ( ) ; } /* * If tabPane doesn ' t have a peer yet , the validate ( ) call will silently * fail . We handle that by forcing a layout if tabPane is still invalid . * See bug 4237677. */ if ( ! tabPane . isValid ( ) ) { TabbedPaneLayout layout = ( TabbedPaneLayout ) tabPane . getLayout ( ) ; layout . calculateLayoutInfo ( ) ; }
public class SerializedFormBuilder { /** * Build the field deprecation information . * @ param node the XML element that specifies which components to document * @ param fieldsContentTree content tree to which the documentation will be added */ public void buildFieldDeprecationInfo ( XMLNode node , Content fieldsContentTree ) { } }
if ( ! utils . definesSerializableFields ( currentTypeElement ) ) { fieldWriter . addMemberDeprecatedInfo ( ( VariableElement ) currentMember , fieldsContentTree ) ; }
public class ErrorCodeImpl { /** * ( non - Javadoc ) * @ see org . restcomm . protocols . ss7 . tcap . asn . Encodable # decode ( org . mobicents . protocols . asn . AsnInputStream ) */ public void decode ( AsnInputStream ais ) throws ParseException { } }
try { if ( this . type == ErrorCodeType . Global ) { this . globalErrorCode = ais . readObjectIdentifier ( ) ; } else if ( this . type == ErrorCodeType . Local ) { this . localErrorCode = ais . readInteger ( ) ; } else { throw new ParseException ( null , GeneralProblemType . MistypedComponent ) ; } } catch ( IOException e ) { e . printStackTrace ( ) ; throw new ParseException ( null , GeneralProblemType . BadlyStructuredComponent , "IOException while parsing ErrorCode: " + e . getMessage ( ) , e ) ; } catch ( AsnException e ) { e . printStackTrace ( ) ; throw new ParseException ( null , GeneralProblemType . BadlyStructuredComponent , "AsnException while parsing ErrorCode: " + e . getMessage ( ) , e ) ; }
public class HSQLDBHandler { /** * adds a table to the memory database * @ param conn * @ param pc * @ param name name of the new table * @ param query data source for table * @ throws SQLException * @ throws PageException */ private static void addTable ( Connection conn , PageContext pc , String name , Query query , boolean doSimpleTypes , ArrayList < String > usedTables ) throws SQLException , PageException { } }
Statement stat ; usedTables . add ( name ) ; stat = conn . createStatement ( ) ; Key [ ] keys = CollectionUtil . keys ( query ) ; int [ ] types = query . getTypes ( ) ; int [ ] innerTypes = toInnerTypes ( types ) ; // CREATE STATEMENT String comma = "" ; StringBuilder create = new StringBuilder ( "CREATE TABLE " + name + " (" ) ; StringBuilder insert = new StringBuilder ( "INSERT INTO " + name + " (" ) ; StringBuilder values = new StringBuilder ( "VALUES (" ) ; for ( int i = 0 ; i < keys . length ; i ++ ) { String key = keys [ i ] . getString ( ) ; String type = ( doSimpleTypes ) ? "VARCHAR_IGNORECASE" : toUsableType ( types [ i ] ) ; create . append ( comma + key ) ; create . append ( " " ) ; create . append ( type ) ; insert . append ( comma + key ) ; values . append ( comma + "?" ) ; comma = "," ; } create . append ( ")" ) ; insert . append ( ")" ) ; values . append ( ")" ) ; stat . execute ( create . toString ( ) ) ; PreparedStatement prepStat = conn . prepareStatement ( insert . toString ( ) + values . toString ( ) ) ; // INSERT STATEMENT // HashMap integerTypes = getIntegerTypes ( types ) ; int count = query . getRecordcount ( ) ; QueryColumn [ ] columns = new QueryColumn [ keys . length ] ; for ( int i = 0 ; i < keys . length ; i ++ ) { columns [ i ] = query . getColumn ( keys [ i ] ) ; } for ( int y = 0 ; y < count ; y ++ ) { for ( int i = 0 ; i < keys . length ; i ++ ) { int type = innerTypes [ i ] ; Object value = columns [ i ] . get ( y + 1 , null ) ; // print . out ( " * * * " + type + " : " + Caster . toString ( value ) ) ; if ( doSimpleTypes ) { prepStat . setObject ( i + 1 , Caster . toString ( value ) ) ; } else { if ( value == null ) prepStat . setNull ( i + 1 , types [ i ] ) ; else if ( type == BINARY ) prepStat . setBytes ( i + 1 , Caster . toBinary ( value ) ) ; else if ( type == DATE ) { // print . out ( new java . util . Date ( new // Date ( DateCaster . toDateAdvanced ( value , pc . getTimeZone ( ) ) . getTime ( ) ) . 
getTime ( ) ) ) ; prepStat . setTimestamp ( i + 1 , ( value . equals ( "" ) ) ? null : new Timestamp ( DateCaster . toDateAdvanced ( query . getAt ( keys [ i ] , y + 1 ) , pc . getTimeZone ( ) ) . getTime ( ) ) ) ; // prepStat . setObject ( i + 1 , Caster . toDate ( value , null ) ) ; // prepStat . setDate ( i + 1 , ( value = = null | | value . equals ( " " ) ) ? null : new // Date ( DateCaster . toDateAdvanced ( value , pc . getTimeZone ( ) ) . getTime ( ) ) ) ; } else if ( type == TIME ) prepStat . setTime ( i + 1 , ( value . equals ( "" ) ) ? null : new Time ( DateCaster . toDateAdvanced ( query . getAt ( keys [ i ] , y + 1 ) , pc . getTimeZone ( ) ) . getTime ( ) ) ) ; else if ( type == TIMESTAMP ) prepStat . setTimestamp ( i + 1 , ( value . equals ( "" ) ) ? null : new Timestamp ( DateCaster . toDateAdvanced ( query . getAt ( keys [ i ] , y + 1 ) , pc . getTimeZone ( ) ) . getTime ( ) ) ) ; else if ( type == DOUBLE ) prepStat . setDouble ( i + 1 , ( value . equals ( "" ) ) ? 0 : Caster . toDoubleValue ( query . getAt ( keys [ i ] , y + 1 ) ) ) ; else if ( type == INT ) prepStat . setLong ( i + 1 , ( value . equals ( "" ) ) ? 0 : Caster . toLongValue ( query . getAt ( keys [ i ] , y + 1 ) ) ) ; else if ( type == STRING ) prepStat . setObject ( i + 1 , Caster . toString ( value ) ) ; } } prepStat . execute ( ) ; }
public class hostcpu { /** * Use this API to fetch filtered set of hostcpu resources . * filter string should be in JSON format . eg : " vm _ state : DOWN , name : [ a - z ] + " */ public static hostcpu [ ] get_filtered ( nitro_service service , String filter ) throws Exception { } }
hostcpu obj = new hostcpu ( ) ; options option = new options ( ) ; option . set_filter ( filter ) ; hostcpu [ ] response = ( hostcpu [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class CacheHeader { /** * Set expires header to given date . * @ param response Response * @ param date Expires date */ public static void setExpires ( @ NotNull HttpServletResponse response , @ Nullable Date date ) { } }
if ( date == null ) { response . setHeader ( HEADER_EXPIRES , "-1" ) ; } else { response . setHeader ( HEADER_EXPIRES , formatDate ( date ) ) ; }
public class Parser { /** * Parse a ' var ' or ' const ' statement , or a ' var ' init list in a for * statement . * @ param declType A token value : either VAR , CONST , or LET depending on * context . * @ param pos the position where the node should start . It ' s sometimes * the var / const / let keyword , and other times the beginning of the first * token in the first variable declaration . * @ return the parsed variable list */ private VariableDeclaration variables ( int declType , int pos , boolean isStatement ) throws IOException { } }
int end ; VariableDeclaration pn = new VariableDeclaration ( pos ) ; pn . setType ( declType ) ; pn . setLineno ( ts . lineno ) ; Comment varjsdocNode = getAndResetJsDoc ( ) ; if ( varjsdocNode != null ) { pn . setJsDocNode ( varjsdocNode ) ; } // Example : // var foo = { a : 1 , b : 2 } , bar = [ 3 , 4 ] ; // var { b : s2 , a : s1 } = foo , x = 6 , y , [ s3 , s4 ] = bar ; for ( ; ; ) { AstNode destructuring = null ; Name name = null ; int tt = peekToken ( ) , kidPos = ts . tokenBeg ; end = ts . tokenEnd ; if ( tt == Token . LB || tt == Token . LC ) { // Destructuring assignment , e . g . , var [ a , b ] = . . . destructuring = destructuringPrimaryExpr ( ) ; end = getNodeEnd ( destructuring ) ; if ( ! ( destructuring instanceof DestructuringForm ) ) reportError ( "msg.bad.assign.left" , kidPos , end - kidPos ) ; markDestructuring ( destructuring ) ; } else { // Simple variable name mustMatchToken ( Token . NAME , "msg.bad.var" , true ) ; name = createNameNode ( ) ; name . setLineno ( ts . getLineno ( ) ) ; if ( inUseStrictDirective ) { String id = ts . getString ( ) ; if ( "eval" . equals ( id ) || "arguments" . equals ( ts . getString ( ) ) ) { reportError ( "msg.bad.id.strict" , id ) ; } } defineSymbol ( declType , ts . getString ( ) , inForInit ) ; } int lineno = ts . lineno ; Comment jsdocNode = getAndResetJsDoc ( ) ; AstNode init = null ; if ( matchToken ( Token . ASSIGN , true ) ) { init = assignExpr ( ) ; end = getNodeEnd ( init ) ; } VariableInitializer vi = new VariableInitializer ( kidPos , end - kidPos ) ; if ( destructuring != null ) { if ( init == null && ! inForInit ) { reportError ( "msg.destruct.assign.no.init" ) ; } vi . setTarget ( destructuring ) ; } else { vi . setTarget ( name ) ; } vi . setInitializer ( init ) ; vi . setType ( declType ) ; vi . setJsDocNode ( jsdocNode ) ; vi . setLineno ( lineno ) ; pn . addVariable ( vi ) ; if ( ! matchToken ( Token . COMMA , true ) ) break ; } pn . setLength ( end - pos ) ; pn . 
setIsStatement ( isStatement ) ; return pn ;
public class OkapiUI { /** * GEN - LAST : event _ dataMatrixSquareOnlyCheckActionPerformed */ private void gridmatrixAutoSizeActionPerformed ( java . awt . event . ActionEvent evt ) { } }
// GEN - FIRST : event _ gridmatrixAutoSizeActionPerformed // TODO add your handling code here : gridmatrixUserSizeCombo . setEnabled ( false ) ; gridmatrixUserEccCombo . setEnabled ( false ) ; encodeData ( ) ;