signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Transform2D { /** * Concatenates this transform with a scaling transformation . * < p > This function is equivalent to : * < pre > * this = this * [ sx 0 0 ] * [ 0 sy 0 ] * [ 0 0 1 ] * < / pre > * @ param scaleX scaling along x axis . * @ param scaleY scaling along y axis . */ public void scale ( double scaleX , double scaleY ) { } }
this . m00 *= scaleX ; this . m11 *= scaleY ; this . m01 *= scaleY ; this . m10 *= scaleX ;
public class AbstractIndexWriter { /** * Add description about the Static Variable / Method / Constructor for a * member . * @ param member MemberDoc for the member within the Class Kind * @ param contentTree the content tree to which the member description will be added */ protected void addMemberDesc ( Element member , Content contentTree ) { } }
TypeElement containing = utils . getEnclosingTypeElement ( member ) ; String classdesc = utils . getTypeElementName ( containing , true ) + " " ; if ( utils . isField ( member ) ) { Content resource = contents . getContent ( utils . isStatic ( member ) ? "doclet.Static_variable_in" : "doclet.Variable_in" , classdesc ) ; contentTree . addContent ( resource ) ; } else if ( utils . isConstructor ( member ) ) { contentTree . addContent ( contents . getContent ( "doclet.Constructor_for" , classdesc ) ) ; } else if ( utils . isMethod ( member ) ) { Content resource = contents . getContent ( utils . isStatic ( member ) ? "doclet.Static_method_in" : "doclet.Method_in" , classdesc ) ; contentTree . addContent ( resource ) ; } addPreQualifiedClassLink ( LinkInfoImpl . Kind . INDEX , containing , false , contentTree ) ;
public class BugLoader { /** * Does what it says it does , hit apple r ( control r on pc ) and the analysis * is redone using the current project * @ param p * @ return the bugs from the reanalysis , or null if cancelled */ public static @ CheckForNull BugCollection doAnalysis ( @ Nonnull Project p ) { } }
requireNonNull ( p , "null project" ) ; RedoAnalysisCallback ac = new RedoAnalysisCallback ( ) ; AnalyzingDialog . show ( p , ac , true ) ; if ( ac . finished ) { return ac . getBugCollection ( ) ; } else { return null ; }
public class GreenPepperXmlRpcServer { /** * { @ inheritDoc } */ public Vector < Object > getSpecification ( Vector < Object > specificationParams ) { } }
try { Specification specification = Specification . newInstance ( ( String ) specificationParams . get ( DOCUMENT_NAME_IDX ) ) ; Vector < ? > repositoryParams = ( Vector < ? > ) specificationParams . get ( DOCUMENT_REPOSITORY_IDX ) ; Repository repository = Repository . newInstance ( ( String ) repositoryParams . get ( REPOSITORY_UID_IDX ) ) ; repository . setName ( ( String ) repositoryParams . get ( REPOSITORY_NAME_IDX ) ) ; specification . setRepository ( repository ) ; specification = service . getSpecification ( specification ) ; if ( specification == null ) { return XmlRpcDataMarshaller . errorAsVector ( SPECIFICATION_NOT_FOUND ) ; } else { log . debug ( "Specification found: " + specification . getName ( ) ) ; return specification . marshallize ( ) ; } } catch ( Exception e ) { return errorAsVector ( e , SPECIFICATION_NOT_FOUND ) ; }
public class CPOptionPersistenceImpl { /** * Removes all the cp options where uuid = & # 63 ; and companyId = & # 63 ; from the database . * @ param uuid the uuid * @ param companyId the company ID */ @ Override public void removeByUuid_C ( String uuid , long companyId ) { } }
for ( CPOption cpOption : findByUuid_C ( uuid , companyId , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( cpOption ) ; }
public class ClassName { /** * Returns all enclosing classes in this , outermost first . */ private List < ClassName > enclosingClasses ( ) { } }
List < ClassName > result = new ArrayList < > ( ) ; for ( ClassName c = this ; c != null ; c = c . enclosingClassName ) { result . add ( c ) ; } Collections . reverse ( result ) ; return result ;
public class Country { /** * Gets the value of the requested property * @ param propName * allowed object is { @ link String } * @ return * returned object is { @ link Object } */ @ Override public Object get ( String propName ) { } }
if ( propName . equals ( PROP_C ) ) { return getC ( ) ; } if ( propName . equals ( PROP_COUNTRY_NAME ) ) { return getCountryName ( ) ; } if ( propName . equals ( PROP_DESCRIPTION ) ) { return getDescription ( ) ; } return super . get ( propName ) ;
public class JSATData {

    /**
     * Returns a DataWriter object which can be used to stream a set of arbitrary
     * datapoints into the given output stream. This works in a thread safe manner.
     *
     * @param out the location to store all the data
     * @param catInfo information about the categorical features to be written
     * @param dim information on how many numeric features exist
     * @param predicting information on the class label, may be {@code null} if not a
     *        classification dataset
     * @param fpStore the format floating point values should be stored as
     * @param type what type of data set (simple, classification, regression) to be written
     * @return the DataWriter that the actual points can be streamed through
     * @throws IOException
     */
    public static DataWriter getWriter(OutputStream out, CategoricalData[] catInfo, int dim,
            final CategoricalData predicting, final FloatStorageMethod fpStore,
            DataWriter.DataSetType type) throws IOException {
        // Anonymous subclass: writeHeader emits the file header once; pointToBytes
        // serializes each point into a per-call buffer (thread-safe by construction
        // of DataWriter, which hands each writer its own ByteArrayOutputStream).
        return new DataWriter(out, catInfo, dim, type) {
            @Override
            protected void writeHeader(CategoricalData[] catInfo, int dim,
                    DataWriter.DataSetType type, OutputStream out) throws IOException {
                DataOutputStream data_out = new DataOutputStream(out);

                data_out.write(JSATData.MAGIC_NUMBER);

                // The target value is folded into the point encoding: regression adds
                // one extra numeric slot, classification one extra categorical slot.
                // NOTE(review): "type.REGRESSION" accesses the enum constant through the
                // instance variable; it still compares against DataSetType.REGRESSION.
                int numNumeric = dim;
                int numCat = catInfo.length;
                DatasetTypeMarker marker = DatasetTypeMarker.STANDARD;
                if (type == type.REGRESSION) {
                    numNumeric++;
                    marker = DatasetTypeMarker.REGRESSION;
                }
                if (type == type.CLASSIFICATION) {
                    numCat++;
                    marker = DatasetTypeMarker.CLASSIFICATION;
                }

                data_out.writeByte(marker.ordinal());
                data_out.writeByte(fpStore.ordinal());
                data_out.writeInt(numNumeric);
                data_out.writeInt(numCat);
                data_out.writeInt(-1); // -1 used to indicate a potentially variable number of files

                for (CategoricalData category : catInfo) {
                    // first, whats the name of the i'th category
                    writeString(category.getCategoryName(), data_out);
                    data_out.writeInt(category.getNumOfCategories()); // output the number of categories
                    for (int i = 0; i < category.getNumOfCategories(); i++) // the option names
                        writeString(category.getOptionName(i), data_out);
                }

                // extra for classification dataset
                if (type == DataWriter.DataSetType.CLASSIFICATION) {
                    CategoricalData category = predicting;
                    // first, whats the name of the i'th category
                    writeString(category.getCategoryName(), data_out);
                    data_out.writeInt(category.getNumOfCategories()); // output the number of categories
                    for (int i = 0; i < category.getNumOfCategories(); i++) // the option names
                        writeString(category.getOptionName(i), data_out);
                }
                data_out.flush();
            }

            @Override
            protected void pointToBytes(double weight, DataPoint dp, double label,
                    ByteArrayOutputStream byteOut) {
                try {
                    DataOutputStream data_out = new DataOutputStream(byteOut);
                    // Per-point layout: weight, categorical values, [class label],
                    // sparse flag, numeric values, [regression target].
                    fpStore.writeFP(weight, data_out);
                    for (int val : dp.getCategoricalValues())
                        data_out.writeInt(val);
                    if (type == DataWriter.DataSetType.CLASSIFICATION)
                        data_out.writeInt((int) label);
                    Vec numericVals = dp.getNumericalValues();
                    data_out.writeBoolean(numericVals.isSparse());
                    if (numericVals.isSparse()) {
                        if (type == DataWriter.DataSetType.REGRESSION)
                            data_out.writeInt(numericVals.nnz() + 1); // +1 for the target value, which may actually be zero...
                        else
                            data_out.writeInt(numericVals.nnz());
                        for (IndexValue iv : numericVals) {
                            data_out.writeInt(iv.getIndex());
                            fpStore.writeFP(iv.getValue(), data_out);
                        }
                    } else {
                        for (int j = 0; j < numericVals.length(); j++)
                            fpStore.writeFP(numericVals.get(j), data_out);
                    }

                    // append the target value
                    if (type == DataWriter.DataSetType.REGRESSION) {
                        /*
                         * if dense, we only need to just add the extra double. If
                         * sparse, we do the index and then the double.
                         */
                        if (numericVals.isSparse())
                            data_out.writeInt(numericVals.length());
                        fpStore.writeFP(label, data_out);
                    }
                    data_out.flush();
                } catch (IOException ex) {
                    // byteOut is an in-memory buffer, so this should not occur in
                    // practice; log rather than propagate from the write pipeline.
                    Logger.getLogger(JSATData.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        };
    }
}
import java . util . * ; class TopFrequentChars { /** * Function to identify the top n most frequent characters in a given string with their counts . * > > > top _ frequent _ chars ( ' lkseropewdssafsdfafkpwe ' , 3) * [ ( ' s ' , 4 ) , ( ' e ' , 3 ) , ( ' f ' , 3 ) ] * > > > top _ frequent _ chars ( ' lkseropewdssafsdfafkpwe ' , 2) * [ ( ' s ' , 4 ) , ( ' e ' , 3 ) ] * > > > top _ frequent _ chars ( ' lkseropewdssafsdfafkpwe ' , 7) * [ ( ' s ' , 4 ) , ( ' e ' , 3 ) , ( ' f ' , 3 ) , ( ' k ' , 2 ) , ( ' p ' , 2 ) , ( ' w ' , 2 ) , ( ' d ' , 2 ) ] */ public static List < Map . Entry < Character , Integer > > topFrequentChars ( String text , int n ) { } }
Map < Character , Integer > freqMap = new HashMap < > ( ) ; for ( char c : text . toCharArray ( ) ) { freqMap . put ( c , freqMap . getOrDefault ( c , 0 ) + 1 ) ; } List < Map . Entry < Character , Integer > > list = new ArrayList < > ( freqMap . entrySet ( ) ) ; list . sort ( ( Map . Entry < Character , Integer > a , Map . Entry < Character , Integer > b ) -> b . getValue ( ) - a . getValue ( ) ) ; return list . subList ( 0 , n ) ;
public class CPAttachmentFileEntryPersistenceImpl { /** * Removes all the cp attachment file entries where classNameId = & # 63 ; and classPK = & # 63 ; from the database . * @ param classNameId the class name ID * @ param classPK the class pk */ @ Override public void removeByC_C ( long classNameId , long classPK ) { } }
for ( CPAttachmentFileEntry cpAttachmentFileEntry : findByC_C ( classNameId , classPK , QueryUtil . ALL_POS , QueryUtil . ALL_POS , null ) ) { remove ( cpAttachmentFileEntry ) ; }
public class ActivityLifecycleCallback { /** * Enables lifecycle callbacks for Android devices * @ param application App ' s Application object */ @ TargetApi ( Build . VERSION_CODES . ICE_CREAM_SANDWICH ) public static synchronized void register ( android . app . Application application ) { } }
if ( application == null ) { Logger . i ( "Application instance is null/system API is too old" ) ; return ; } if ( registered ) { Logger . v ( "Lifecycle callbacks have already been registered" ) ; return ; } registered = true ; application . registerActivityLifecycleCallbacks ( new android . app . Application . ActivityLifecycleCallbacks ( ) { @ Override public void onActivityCreated ( Activity activity , Bundle bundle ) { CleverTapAPI . onActivityCreated ( activity ) ; } @ Override public void onActivityStarted ( Activity activity ) { } @ Override public void onActivityResumed ( Activity activity ) { CleverTapAPI . onActivityResumed ( activity ) ; } @ Override public void onActivityPaused ( Activity activity ) { CleverTapAPI . onActivityPaused ( ) ; } @ Override public void onActivityStopped ( Activity activity ) { } @ Override public void onActivitySaveInstanceState ( Activity activity , Bundle bundle ) { } @ Override public void onActivityDestroyed ( Activity activity ) { } } ) ; Logger . i ( "Activity Lifecycle Callback successfully registered" ) ;
public class JsType {

    /**
     * Returns a {@link JsType} corresponding to the given {@link SoyType}.
     *
     * <p>TODO(lukes): consider adding a cache for all the computed types. The same type is probably
     * accessed many many times.
     *
     * @param soyType the soy type
     * @param isIncrementalDom whether or not this is for incremental dom.
     * @param isStrict If true, generates stricter types than default (e.g. boolean values cannot be 0
     *     or 1).
     */
    private static JsType forSoyType(SoyType soyType, boolean isIncrementalDom, boolean isStrict) {
        switch (soyType.getKind()) {
            case NULL:
                return NULL_OR_UNDEFINED_TYPE;

            case ANY:
                return ANY_TYPE;

            case UNKNOWN:
                return UNKNOWN_TYPE;

            case BOOL:
                return isStrict ? BOOLEAN_TYPE_STRICT : BOOLEAN_TYPE;

            case PROTO_ENUM:
                SoyProtoEnumType enumType = (SoyProtoEnumType) soyType;
                String enumTypeName = enumType.getNameForBackend(SoyBackendKind.JS_SRC);
                JsType.Builder enumBuilder =
                        builder()
                                .addType(enumTypeName)
                                .addRequire(GoogRequire.create(enumTypeName))
                                .setPredicate(GOOG_IS_NUMBER);
                if (!isStrict) {
                    // TODO(lukes): stop allowing number?, just allow the enum
                    enumBuilder.addType("number");
                }
                return enumBuilder.build();

            case FLOAT:
            case INT:
                return NUMBER_TYPE;

            case STRING:
                return STRING_OR_UNSANITIZED_TEXT;

            case ATTRIBUTES:
                if (isIncrementalDom) {
                    // idom has a different strategy for handling these
                    return IDOM_ATTRIBUTES;
                }
                // fall through
            case HTML:
                if (isIncrementalDom) {
                    // idom has a different strategy for handling these
                    return IDOM_HTML;
                }
                // fall-through
            case CSS:
            case JS:
            case URI:
            case TRUSTED_RESOURCE_URI:
                // All remaining sanitized-content kinds share a lookup table keyed
                // by content kind.
                return isStrict
                        ? SANITIZED_TYPES_STRICT.get(((SanitizedType) soyType).getContentKind())
                        : SANITIZED_TYPES.get(((SanitizedType) soyType).getContentKind());

            case LIST:
                ListType listType = (ListType) soyType;
                if (listType.getElementType().getKind() == SoyType.Kind.ANY) {
                    return RAW_ARRAY_TYPE;
                }
                // Recurse for the element type and propagate its goog.requires.
                JsType element = forSoyType(listType.getElementType(), isIncrementalDom, isStrict);
                return builder()
                        .addType("!Array<" + element.typeExpr() + ">")
                        .addRequires(element.getGoogRequires())
                        .setPredicate(GOOG_IS_ARRAY)
                        .build();

            case LEGACY_OBJECT_MAP:
                {
                    LegacyObjectMapType mapType = (LegacyObjectMapType) soyType;
                    if (mapType.getKeyType().getKind() == SoyType.Kind.ANY
                            && mapType.getValueType().getKind() == SoyType.Kind.ANY) {
                        return RAW_OBJECT_TYPE;
                    }
                    JsType keyTypeName = forSoyType(mapType.getKeyType(), isIncrementalDom, isStrict);
                    JsType valueTypeName =
                            forSoyType(mapType.getValueType(), isIncrementalDom, isStrict);
                    return builder()
                            .addType(
                                    String.format(
                                            "!Object<%s,%s>",
                                            keyTypeName.typeExpr(), valueTypeName.typeExpr()))
                            .addRequires(keyTypeName.getGoogRequires())
                            .addRequires(valueTypeName.getGoogRequires())
                            .setPredicate(GOOG_IS_OBJECT)
                            .build();
                }

            case MAP:
                {
                    MapType mapType = (MapType) soyType;
                    SoyType keyType = mapType.getKeyType();
                    SoyType.Kind keyKind = keyType.getKind();
                    Preconditions.checkState(MapType.isAllowedKeyType(keyType));
                    // Soy key type of string should translate to a JS key type of string.
                    // forSoyType(StringType.getInstance()) normally translates to
                    // string|!goog.soy.data.UnsanitizedText, but ES6 Maps always use instance
                    // equality for lookups. Using UnsanitizedText instances as keys in Soy maps
                    // would cause unexpected behavior (usually a failed map lookup), so don't
                    // generate signatures that allow it.
                    JsType keyTypeName =
                            keyKind == SoyType.Kind.STRING
                                    ? STRING_TYPE
                                    : forSoyType(keyType, isIncrementalDom, isStrict);
                    JsType valueTypeName =
                            forSoyType(mapType.getValueType(), isIncrementalDom, isStrict);
                    return builder()
                            .addType(
                                    String.format(
                                            "!soy.map.Map<%s,%s>",
                                            keyTypeName.typeExpr(), valueTypeName.typeExpr()))
                            .addRequires(keyTypeName.getGoogRequires())
                            .addRequires(valueTypeName.getGoogRequires())
                            .addRequire(GoogRequire.create("soy.map"))
                            .setPredicate(SOY_MAP_IS_SOY_MAP)
                            .build();
                }

            case PROTO:
                final SoyProtoType protoType = (SoyProtoType) soyType;
                final String protoTypeName = protoType.getNameForBackend(SoyBackendKind.JS_SRC);
                return builder()
                        .addType(
                                isStrict
                                        ? "!" + protoTypeName
                                        // In theory this should be "!" + protoTypeName since we
                                        // don't actually allow null, but it isn't clear that this
                                        // is very useful for users.
                                        : protoTypeName)
                        .addRequire(GoogRequire.create(protoTypeName))
                        .addCoercionStrategy(ValueCoercionStrategy.PROTO)
                        .setPredicate(
                                (value, codeGenerator) ->
                                        Optional.of(
                                                value.instanceOf(
                                                        JsRuntime.protoConstructor(protoType))))
                        .build();

            case RECORD:
                {
                    RecordType recordType = (RecordType) soyType;
                    if (recordType.getMembers().isEmpty()) {
                        return RAW_OBJECT_TYPE;
                    }
                    Builder builder = builder();
                    // LinkedHashMap keeps record members in declaration order in the
                    // generated type expression.
                    Map<String, String> members = new LinkedHashMap<>();
                    for (Map.Entry<String, SoyType> member : recordType.getMembers().entrySet()) {
                        JsType forSoyType =
                                forSoyType(member.getValue(), isIncrementalDom, isStrict);
                        builder.addRequires(forSoyType.getGoogRequires());
                        members.put(
                                member.getKey(),
                                forSoyType.typeExprForRecordMember(/* isOptional= */ false));
                    }
                    return builder
                            // trailing comma is important to prevent parsing ambiguity for the
                            // unknown type
                            .addType(
                                    "{"
                                            + Joiner.on(", ")
                                                    .withKeyValueSeparator(": ")
                                                    .join(members)
                                            + ",}")
                            .setPredicate(GOOG_IS_OBJECT)
                            .build();
                }

            case UNION:
                {
                    UnionType unionType = (UnionType) soyType;
                    Builder builder = builder();
                    final Set<JsType> types = new LinkedHashSet<>();
                    final boolean isNullable = unionType.isNullable();
                    // handle null first so that if other type tests dereference the param they
                    // won't fail
                    if (isNullable) {
                        builder.addTypes(NULL_OR_UNDEFINED_TYPE.typeExpressions);
                        builder.addCoercionStrategy(ValueCoercionStrategy.NULL);
                        types.add(NULL_OR_UNDEFINED_TYPE);
                    }
                    for (SoyType member : unionType.getMembers()) {
                        if (member.getKind() == Kind.NULL) {
                            continue; // handled above
                        }
                        JsType memberType = forSoyType(member, isIncrementalDom, isStrict);
                        builder.addRequires(memberType.extraRequires);
                        builder.addTypes(memberType.typeExpressions);
                        builder.addCoercionStrategies(memberType.coercionStrategies);
                        types.add(memberType);
                    }
                    // The union's predicate is the OR of all member predicates; if any
                    // member lacks a type assertion, the union has none either.
                    return builder
                            .setPredicate(
                                    (value, codeGenerator) -> {
                                        Expression result = null;
                                        // TODO(lukes): this will cause reevaluations, resolve by
                                        // conditionally bouncing into a a temporary variable or
                                        // augmenting the codechunk api to do this automatically.
                                        for (JsType memberType : types) {
                                            Optional<Expression> typeAssertion =
                                                    memberType.getTypeAssertion(
                                                            value, codeGenerator);
                                            if (!typeAssertion.isPresent()) {
                                                return Optional.absent();
                                            }
                                            if (result == null) {
                                                result = typeAssertion.get();
                                            } else {
                                                result = result.or(typeAssertion.get(), codeGenerator);
                                            }
                                        }
                                        return Optional.of(result);
                                    })
                            .build();
                }

            case VE:
                return VE_TYPE;

            case VE_DATA:
                return VE_DATA_TYPE;

            case ERROR:
                // continue
        }
        throw new AssertionError("unhandled soytype: " + soyType);
    }
}
public class Reference { /** * Matches locator to this reference locator . * Descriptors are matched using equal method . All other locator types are * matched using direct comparison . * @ param locator the locator to match . * @ return true if locators are matching and false it they don ' t . * @ see Descriptor */ public boolean match ( Object locator ) { } }
// Locate by direct reference matching if ( _reference . equals ( locator ) ) return true ; // Locate by type else if ( locator instanceof Class < ? > ) return ( ( Class < ? > ) locator ) . isInstance ( _reference ) ; // Locate by direct locator matching else if ( _locator != null ) return _locator . equals ( locator ) ; else return false ;
public class DdlParsers {

    /**
     * Parse the supplied DDL content and return the {@link AstNode root node} of the AST
     * representation.
     *
     * <p>Two-phase strategy: every registered parser first scores the content, then parsing is
     * attempted in best-score-first order until one parser succeeds. Failures are remembered so
     * the first one can be rethrown if nothing succeeds.
     *
     * @param ddl content string; may not be null
     * @param fileName the approximate name of the file containing the DDL content; may be null if
     *        this is not known
     * @return the root tree {@link AstNode}
     * @throws ParsingException if there is an error parsing the supplied DDL content
     */
    public AstNode parse(final String ddl, final String fileName) throws ParsingException {
        CheckArg.isNotEmpty(ddl, "ddl");
        RuntimeException firstException = null;

        // Go through each parser and score the DDL content
        final Map<DdlParser, Integer> scoreMap = new HashMap<DdlParser, Integer>(this.parsers.size());
        final DdlParserScorer scorer = new DdlParserScorer();

        for (final DdlParser parser : this.parsers) {
            try {
                parser.score(ddl, fileName, scorer);
                scoreMap.put(parser, scorer.getScore());
            } catch (RuntimeException e) {
                // Keep only the first failure; a parser that cannot score is simply
                // excluded from the candidate set.
                if (firstException == null) {
                    firstException = e;
                }
            } finally {
                // The scorer is shared across parsers, so it must be reset each pass.
                scorer.reset();
            }
        }

        if (scoreMap.isEmpty()) {
            // No parser produced a score: report the first scoring failure, or a
            // generic parsing error when all parsers failed silently.
            if (firstException == null) {
                throw new ParsingException(Position.EMPTY_CONTENT_POSITION,
                                           DdlSequencerI18n.errorParsingDdlContent.text(this.parsers.size()));
            }
            throw firstException;
        }

        // sort the scores
        final List<Entry<DdlParser, Integer>> scoredParsers =
                new ArrayList<Entry<DdlParser, Integer>>(scoreMap.entrySet());
        Collections.sort(scoredParsers, SORTER);

        firstException = null;
        AstNode astRoot = null;

        for (final Entry<DdlParser, Integer> scoredParser : scoredParsers) {
            try {
                final DdlParser parser = scoredParser.getKey();

                // create DDL root node
                astRoot = createDdlStatementsContainer(parser.getId());

                // parse
                parser.parse(ddl, astRoot, null);
                return astRoot; // successfully parsed
            } catch (final RuntimeException e) {
                // Detach the partially-built tree before trying the next parser.
                if (astRoot != null) {
                    astRoot.removeFromParent();
                }
                if (firstException == null) {
                    firstException = e;
                }
            }
        }

        // Every candidate parser failed.
        if (firstException == null) {
            throw new ParsingException(Position.EMPTY_CONTENT_POSITION,
                                       DdlSequencerI18n.errorParsingDdlContent.text());
        }
        throw firstException;
    }
}
public class CPDefinitionVirtualSettingLocalServiceUtil { /** * Adds the cp definition virtual setting to the database . Also notifies the appropriate model listeners . * @ param cpDefinitionVirtualSetting the cp definition virtual setting * @ return the cp definition virtual setting that was added */ public static com . liferay . commerce . product . type . virtual . model . CPDefinitionVirtualSetting addCPDefinitionVirtualSetting ( com . liferay . commerce . product . type . virtual . model . CPDefinitionVirtualSetting cpDefinitionVirtualSetting ) { } }
return getService ( ) . addCPDefinitionVirtualSetting ( cpDefinitionVirtualSetting ) ;
public class PublishLayerVersionRequest { /** * A list of compatible < a href = " https : / / docs . aws . amazon . com / lambda / latest / dg / lambda - runtimes . html " > function * runtimes < / a > . Used for filtering with < a > ListLayers < / a > and < a > ListLayerVersions < / a > . * @ param compatibleRuntimes * A list of compatible < a href = " https : / / docs . aws . amazon . com / lambda / latest / dg / lambda - runtimes . html " > function * runtimes < / a > . Used for filtering with < a > ListLayers < / a > and < a > ListLayerVersions < / a > . * @ return Returns a reference to this object so that method calls can be chained together . * @ see Runtime */ public PublishLayerVersionRequest withCompatibleRuntimes ( Runtime ... compatibleRuntimes ) { } }
com . amazonaws . internal . SdkInternalList < String > compatibleRuntimesCopy = new com . amazonaws . internal . SdkInternalList < String > ( compatibleRuntimes . length ) ; for ( Runtime value : compatibleRuntimes ) { compatibleRuntimesCopy . add ( value . toString ( ) ) ; } if ( getCompatibleRuntimes ( ) == null ) { setCompatibleRuntimes ( compatibleRuntimesCopy ) ; } else { getCompatibleRuntimes ( ) . addAll ( compatibleRuntimesCopy ) ; } return this ;
public class CmsPropertyAdvanced { /** * Deletes the current resource if the dialog is in wizard mode . < p > * If the dialog is not in wizard mode , the resource is not deleted . < p > * @ throws JspException if including the error page fails */ public void actionDeleteResource ( ) throws JspException { } }
if ( ( getParamDialogmode ( ) != null ) && getParamDialogmode ( ) . startsWith ( MODE_WIZARD ) ) { // only delete resource if dialog mode is a wizard mode try { getCms ( ) . deleteResource ( getParamResource ( ) , CmsResource . DELETE_PRESERVE_SIBLINGS ) ; } catch ( Throwable e ) { // error deleting the resource , show error dialog includeErrorpage ( this , e ) ; } }
public class PropertyFileSnitch { /** * Return the rack for which an endpoint resides in * @ param endpoint the endpoint to process * @ return string of rack */ public String getRack ( InetAddress endpoint ) { } }
String [ ] info = getEndpointInfo ( endpoint ) ; assert info != null : "No location defined for endpoint " + endpoint ; return info [ 1 ] ;
public class StreamingConnectionImpl { /** * Process an ack from the STAN cluster */ void processAck ( Message msg ) { } }
PubAck pa ; Exception ex = null ; try { pa = PubAck . parseFrom ( msg . getData ( ) ) ; } catch ( InvalidProtocolBufferException e ) { // If we are speaking to a server we don ' t understand , let the // user know . System . err . println ( "Protocol error: " + e . getStackTrace ( ) ) ; return ; } // Remove AckClosure ackClosure = removeAck ( pa . getGuid ( ) ) ; if ( ackClosure != null ) { // Capture error if it exists . String ackError = pa . getError ( ) ; if ( ackClosure . ah != null ) { if ( ! ackError . isEmpty ( ) ) { ex = new IOException ( ackError ) ; } // Perform the ackHandler callback ackClosure . ah . onAck ( pa . getGuid ( ) , ex ) ; } else if ( ackClosure . ch != null ) { try { ackClosure . ch . put ( ackError ) ; } catch ( InterruptedException e ) { // ignore } } }
public class TextUnit {

    /**
     * Compress spaces around a list of instructions, following these rules:
     * - The first instruction that is on the left usually make contact with a component.
     *
     * <p>Literal text instructions have their leading whitespace collapsed to at most one
     * separator (a single space or the original line break), trailing whitespace collapsed
     * to one character, and all-whitespace instructions either reduced to the separator or
     * removed outright. The list is mutated in place.
     *
     * @param instructionBuffer the instruction list to compress in place
     * @param size the number of instructions to process
     * @return the (possibly reduced) size after removals
     */
    final static int compressSpaces(List<Instruction> instructionBuffer, int size) {
        boolean addleftspace = true;   // true only for the first instruction
        boolean addrightspace = false; // set when reaching the last instruction
        boolean skipnext = false;      // set after a LiteralXMLInstruction to skip its companion
        for (int i = 0; i < size; i++) {
            String text = null;
            String newText = null;
            int instructionType = 0; // 1 = escaped literal text, 2 = non-escaped literal text
            if (skipnext) {
                skipnext = false;
                continue;
            }
            Instruction ins = instructionBuffer.get(i);
            if (i + 1 == size) {
                addrightspace = true;
            }
            if (ins instanceof LiteralTextInstruction) {
                text = ((LiteralTextInstruction) ins).getText();
                instructionType = 1;
            } else if (ins instanceof LiteralNonExcapedTextInstruction) {
                // NOTE(review): this casts to LiteralTextInstruction, not
                // LiteralNonExcapedTextInstruction — presumably the latter subclasses the
                // former; confirm, otherwise this throws ClassCastException.
                text = ((LiteralTextInstruction) ins).getText();
                instructionType = 2;
            } else if (ins instanceof LiteralXMLInstruction) {
                // XML literals are kept verbatim; also skip the following instruction.
                skipnext = true;
                continue;
            }
            if (text != null && text.length() > 0) {
                int firstCharLocation = -1;
                int leftChar = 0; // 0 = first char on left 1 = \n 2 = \r 3 = \r\n
                int lenght = text.length();
                String leftText = null;
                // Scan for the first non-whitespace character, remembering what kind
                // of line break (if any) started the leading whitespace run.
                for (int j = 0; j < lenght; j++) {
                    char c = text.charAt(j);
                    if (leftChar == 0) {
                        if (c == '\r') {
                            leftChar = 2;
                            if (j + 1 < lenght) {
                                if (text.charAt(j + 1) == '\n') {
                                    leftChar = 3;
                                }
                            }
                        }
                        if (c == '\n') {
                            leftChar = 1;
                        }
                    }
                    if (Character.isWhitespace(c)) {
                        continue;
                    } else {
                        firstCharLocation = j;
                        break;
                    }
                }
                if (firstCharLocation == -1) {
                    // Whole instruction is whitespace.
                    firstCharLocation = lenght;
                }
                // Define the character on the left
                if (firstCharLocation > 0) {
                    switch (leftChar) {
                        case 1:
                            leftText = "\n";
                            break;
                        case 2:
                            leftText = "\r";
                            break;
                        case 3:
                            leftText = "\r\n";
                            break;
                        default:
                            // Plain whitespace: keep a single copy of the first character.
                            leftText = (lenght > 1) ? text.substring(0, 1) : text;
                            break;
                    }
                } else {
                    leftText = "";
                }
                if (firstCharLocation == lenght && lenght > 1) {
                    // All the instruction is space, replace with an instruction
                    // with only one space
                    if (addleftspace || addrightspace) {
                        newText = leftText;
                    } else {
                        // Interior all-whitespace instruction: drop it and fix up the
                        // loop index and logical size accordingly.
                        instructionBuffer.remove(i);
                        i--;
                        size--;
                    }
                } else {
                    int lastCharLocation = getLastTextCharLocationIgnoringSpacesTabsAndCarriageReturn(text);
                    // If right space, increment in 1
                    if (lastCharLocation + 1 < text.length()) {
                        lastCharLocation = lastCharLocation + 1;
                    }
                    if (firstCharLocation > 0) {
                        newText = leftText + text.substring(firstCharLocation, lastCharLocation + 1);
                    } else {
                        newText = text.substring(firstCharLocation, lastCharLocation + 1);
                    }
                }
                if (newText != null) {
                    // Replace with the same instruction kind carrying the trimmed text.
                    if (instructionType == 1) {
                        instructionBuffer.set(i, new LiteralTextInstruction(newText));
                    } else if (instructionType == 2) {
                        instructionBuffer.set(i, new LiteralNonExcapedTextInstruction(newText));
                    }
                }
            }
            addleftspace = false;
        }
        return size;
    }
}
public class X509CRLImpl {

    /**
     * Encodes the "to-be-signed" CRL to the OutputStream.
     *
     * <p>Emits the TBSCertList fields in ASN.1 order: [version], signature algorithm,
     * issuer, thisUpdate, [nextUpdate], [revokedCertificates], [crlExtensions], then
     * wraps the whole thing in a SEQUENCE, caches the bytes in {@code tbsCertList}
     * and writes them to {@code out}.
     *
     * @param out the OutputStream to write to.
     * @exception CRLException on encoding errors.
     */
    public void encodeInfo(OutputStream out) throws CRLException {
        try {
            DerOutputStream tmp = new DerOutputStream();
            DerOutputStream rCerts = new DerOutputStream();
            DerOutputStream seq = new DerOutputStream();

            if (version != 0) // v2 crl encode version
                tmp.putInteger(version);
            infoSigAlgId.encode(tmp);
            if ((version == 0) && (issuer.toString() == null))
                throw new CRLException("Null Issuer DN not allowed in v1 CRL");
            issuer.encode(tmp);

            // Dates before 2050 use UTCTime, later ones GeneralizedTime (RFC 5280).
            if (thisUpdate.getTime() < YR_2050)
                tmp.putUTCTime(thisUpdate);
            else
                tmp.putGeneralizedTime(thisUpdate);

            if (nextUpdate != null) {
                if (nextUpdate.getTime() < YR_2050)
                    tmp.putUTCTime(nextUpdate);
                else
                    tmp.putGeneralizedTime(nextUpdate);
            }

            // revokedCertificates is OPTIONAL and omitted entirely when empty.
            if (!revokedList.isEmpty()) {
                for (X509CRLEntry entry : revokedList) {
                    ((X509CRLEntryImpl) entry).encode(rCerts);
                }
                tmp.write(DerValue.tag_Sequence, rCerts);
            }

            if (extensions != null)
                extensions.encode(tmp, isExplicit);

            seq.write(DerValue.tag_Sequence, tmp);

            // Cache the encoded TBS bytes for later signing/verification.
            tbsCertList = seq.toByteArray();
            out.write(tbsCertList);
        } catch (IOException e) {
            throw new CRLException("Encoding error: " + e.getMessage());
        }
    }
}
public class Variables { /** * Remove the top { @ link Variables } layer from the the stack . */ public Map < String , Iterable < ? extends WindupVertexFrame > > pop ( ) { } }
Map < String , Iterable < ? extends WindupVertexFrame > > frame = deque . pop ( ) ; return frame ;
public class When { /** * Sets up automatic binding and unbinding of { @ code target } ' s items to / from * { @ code source } ' s items , based on the changing value of the encapsulated * condition . In other words , whenever the encapsulated condition is * { @ code true } , { @ code target } ' s content is synced with { @ code source } . * Whenever the encapsulated condition is { @ code false } , the sync is * interrupted . This keeps happening until { @ code unsubscribe ( ) } is called * on the returned subscription . Unsubscribing the returned subscription may * be skipped safely only when the lifetimes of all the encapsulated * condition , { @ code source } and { @ code target } are the same . * @ param target target of the conditional binding * @ param source source of the conditional binding * @ return a subscription that can be used to dispose the conditional * binding set up by this method , i . e . to stop observing the encapsulated * condition and , if the last observed value of the encapsulated condition * was { @ code true } , stop the synchronization { @ code target } ' s content with * { @ code source } ' s content . */ public < T > Subscription listBind ( List < ? super T > target , ObservableList < ? extends T > source ) { } }
return bind ( ( ) -> EasyBind . listBind ( target , source ) ) ;
public class ClassPathResource { /** * Remove any leading explicit classpath resource prefixes . * @ param sPath * The source path to strip the class path prefixes from . May be * < code > null < / code > . * @ return < code > null < / code > if the parameter was < code > null < / code > . * @ see # CLASSPATH _ PREFIX _ LONG * @ see # CLASSPATH _ PREFIX _ SHORT */ @ Nullable public static String getWithoutClassPathPrefix ( @ Nullable final String sPath ) { } }
if ( StringHelper . startsWith ( sPath , CLASSPATH_PREFIX_LONG ) ) return sPath . substring ( CLASSPATH_PREFIX_LONG . length ( ) ) ; if ( StringHelper . startsWith ( sPath , CLASSPATH_PREFIX_SHORT ) ) return sPath . substring ( CLASSPATH_PREFIX_SHORT . length ( ) ) ; return sPath ;
public class FastJsonProvider { /** * Check whether a class can be serialized or deserialized . It can check * based on packages , annotations on entities or explicit classes . * @ param type class need to check * @ return true if valid */ protected boolean isValidType ( Class < ? > type , Annotation [ ] classAnnotations ) { } }
if ( type == null ) return false ; if ( annotated ) { return checkAnnotation ( type ) ; } else if ( scanpackages != null ) { String classPackage = type . getPackage ( ) . getName ( ) ; for ( String pkg : scanpackages ) { if ( classPackage . startsWith ( pkg ) ) { if ( annotated ) { return checkAnnotation ( type ) ; } else return true ; } } return false ; } else if ( clazzes != null ) { for ( Class < ? > cls : clazzes ) { // must strictly equal . Don ' t check // inheritance if ( cls == type ) return true ; } return false ; } return true ;
public class Logger {
    /**
     * Issue a log message and throwable at the given log level and a specific logger class name.
     *
     * @param level the level
     * @param loggerFqcn the logger class name
     * @param message the message
     * @param t the throwable
     */
    public void log(Level level, String loggerFqcn, Object message, Throwable t) { } }
// Delegates to the internal doLog(); the null argument is the (absent)
// message-format parameters array.
doLog(level, loggerFqcn, message, null, t);
public class ProcessDefinitionManager {
    /**
     * Cascades the deletion of the process definition to the process instances.
     * Skips the custom listeners if the flag was set to true.
     *
     * @param processDefinitionId the process definition id
     * @param skipCustomListeners true if the custom listeners should be skipped at process instance deletion
     * @param skipIoMappings specifies whether input/output mappings for tasks should be invoked
     */
    protected void cascadeDeleteProcessInstancesForProcessDefinition(String processDefinitionId, boolean skipCustomListeners, boolean skipIoMappings) { } }
// The string is the recorded deletion reason; the hard-coded 'true' enables
// cascading the delete to dependent instances.
getProcessInstanceManager().deleteProcessInstancesByProcessDefinition(processDefinitionId, "deleted process definition", true, skipCustomListeners, skipIoMappings);
public class KeyValuePairs { /** * Batched */ public static BatchedKeyStringValueString string ( KeyValuePair < byte [ ] , ? > raw , byte [ ] value , int batch , boolean last ) { } }
BatchedKeyStringValueString kv = new BatchedKeyStringValueString ( ) ; copy ( raw , kv , batch , last ) ; kv . setValue ( value ) ; return kv ;
public class ApplicationExceptionFactory {
    /**
     * Recreates an ApplicationException object from a serialized ErrorDescription.
     * It tries to restore the original exception type using the error category field,
     * falling back to a generic UnknownException for unrecognized categories.
     *
     * @param description a serialized error description received as a result of a remote call
     * @return new ApplicationException object built from the serialized ErrorDescription
     * @throws NullPointerException if description is null
     */
    public ApplicationException create(ErrorDescription description) { } }
if (description == null)
    throw new NullPointerException("Description cannot be null");
ApplicationException error = null;
String category = description.getCategory();
String code = description.getCode();
String message = description.getMessage();
String correlationId = description.getCorrelationId();
// Create well-known exception type based on error category.
// Note: the constant is the receiver of equals(), so a null category safely
// falls through to the else branch.
if (ErrorCategory.Unknown.equals(category))
    error = new UnknownException(correlationId, code, message);
else if (ErrorCategory.Internal.equals(category))
    error = new InternalException(correlationId, code, message);
else if (ErrorCategory.Misconfiguration.equals(category))
    error = new ConfigException(correlationId, code, message);
else if (ErrorCategory.NoResponse.equals(category))
    error = new ConnectionException(correlationId, code, message);
else if (ErrorCategory.FailedInvocation.equals(category))
    error = new InvocationException(correlationId, code, message);
else if (ErrorCategory.FileError.equals(category))
    error = new FileException(correlationId, code, message);
else if (ErrorCategory.BadRequest.equals(category))
    error = new BadRequestException(correlationId, code, message);
else if (ErrorCategory.Unauthorized.equals(category))
    error = new UnauthorizedException(correlationId, code, message);
else if (ErrorCategory.Conflict.equals(category))
    error = new ConflictException(correlationId, code, message);
else if (ErrorCategory.NotFound.equals(category))
    error = new NotFoundException(correlationId, code, message);
else if (ErrorCategory.InvalidState.equals(category))
    error = new InvalidStateException(correlationId, code, message);
else if (ErrorCategory.Unsupported.equals(category))
    error = new UnsupportedException(correlationId, code, message);
else {
    // Unrecognized category: keep the raw category and status so nothing is lost.
    error = new UnknownException();
    error.setCategory(category);
    error.setStatus(description.getStatus());
}
// Fill error with details common to all categories.
error.setDetails(description.getDetails());
error.setCauseString(description.getCause());
error.setStackTraceString(description.getStackTrace());
return error;
public class GetTagsResult {
    /**
     * The requested tags.
     *
     * @param tags
     *        The requested tags.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetTagsResult withTags(java.util.Map<String, String> tags) { } }
// Fluent variant of setTags(): store the tags, then return this for chaining.
setTags(tags);
return this;
public class ConstructorBasedConverter { /** * Converts the given input to an object by using the constructor approach . Notice that the constructor must * expect receiving a { @ literal null } value . * @ param input the input , can be { @ literal null } * @ return the instance of T * @ throws IllegalArgumentException if the instance of T cannot be created from the input . */ @ Override public T fromString ( String input ) throws IllegalArgumentException { } }
try { return constructor . newInstance ( input ) ; } catch ( InstantiationException | IllegalAccessException | InvocationTargetException e ) { LoggerFactory . getLogger ( this . getClass ( ) ) . error ( "Cannot create an instance of {} from \"{}\"" , constructor . getDeclaringClass ( ) . getName ( ) , input , e ) ; if ( e . getCause ( ) != null ) { throw new IllegalArgumentException ( e . getCause ( ) ) ; } else { throw new IllegalArgumentException ( e ) ; } }
public class AstBuilder { /** * } statement - - - - - */ @ Override public ClassNode visitTypeDeclaration ( TypeDeclarationContext ctx ) { } }
if ( asBoolean ( ctx . classDeclaration ( ) ) ) { // e . g . class A { } ctx . classDeclaration ( ) . putNodeMetaData ( TYPE_DECLARATION_MODIFIERS , this . visitClassOrInterfaceModifiersOpt ( ctx . classOrInterfaceModifiersOpt ( ) ) ) ; return configureAST ( this . visitClassDeclaration ( ctx . classDeclaration ( ) ) , ctx ) ; } throw createParsingFailedException ( "Unsupported type declaration: " + ctx . getText ( ) , ctx ) ;
public class SnapshotsInner {
    /**
     * Deletes a snapshot.
     *
     * @param resourceGroupName The name of the resource group.
     * @param snapshotName The name of the snapshot that is being created. The name can't be changed after the snapshot is created. Supported characters for the name are a-z, A-Z, 0-9 and _. The max name length is 80 characters.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String snapshotName) { } }
// Blocks until the long-running delete finishes: last() waits for the final
// polling response and body() surfaces any service error.
deleteWithServiceResponseAsync(resourceGroupName, snapshotName).toBlocking().last().body();
public class Converters { /** * Registers the { @ link Duration } converter . * @ param builder The GSON builder to register the converter with . * @ return A reference to { @ code builder } . */ public static GsonBuilder registerDuration ( GsonBuilder builder ) { } }
if ( builder == null ) { throw new NullPointerException ( "builder cannot be null" ) ; } builder . registerTypeAdapter ( DURATION_TYPE , new DurationConverter ( ) ) ; return builder ;
public class InfinispanDialect { /** * Get a strategy instance which knows how to acquire a database - level lock * of the specified / mode for this dialect . * @ param lockable The persister for the entity to be locked . * @ param lockMode The type of lock to be acquired . * @ return The appropriate locking strategy . * @ since 3.2 */ @ Override public LockingStrategy getLockingStrategy ( Lockable lockable , LockMode lockMode ) { } }
if ( lockMode == LockMode . PESSIMISTIC_FORCE_INCREMENT ) { return new PessimisticForceIncrementLockingStrategy ( lockable , lockMode ) ; } else if ( lockMode == LockMode . PESSIMISTIC_WRITE ) { return new InfinispanPessimisticWriteLockingStrategy < EK > ( lockable , lockMode ) ; } else if ( lockMode == LockMode . PESSIMISTIC_READ ) { // TODO find a more efficient pessimistic read return new InfinispanPessimisticWriteLockingStrategy < EK > ( lockable , lockMode ) ; } else if ( lockMode == LockMode . OPTIMISTIC ) { return new OptimisticLockingStrategy ( lockable , lockMode ) ; } else if ( lockMode == LockMode . OPTIMISTIC_FORCE_INCREMENT ) { return new OptimisticForceIncrementLockingStrategy ( lockable , lockMode ) ; } else { return null ; }
public class Operator { /** * compares a Date with a Date * @ param left * @ param right * @ return difference as int */ public static int compare ( Date left , Date right ) { } }
return compare ( left . getTime ( ) / 1000 , right . getTime ( ) / 1000 ) ;
public class AssertCoverage {
    /**
     * Populates a list of classes from a given java source directory. All source files
     * must have a ".class" file in the class path.
     *
     * @param classes Set to populate.
     * @param baseDir Root directory like "src/main/java".
     * @param srcDir A directory inside the root directory like "a/b/c" (path of the package "a.b.c").
     * @param recursive If sub directories should be included <code>true</code> else <code>false</code>.
     * @param classFilter Filter that decides if a class should have a corresponding test or not.
     */
    static void analyzeDir(final Set<Class<?>> classes, final File baseDir, final File srcDir, final boolean recursive, final ClassFilter classFilter) { } }
final FileProcessor fileProcessor = new FileProcessor(new FileHandler() {
    @Override
    public final FileHandlerResult handleFile(final File file) {
        if (file.isDirectory()) {
            // Directory: descend only when recursion was requested.
            if (recursive) {
                return FileHandlerResult.CONTINUE;
            }
            return FileHandlerResult.SKIP_SUBDIRS;
        }
        // File: only plain .java sources are considered (package-info has no class).
        final String name = file.getName();
        if (name.endsWith(".java") && !name.equals("package-info.java")) {
            // Package name is derived from the path relative to the source root.
            final String packageName = Utils4J.getRelativePath(baseDir, file.getParentFile()).replace(File.separatorChar, '.');
            // Strip the 5-character ".java" extension to get the simple class name.
            final String simpleName = name.substring(0, name.length() - 5);
            final String className = packageName + "." + simpleName;
            final Class<?> clasz = classForName(className);
            if (isInclude(clasz, classFilter)) {
                classes.add(clasz);
            }
        }
        return FileHandlerResult.CONTINUE;
    }
});
fileProcessor.process(srcDir);
public class BatchObjectUpdater { /** * Update the given object , which must exist , using the given set of current scalar values . */ private ObjectResult updateObject ( DBObject dbObj , Map < String , String > currScalarMap , Map < String , Map < String , Integer > > targObjShardNos ) { } }
ObjectResult objResult = null ; if ( Utils . isEmpty ( dbObj . getObjectID ( ) ) ) { objResult = ObjectResult . newErrorResult ( "Object ID is required" , null ) ; } else if ( currScalarMap == null ) { objResult = ObjectResult . newErrorResult ( "No object found" , dbObj . getObjectID ( ) ) ; } else { ObjectUpdater objUpdater = new ObjectUpdater ( m_tableDef ) ; if ( targObjShardNos . size ( ) > 0 ) { objUpdater . setTargetObjectShardNumbers ( targObjShardNos ) ; } objResult = objUpdater . updateObject ( m_parentTran , dbObj , currScalarMap ) ; } return objResult ;
public class DigitalOceanClient {
    /**
     * Easy method for HTTP header values. Defaults to the first one.
     */
    private String getSimpleHeaderValue(String header, HttpResponse httpResponse) { } }
// The boolean flag selects the default behavior of the three-arg overload
// ("defaults to first one", per the javadoc above).
return getSimpleHeaderValue(header, httpResponse, true);
public class OutboundHandshake {
    /**
     * Gets and verifies the server digest within the handshake bytes (s1).
     * Tries the current offset algorithm first, then the alternate one.
     *
     * @return true if the server digest is found and verified, false otherwise;
     *         also returns true on failure when forceVerification is disabled
     */
    private boolean getServerDigestPosition() { } }
boolean result = false;
// log.trace("BigEndian bytes: {}", Hex.encodeHexString(s1));
log.trace("Trying algorithm: {}", algorithm);
digestPosServer = getDigestOffset(algorithm, s1, 0);
log.debug("Server digest position offset: {}", digestPosServer);
if (!(result = verifyDigest(digestPosServer, s1, GENUINE_FMS_KEY, 36))) {
    // First attempt failed: flip to the alternate digest-offset algorithm and retry.
    algorithm ^= 1;
    log.trace("Trying algorithm: {}", algorithm);
    digestPosServer = getDigestOffset(algorithm, s1, 0);
    log.debug("Server digest position offset: {}", digestPosServer);
    if (!(result = verifyDigest(digestPosServer, s1, GENUINE_FMS_KEY, 36))) {
        log.warn("Server digest verification failed");
        // if we dont mind that verification routines failed
        if (!forceVerification) {
            return true;
        }
    } else {
        log.debug("Server digest verified");
    }
} else {
    log.debug("Server digest verified");
}
return result;
public class BeanCopierFactory { /** * 会被动态类使用 */ public static PropConverter < ? , ? > getConverter ( int sequence , String propName ) { } }
Map < String , PropConverter < ? , ? > > map = SEQ_PROP_CVT_MAP . get ( sequence ) ; return map . get ( propName ) ;
public class DeleteMetricFilterRequestMarshaller {
    /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param deleteMetricFilterRequest request to marshall; must not be null
     * @param protocolMarshaller target marshaller
     */
    public void marshall(DeleteMetricFilterRequest deleteMetricFilterRequest, ProtocolMarshaller protocolMarshaller) { } }
if (deleteMetricFilterRequest == null) {
    throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
    // Bind each request field to its protocol marshalling descriptor.
    protocolMarshaller.marshall(deleteMetricFilterRequest.getLogGroupName(), LOGGROUPNAME_BINDING);
    protocolMarshaller.marshall(deleteMetricFilterRequest.getFilterName(), FILTERNAME_BINDING);
} catch (Exception e) {
    // Wrap any low-level failure in the SDK's client exception type.
    throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
public class Signer {
    /**
     * Returns the pre-signed POST-policy signature for the given stringToSign,
     * secret key, date and region.
     */
    public static String postPresignV4(String stringToSign, String secretKey, DateTime date, String region) throws NoSuchAlgorithmException, InvalidKeyException { } }
// Build a signer carrying only the fields a POST policy needs; the remaining
// constructor slots are unused here.
Signer signer = new Signer(null, null, date, region, null, secretKey, null);
signer.stringToSign = stringToSign;
// Order matters: the signing key must be derived before the signature is computed.
signer.setSigningKey();
signer.setSignature();
return signer.signature;
public class ValidationDataGroup { /** * Add rule . * @ param vd the vd */ public void addRule ( ValidationData vd ) { } }
this . rules . addAll ( vd . getValidationRules ( ) . stream ( ) . filter ( vr -> vr . isUse ( ) ) . collect ( Collectors . toList ( ) ) ) ;
public class MysqlExportService { /** * This will get the final output * sql file name . * @ return String */ public String getSqlFilename ( ) { } }
return isSqlFileNamePropertySet ( ) ? properties . getProperty ( SQL_FILE_NAME ) + ".sql" : new SimpleDateFormat ( "d_M_Y_H_mm_ss" ) . format ( new Date ( ) ) + "_" + database + "_database_dump.sql" ;
public class DatumWriterGenerator {
    /**
     * Returns the encode method for the given type and schema. The same method will be
     * returned if the same type and schema has been passed to the method before.
     *
     * @param outputType Type information of the data type for output
     * @param schema Schema to use for output.
     * @return A method for encoding the given output type and schema.
     */
    private Method getEncodeMethod(TypeToken<?> outputType, Schema schema) { } }
// Cache key combines the normalized type name with the schema hash, so the same
// (type, schema) pair always resolves to the same generated method.
String key = String.format("%s%s", normalizeTypeName(outputType), schema.getSchemaHash());
Method method = encodeMethods.get(key);
if (method != null) {
    return method;
}
// Generate the encode method (value, encoder, schema, set)
TypeToken<?> callOutputType = getCallTypeToken(outputType, schema);
String methodName = String.format("encode%s", key);
method = getMethod(void.class, methodName, callOutputType.getRawType(), Encoder.class, Schema.class, Set.class);
// Put the method into map first before generating the body in order to support recursive data type.
encodeMethods.put(key, method);
String methodSignature = Signatures.getMethodSignature(method, new TypeToken[] { callOutputType, null, null, new TypeToken<Set<Object>>() { } });
GeneratorAdapter mg = new GeneratorAdapter(Opcodes.ACC_PRIVATE, method, methodSignature, new Type[] { Type.getType(IOException.class) }, classWriter);
// Slots 0-3 are the local-variable indices of (value, encoder, schema, set).
generateEncodeBody(mg, schema, outputType, 0, 1, 2, 3);
mg.returnValue();
mg.endMethod();
return method;
public class AuditUtils { /** * Get the method from the request - generally " GET " or " POST " * @ param req * @ return the method */ public static String getRequestMethod ( HttpServletRequest req ) { } }
String method ; if ( req . getMethod ( ) != null ) method = req . getMethod ( ) . toUpperCase ( ) ; else method = AuditEvent . TARGET_METHOD_GET ; return method ;
public class SplitTaskFactory {
    /**
     * Process the timephased resource assignment data to work out the split
     * structure of the task.
     *
     * @param task parent task
     * @param timephasedComplete completed resource assignment work
     * @param timephasedPlanned planned resource assignment work
     */
    public void processSplitData(Task task, List<TimephasedWork> timephasedComplete, List<TimephasedWork> timephasedPlanned) { } }
Date splitsComplete = null;
TimephasedWork lastComplete = null;
TimephasedWork firstPlanned = null;
if (!timephasedComplete.isEmpty()) {
    lastComplete = timephasedComplete.get(timephasedComplete.size() - 1);
    splitsComplete = lastComplete.getFinish();
}
if (!timephasedPlanned.isEmpty()) {
    firstPlanned = timephasedPlanned.get(0);
}
LinkedList<DateRange> splits = new LinkedList<DateRange>();
TimephasedWork lastAssignment = null;
DateRange lastRange = null;
// Pass 1: walk the completed work, merging adjacent ranges that both carry
// non-zero work into a single date range (zero-work gaps delimit splits).
for (TimephasedWork assignment : timephasedComplete) {
    if (lastAssignment != null && lastRange != null && lastAssignment.getTotalAmount().getDuration() != 0 && assignment.getTotalAmount().getDuration() != 0) {
        splits.removeLast();
        lastRange = new DateRange(lastRange.getStart(), assignment.getFinish());
    } else {
        lastRange = new DateRange(assignment.getStart(), assignment.getFinish());
    }
    splits.add(lastRange);
    lastAssignment = assignment;
}
// We may not have a split, we may just have a partially complete split:
// if completed work flows directly into planned work, the two halves will be
// re-joined below via splitStart.
Date splitStart = null;
if (lastComplete != null && firstPlanned != null && lastComplete.getTotalAmount().getDuration() != 0 && firstPlanned.getTotalAmount().getDuration() != 0) {
    lastRange = splits.removeLast();
    splitStart = lastRange.getStart();
}
lastAssignment = null;
lastRange = null;
// Pass 2: same merge over the planned work; the first planned range may be
// extended backwards to splitStart to bridge a partially complete split.
for (TimephasedWork assignment : timephasedPlanned) {
    if (splitStart == null) {
        if (lastAssignment != null && lastRange != null && lastAssignment.getTotalAmount().getDuration() != 0 && assignment.getTotalAmount().getDuration() != 0) {
            splits.removeLast();
            lastRange = new DateRange(lastRange.getStart(), assignment.getFinish());
        } else {
            lastRange = new DateRange(assignment.getStart(), assignment.getFinish());
        }
    } else {
        lastRange = new DateRange(splitStart, assignment.getFinish());
    }
    splits.add(lastRange);
    splitStart = null;
    lastAssignment = assignment;
}
// We must have a minimum of 3 entries for this to be a valid split task
if (splits.size() > 2) {
    task.getSplits().addAll(splits);
    task.setSplitCompleteDuration(splitsComplete);
} else {
    task.setSplits(null);
    task.setSplitCompleteDuration(null);
}
public class Deserializer {
    /**
     * Deserializes the input parameter and returns an Object which must then be cast
     * to a core data type.
     *
     * @param <T> type
     * @param in input
     * @param target target type; used for enum conversion of strings (may be null)
     * @return Object object
     */
    @SuppressWarnings({ } }
"unchecked", "rawtypes" })
public static <T> T deserialize(Input in, Type target) {
    // Lazily load the deserialization black-list on first use.
    if (BLACK_LIST == null) {
        // log.info("Black list is not yet initialized");
        try {
            loadBlackList();
        } catch (IOException e) {
            throw new RuntimeException("Failed to init black-list");
        }
    }
    byte type = in.readDataType();
    if (log.isTraceEnabled()) {
        log.trace("Type: {} target: {}", type, (target != null ? target.toString() : "Target not specified"));
    } else if (log.isDebugEnabled()) {
        log.debug("Datatype: {}", DataTypes.toStringValue(type));
    }
    Object result = null;
    // Dispatch on the wire-level data-type tag.
    switch (type) {
        case DataTypes.CORE_NULL:
            result = in.readNull();
            break;
        case DataTypes.CORE_BOOLEAN:
            result = in.readBoolean();
            break;
        case DataTypes.CORE_NUMBER:
            result = in.readNumber();
            break;
        case DataTypes.CORE_STRING:
            try {
                // Strings may be converted to an enum constant if the caller asked for one.
                if (target != null && ((Class) target).isEnum()) {
                    log.warn("Enum target specified");
                    String name = in.readString();
                    result = Enum.valueOf((Class) target, name);
                } else {
                    result = in.readString();
                }
            } catch (RuntimeException e) {
                log.error("failed to deserialize {}", target, e);
                throw e;
            }
            break;
        case DataTypes.CORE_DATE:
            result = in.readDate();
            break;
        case DataTypes.CORE_ARRAY:
            result = in.readArray(target);
            break;
        case DataTypes.CORE_MAP:
            result = in.readMap();
            break;
        case DataTypes.CORE_XML:
            result = in.readXML();
            break;
        case DataTypes.CORE_OBJECT:
            result = in.readObject();
            break;
        case DataTypes.CORE_BYTEARRAY:
            result = in.readByteArray();
            break;
        case DataTypes.CORE_VECTOR_INT:
            result = in.readVectorInt();
            break;
        case DataTypes.CORE_VECTOR_UINT:
            result = in.readVectorUInt();
            break;
        case DataTypes.CORE_VECTOR_NUMBER:
            result = in.readVectorNumber();
            break;
        case DataTypes.CORE_VECTOR_OBJECT:
            result = in.readVectorObject();
            break;
        case DataTypes.OPT_REFERENCE:
            result = in.readReference();
            break;
        case DataTypes.CORE_END_OBJECT:
            // end-of-object returned, not sure that we should ever get here
            log.debug("End-of-object detected");
            break;
        default:
            // Unknown tag: delegate to the custom-type reader.
            result = in.readCustom();
            break;
    }
    return (T) result;
}
public class CmsChangePasswordDialog {
    /**
     * Submits the password change: validates the two new-password fields, rejects
     * reuse of the old password, applies the change, and either closes the dialog
     * or (for forced changes at login) shows a success alert and redirects.<p>
     */
    void submit() { } }
String password1 = m_form.getPassword1();
String password2 = m_form.getPassword2();
if (validatePasswords(password1, password2)) {
    String oldPassword = m_form.getOldPassword();
    boolean error = false;
    if (oldPassword.equals(password1)) {
        // The new password must differ from the old one.
        m_form.setErrorPassword1(new UserError(Messages.get().getBundle(m_locale).key(Messages.GUI_PWCHANGE_DIFFERENT_PASSWORD_REQUIRED_0)), OpenCmsTheme.SECURITY_INVALID);
        error = true;
    } else {
        try {
            m_cms.setPassword(m_user.getName(), oldPassword, password1);
        } catch (CmsException e) {
            // Most likely a wrong old password; surface it on that field.
            m_form.setErrorOldPassword(new UserError(e.getLocalizedMessage(m_locale)), OpenCmsTheme.SECURITY_INVALID);
            error = true;
            LOG.debug(e.getLocalizedMessage(), e);
        }
    }
    if (!error) {
        if (m_context != null) {
            close();
        } else {
            // this will be the case for forced password changes after login
            CmsVaadinUtils.showAlert(Messages.get().getBundle(m_locale).key(Messages.GUI_PWCHANGE_SUCCESS_HEADER_0), Messages.get().getBundle(m_locale).key(Messages.GUI_PWCHANGE_GUI_PWCHANGE_SUCCESS_CONTENT_0), new Runnable() {
                public void run() {
                    // Redirect back to the workplace login with the user/OU pre-filled.
                    A_CmsUI.get().getPage().setLocation(OpenCms.getLinkManager().substituteLinkForUnknownTarget(CmsLoginUI.m_adminCms, CmsWorkplaceLoginHandler.LOGIN_HANDLER + "?ocUname=" + m_user.getSimpleName() + "&ocOuFqn=" + m_user.getOuFqn(), false));
                }
            });
        }
    }
}
public class FessMessages {
    /**
     * Add the created action message for the key 'errors.failed_to_start_crawl_process' with parameters.
     * <pre>
     * message: Failed to start a crawl process.
     * </pre>
     *
     * @param property The property name for the message. (NotNull)
     * @return this. (NotNull)
     */
    public FessMessages addErrorsFailedToStartCrawlProcess(String property) { } }
assertPropertyNotNull(property);
add(property, new UserMessage(ERRORS_failed_to_start_crawl_process));
// Fluent API: return this so message additions can be chained.
return this;
public class HSaslThriftClient {
    /**
     * {@inheritDoc}
     *
     * Opens a Kerberos/SASL-secured thrift connection to the configured host,
     * optionally over SSL and with TCP keep-alive.
     */
    public HSaslThriftClient open() { } }
if (isOpen()) {
    throw new IllegalStateException("Open called on already open SASL connection. You should not have gotten here.");
}
if (log.isDebugEnabled()) {
    log.debug("Creating a new SASL thrift connection to {}", cassandraHost);
}
TSocket socket;
try {
    // Plain socket when no TLS params were supplied; otherwise an SSL client socket.
    if (params == null)
        socket = new TSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout);
    else
        socket = TSSLTransportFactory.getClientSocket(cassandraHost.getHost(), cassandraHost.getPort(), timeout, params);
} catch (TTransportException e) {
    throw new HectorTransportException("Could not get client socket: ", e);
}
if (cassandraHost.getUseSocketKeepalive()) {
    try {
        socket.getSocket().setKeepAlive(true);
    } catch (SocketException se) {
        throw new HectorTransportException("Could not set SO_KEEPALIVE on socket: ", se);
    }
}
try {
    transport = openKerberosTransport(socket, servicePrincipalName);
} catch (LoginException e) {
    // close() releases the partially opened resources before rethrowing.
    log.error("Kerberos login failed: ", e);
    close();
    throw new HectorTransportException("Kerberos context couldn't be established with client: ", e);
} catch (TTransportException e) {
    log.error("Failed to open Kerberos transport.", e);
    close();
    throw new HectorTransportException("Kerberos context couldn't be established with client: ", e);
}
transport = maybeWrapWithTFramedTransport(transport);
return this;
public class QrCode {
    /**
     * Resets the QR-Code so that it's in its initial state: clears position
     * patterns, thresholds, decode metadata and any previously decoded payload.
     */
    public void reset() { } }
// Zero the three 4-corner position-pattern polygons (corner, down, right).
for (int i = 0; i < 4; i++) {
    ppCorner.get(i).set(0, 0);
    ppDown.get(i).set(0, 0);
    ppRight.get(i).set(0, 0);
}
this.threshCorner = 0;
this.threshDown = 0;
this.threshRight = 0;
// -1 marks the version as undetermined; error/mask revert to their defaults.
version = -1;
error = L;
mask = QrCodeMaskPattern.M111;
alignment.reset();
mode = Mode.UNKNOWN;
failureCause = Failure.NONE;
// Discard any previously decoded data.
rawbits = null;
corrected = null;
message = null;
public class CommonOps_DDRM { /** * Given a symmetric matrix which is represented by a lower triangular matrix convert it back into * a full symmetric matrix . * @ param A ( Input ) Lower triangular matrix ( Output ) symmetric matrix */ public static void symmLowerToFull ( DMatrixRMaj A ) { } }
if ( A . numRows != A . numCols ) throw new MatrixDimensionException ( "Must be a square matrix" ) ; final int cols = A . numCols ; for ( int row = 0 ; row < A . numRows ; row ++ ) { for ( int col = row + 1 ; col < cols ; col ++ ) { A . data [ row * cols + col ] = A . data [ col * cols + row ] ; } }
public class SortedMapSubject {
    /**
     * Fails if the map's last key is not equal to the given key. The failure
     * message distinguishes "key present but not last" from "key absent".
     */
    public void hasLastKey(@NullableDecl Object key) { } }
if (actualAsNavigableMap().isEmpty()) {
    failWithActual("expected to have last key", key);
    return;
}
if (!Objects.equal(actualAsNavigableMap().lastKey(), key)) {
    if (actualAsNavigableMap().containsKey(key)) {
        // The key exists, it just is not in the last position.
        failWithoutActual(simpleFact(lenientFormat("Not true that %s has last key <%s>. " + "It does contain this key, but the last key is <%s>", actualAsString(), key, actualAsNavigableMap().lastKey())));
        return;
    }
    // The key is not present at all.
    failWithoutActual(simpleFact(lenientFormat("Not true that %s has last key <%s>. " + "It does not contain this key, and the last key is <%s>", actualAsString(), key, actualAsNavigableMap().lastKey())));
}
public class RU { /** * / * GLOBAL FORMAT FUNCTION */ public static String format ( Object obj , String mask , double round ) { } }
if ( obj == null ) { return "" ; } if ( obj instanceof Date ) { return formatDate ( ( Date ) obj , mask ) ; } if ( obj instanceof Number ) { return formatNumber ( ( Number ) obj , mask , round ) ; } return obj . toString ( ) ;
public class Ifc2x3tc1PackageImpl {
    /**
     * <!-- begin-user-doc -->
     * Lazily resolves and caches the EClass for IfcWindowPanelProperties from the
     * registered package. Generated EMF accessor - do not hand-edit.
     * <!-- end-user-doc -->
     * @generated
     */
    public EClass getIfcWindowPanelProperties() { } }
// Index 647 is the generated classifier position of IfcWindowPanelProperties
// in this package's classifier list.
if (ifcWindowPanelPropertiesEClass == null) {
    ifcWindowPanelPropertiesEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(647);
}
return ifcWindowPanelPropertiesEClass;
public class S6aLocalSessionDataFactory { /** * ( non - Javadoc ) * @ see org . jdiameter . common . api . app . IAppSessionDataFactory # getAppSessionData ( java . lang . Class , java . lang . String ) */ @ Override public IS6aSessionData getAppSessionData ( Class < ? extends AppSession > clazz , String sessionId ) { } }
if ( clazz . equals ( ClientS6aSession . class ) ) { ClientS6aSessionDataLocalImpl data = new ClientS6aSessionDataLocalImpl ( ) ; data . setSessionId ( sessionId ) ; return data ; } else if ( clazz . equals ( ServerS6aSession . class ) ) { ServerS6aSessionDataLocalImpl data = new ServerS6aSessionDataLocalImpl ( ) ; data . setSessionId ( sessionId ) ; return data ; } else { throw new IllegalArgumentException ( "Invalid Session Class: " + clazz . toString ( ) ) ; }
public class XPathContext {
    /**
     * Reset for new run: releases all cached result-tree-fragment DTMs, replaces
     * the DTM manager, and re-initializes every internal stack to its starting state.
     */
    public void reset() { } }
releaseDTMXRTreeFrags();
// These couldn't be disposed of earlier (see comments in release()); zap them now.
if (m_rtfdtm_stack != null)
    for (java.util.Enumeration e = m_rtfdtm_stack.elements(); e.hasMoreElements();)
        m_dtmManager.release((DTM) e.nextElement(), true);
m_rtfdtm_stack = null; // drop our references too
m_which_rtfdtm = -1;
if (m_global_rtfdtm != null)
    m_dtmManager.release(m_global_rtfdtm, true);
m_global_rtfdtm = null;
// A fresh DTM manager for the next run.
m_dtmManager = DTMManager.newInstance(org.apache.xpath.objects.XMLStringFactoryImpl.getFactory());
m_saxLocations.removeAllElements();
m_axesIteratorStack.removeAllElements();
m_contextNodeLists.removeAllElements();
m_currentExpressionNodes.removeAllElements();
m_currentNodes.removeAllElements();
m_iteratorRoots.RemoveAllNoClear();
m_predicatePos.removeAllElements();
m_predicateRoots.RemoveAllNoClear();
m_prefixResolvers.removeAllElements();
// Seed the stacks with their initial sentinel values.
m_prefixResolvers.push(null);
m_currentNodes.push(DTM.NULL);
m_currentExpressionNodes.push(DTM.NULL);
m_saxLocations.push(null);
public class MemcachedBackupSessionManager {

    /**
     * {@inheritDoc}
     * <p>
     * Delegates to the two-argument {@code remove(Session, boolean)} overload.
     * The second argument requests removal from memcached as well, unless the
     * session's NODE_FAILURE note is {@code Boolean.TRUE} (i.e. the memcached
     * node is already known to have failed).
     */
    @Override
    public void remove(final Session session) {
        remove(session, session.getNote(MemcachedSessionService.NODE_FAILURE) != Boolean.TRUE);
    }
}
public class TSDB { /** * Attempts to assign a UID to a name for the given type * Used by the UniqueIdRpc call to generate IDs for new metrics , tagks or * tagvs . The name must pass validation and if it ' s already assigned a UID , * this method will throw an error with the proper UID . Otherwise if it can * create the UID , it will be returned * @ param type The type of uid to assign , metric , tagk or tagv * @ param name The name of the uid object * @ return A byte array with the UID if the assignment was successful * @ throws IllegalArgumentException if the name is invalid or it already * exists * @ since 2.0 */ public byte [ ] assignUid ( final String type , final String name ) { } }
Tags . validateString ( type , name ) ; if ( type . toLowerCase ( ) . equals ( "metric" ) ) { try { final byte [ ] uid = this . metrics . getId ( name ) ; throw new IllegalArgumentException ( "Name already exists with UID: " + UniqueId . uidToString ( uid ) ) ; } catch ( NoSuchUniqueName nsue ) { return this . metrics . getOrCreateId ( name ) ; } } else if ( type . toLowerCase ( ) . equals ( "tagk" ) ) { try { final byte [ ] uid = this . tag_names . getId ( name ) ; throw new IllegalArgumentException ( "Name already exists with UID: " + UniqueId . uidToString ( uid ) ) ; } catch ( NoSuchUniqueName nsue ) { return this . tag_names . getOrCreateId ( name ) ; } } else if ( type . toLowerCase ( ) . equals ( "tagv" ) ) { try { final byte [ ] uid = this . tag_values . getId ( name ) ; throw new IllegalArgumentException ( "Name already exists with UID: " + UniqueId . uidToString ( uid ) ) ; } catch ( NoSuchUniqueName nsue ) { return this . tag_values . getOrCreateId ( name ) ; } } else { LOG . warn ( "Unknown type name: " + type ) ; throw new IllegalArgumentException ( "Unknown type name" ) ; }
public class Vector2d { /** * / * ( non - Javadoc ) * @ see org . joml . Vector2dc # mul ( double , org . joml . Vector2d ) */ public Vector2d mul ( double scalar , Vector2d dest ) { } }
dest . x = x * scalar ; dest . y = y * scalar ; return dest ;
public class MappedText { /** * Legacy */ public void setPath ( String path ) { } }
if ( StringUtils . isEmpty ( url ) ) { url = path ; } else { url = path + "/" + url ; }
public class DebugRepositoryLookupFailureCallback { /** * ( non - Javadoc ) * @ see * edu . umd . cs . findbugs . ba . RepositoryLookupFailureCallback # reportMissingClass * ( java . lang . ClassNotFoundException ) */ @ Override @ SuppressFBWarnings ( "DM_EXIT" ) public void reportMissingClass ( ClassNotFoundException ex ) { } }
String missing = AbstractBugReporter . getMissingClassName ( ex ) ; if ( missing == null || missing . charAt ( 0 ) == '[' ) { return ; } System . out . println ( "Missing class" ) ; ex . printStackTrace ( ) ; System . exit ( 1 ) ;
public class KeyChainGroup { /** * Returns the key chain that ' s used for generation of fresh / current keys of the given type . If it ' s not the default * type and no active chain for this type exists , { @ code null } is returned . No upgrade or downgrade is tried . */ public final DeterministicKeyChain getActiveKeyChain ( Script . ScriptType outputScriptType , long keyRotationTimeSecs ) { } }
checkState ( isSupportsDeterministicChains ( ) , "doesn't support deterministic chains" ) ; for ( DeterministicKeyChain chain : ImmutableList . copyOf ( chains ) . reverse ( ) ) if ( chain . getOutputScriptType ( ) == outputScriptType && chain . getEarliestKeyCreationTime ( ) >= keyRotationTimeSecs ) return chain ; return null ;
public class ExpressionUtils { /** * Create a { @ code any col } expression * @ param col subquery expression * @ return any col */ @ SuppressWarnings ( "unchecked" ) public static < T > Expression < T > any ( SubQueryExpression < ? extends T > col ) { } }
return new OperationImpl < T > ( col . getType ( ) , Ops . QuantOps . ANY , ImmutableList . < Expression < ? > > of ( col ) ) ;
public class QrCode {

    /**
     * Adds format information to eval.
     * <p>
     * Writes the 15-bit format sequence (looked up from {@code QR_ANNEX_C})
     * into both copies of the format-information area of the evaluation grid:
     * along row 8 / column 8 near the top-left finder, and alongside the
     * top-right and bottom-left finders.
     */
    private static void addFormatInfoEval(byte[] eval, int size, EccLevel ecc_level, int pattern) {
        int format = pattern;
        int seq;
        int i;

        // Fold the ECC level into the format-info table index; level M
        // contributes no offset (its indicator bits are 00).
        switch (ecc_level) {
            case L:
                format += 0x08;
                break;
            case Q:
                format += 0x18;
                break;
            case H:
                format += 0x10;
                break;
        }

        seq = QR_ANNEX_C[format];

        // NOTE(review): '(0x01 >> pattern)' evaluates to 0 for every pattern > 0,
        // so only mask pattern 0 ever sets non-zero marks here — confirm that
        // '0x01 << pattern' was not intended.
        for (i = 0; i < 6; i++) {
            eval[(i * size) + 8] = (byte) ((((seq >> i) & 0x01) != 0) ? (0x01 >> pattern) : 0x00);
        }
        for (i = 0; i < 8; i++) {
            eval[(8 * size) + (size - i - 1)] = (byte) ((((seq >> i) & 0x01) != 0) ? (0x01 >> pattern) : 0x00);
        }
        for (i = 0; i < 6; i++) {
            eval[(8 * size) + (5 - i)] = (byte) ((((seq >> (i + 9)) & 0x01) != 0) ? (0x01 >> pattern) : 0x00);
        }
        for (i = 0; i < 7; i++) {
            eval[(((size - 7) + i) * size) + 8] = (byte) ((((seq >> (i + 8)) & 0x01) != 0) ? (0x01 >> pattern) : 0x00);
        }
        eval[(7 * size) + 8] = (byte) ((((seq >> 6) & 0x01) != 0) ? (0x01 >> pattern) : 0x00);
        eval[(8 * size) + 8] = (byte) ((((seq >> 7) & 0x01) != 0) ? (0x01 >> pattern) : 0x00);
        eval[(8 * size) + 7] = (byte) ((((seq >> 8) & 0x01) != 0) ? (0x01 >> pattern) : 0x00);
    }
}
public class NetworkSecurityGroupsInner {

    /**
     * Creates or updates a network security group in the specified resource group.
     *
     * @param resourceGroupName The name of the resource group.
     * @param networkSecurityGroupName The name of the network security group.
     * @param parameters Parameters supplied to the create or update network security group operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the NetworkSecurityGroupInner object if successful.
     */
    public NetworkSecurityGroupInner createOrUpdate(String resourceGroupName, String networkSecurityGroupName, NetworkSecurityGroupInner parameters) {
        // Blocking wrapper over the async long-running operation: last() waits
        // for the terminal polling result before extracting the response body.
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, networkSecurityGroupName, parameters).toBlocking().last().body();
    }
}
public class HtmlTag {

    /**
     * Generate html tags for use in {@link HtmlTag.Html#head &lt;head&gt;}.
     * Allows adding attributes to a head tag.
     *
     * @param tag the name and contents of the head tag.
     * @param attributes the attributes of the head tag.
     * @return an object that generates the tag in the head.
     */
    public static Attribute<HtmlTag> head(Attribute<HtmlTag> tag, Attribute<Void>... attributes) {
        // Delegate to the (name, value-supplier, attributes) overload, passing
        // the tag's value lazily via a method reference.
        return head(tag.name(), tag::value, attributes);
    }
}
public class BuildInfoDeployer { /** * Adding environment and system variables to build info . * @ param builder */ @ Override protected void addBuildInfoProperties ( BuildInfoBuilder builder ) { } }
if ( envVars != null ) { for ( Map . Entry < String , String > entry : envVars . entrySet ( ) ) { builder . addProperty ( BuildInfoProperties . BUILD_INFO_ENVIRONMENT_PREFIX + entry . getKey ( ) , entry . getValue ( ) ) ; } } if ( sysVars != null ) { for ( Map . Entry < String , String > entry : sysVars . entrySet ( ) ) { builder . addProperty ( entry . getKey ( ) , entry . getValue ( ) ) ; } }
public class IncidentEntity {

    /**
     * Instantiate recursive a new incident a super execution
     * (i.e. super process instance) which is affected from this incident.
     * For example: a super process instance called via CallActivity
     * a new process instance on which an incident happened, so that
     * the super process instance has an incident too.
     *
     * @param rootCauseIncidentId id of the incident at the root of the chain;
     *        propagated unchanged through every level of recursion
     * @param createdIncidents accumulator list that every newly created
     *        incident is appended to
     */
    protected void createRecursiveIncidents(String rootCauseIncidentId, List<IncidentEntity> createdIncidents) {
        final ExecutionEntity execution = getExecution();

        if (execution != null) {

            ExecutionEntity superExecution = execution.getProcessInstance().getSuperExecution();

            if (superExecution != null) {

                // create a new incident mirroring this one on the super execution
                IncidentEntity newIncident = create(incidentType);
                newIncident.setExecution(superExecution);
                newIncident.setActivityId(superExecution.getCurrentActivityId());
                newIncident.setProcessDefinitionId(superExecution.getProcessDefinitionId());
                newIncident.setTenantId(superExecution.getTenantId());

                // set cause (this incident) and root cause (original incident)
                newIncident.setCauseIncidentId(id);
                newIncident.setRootCauseIncidentId(rootCauseIncidentId);

                // insert new incident (and create a new historic incident)
                insert(newIncident);

                // add new incident to result set
                createdIncidents.add(newIncident);

                // recurse: the super execution may itself have a super execution
                newIncident.createRecursiveIncidents(rootCauseIncidentId, createdIncidents);
            }
        }
    }
}
public class CmsSitemapController {

    /**
     * Removes the entry with the given site-path from navigation.<p>
     *
     * @param entryId the entry id
     */
    public void removeFromNavigation(CmsUUID entryId) {
        CmsClientSitemapEntry entry = getEntryById(entryId);
        CmsClientSitemapEntry parent = getEntry(CmsResource.getParentFolder(entry.getSitePath()));
        // Build a 'remove' change for this entry, anchored at its parent folder.
        CmsSitemapChange change = new CmsSitemapChange(entry.getId(), entry.getSitePath(), ChangeType.remove);
        change.setParentId(parent.getId());
        change.setDefaultFileId(entry.getDefaultFileId());
        // Record the removed entry in a copy of the clipboard's modified list,
        // so it can be restored from the clipboard later.
        CmsSitemapClipboardData data = CmsSitemapView.getInstance().getController().getData().getClipboardData().copy();
        data.addModified(entry);
        change.setClipBoardData(data);
        // TODO: handle detail page delete
        commitChange(change, null);
    }
}
public class ConfigFetchMgrImpl {

    /**
     * Fetches the configuration matching the given detailed parameters
     * (app, environment, version, key and config type) by delegating
     * directly to the config DAO.
     */
    @Override
    public Config getConfByParameter(Long appId, Long envId, String version, String key, DisConfigTypeEnum disConfigTypeEnum) {
        return configDao.getByParameter(appId, envId, version, key, disConfigTypeEnum);
    }
}
public class base_resource {

    /**
     * This method forms the http GET request, applies it on the netscaler,
     * reads the response from the netscaler and converts it to the
     * corresponding resource type.
     *
     * @param service the NITRO service holding connection details (address, version, session)
     * @param option optional request options (filters, compression, ...); may be null
     * @return Array of requested resources.
     * @throws Exception on malformed URLs, I/O failures, or response-parsing errors
     */
    private base_resource[] get_request(nitro_service service, options option) throws Exception {
        StringBuilder responseStr = new StringBuilder();
        HttpURLConnection httpURLConnection = null;
        try {
            String urlstr;
            String ipaddress = service.get_ipaddress();
            String version = service.get_version();
            String sessionid = service.get_sessionid();
            String objtype = get_object_type();
            String protocol = service.get_protocol();

            // build URL: <protocol>://<ip>/nitro/<version>/config/<objtype>[/<name>][?<options>]
            urlstr = protocol + "://" + ipaddress + "/nitro/" + version + "/config/" + objtype;
            String name = this.get_object_name();
            if (name != null && name.length() > 0) {
                // The name is double-encoded (once for the NITRO API, once for the URL).
                urlstr = urlstr + "/" + nitro_util.encode(nitro_util.encode(name));
            }
            if (option != null || (service.get_warning() != null && service.get_warning())) {
                String optionstr = null;
                if (option != null) {
                    optionstr = option.to_string();
                    if (optionstr.length() > 0) {
                        urlstr = urlstr + "?";
                        urlstr = urlstr + optionstr;
                    }
                }
                if (service.get_warning() != null && service.get_warning()) {
                    // Append with '&' if options already started the query string.
                    if (option != null && optionstr.length() > 0) {
                        urlstr = urlstr + "&";
                    } else {
                        urlstr = urlstr + "?";
                    }
                    urlstr = urlstr + "warning=yes";
                }
            }
            URL url = new URL(urlstr);
            httpURLConnection = (HttpURLConnection) url.openConnection();
            httpURLConnection.setRequestMethod("GET");
            // Authentication is carried via the NITRO session-id cookie.
            httpURLConnection.setRequestProperty("Cookie", "sessionid=" + nitro_util.encode(sessionid));
            if ((option != null) && (option.get_compression()))
                httpURLConnection.setRequestProperty("Accept-Encoding", "gzip, deflate");
            if (httpURLConnection instanceof HttpsURLConnection) {
                if (service.get_certvalidation()) {
                    SocketFactory sslSocketFactory = SSLSocketFactory.getDefault();
                    HttpsURLConnection secured = (HttpsURLConnection) httpURLConnection;
                    secured.setSSLSocketFactory((SSLSocketFactory) sslSocketFactory);
                    if (!service.get_hostnameverification()) {
                        /*
                         * Override the default hostname verifier with
                         * EmptyHostnameVerifier to ignore hostname verification.
                         */
                        secured.setHostnameVerifier(new EmptyHostnameVerifier());
                    }
                } else {
                    SSLContext sslContext = SSLContext.getInstance("SSL");
                    // SECURITY: an empty trust manager disables certificate validation;
                    // this is deliberate because NetScaler presents a test certificate
                    // not issued by any signing authority, so the credentials check
                    // must be bypassed when cert validation is turned off.
                    sslContext.init(null, new TrustManager[] { new EmptyTrustManager() }, null);
                    SocketFactory sslSocketFactory = sslContext.getSocketFactory();
                    HttpsURLConnection secured = (HttpsURLConnection) httpURLConnection;
                    secured.setSSLSocketFactory((SSLSocketFactory) sslSocketFactory);
                    if (!service.get_hostnameverification()) {
                        /*
                         * Override the default hostname verifier with
                         * EmptyHostnameVerifier to bypass the hostname check.
                         */
                        secured.setHostnameVerifier(new EmptyHostnameVerifier());
                    }
                }
            }
            InputStream input;
            try {
                input = httpURLConnection.getInputStream();
            } catch (Exception e) {
                // On HTTP error codes the body (error details) is on the error stream.
                input = httpURLConnection.getErrorStream();
            }
            String contentEncoding = httpURLConnection.getContentEncoding();
            // get correct input stream for compressed data:
            if (contentEncoding != null) {
                if (contentEncoding.equalsIgnoreCase("gzip"))
                    input = new GZIPInputStream(input); // reads 2 bytes to determine GZIP stream!
                else if (contentEncoding.equalsIgnoreCase("deflate"))
                    input = new InflaterInputStream(input);
            }
            int numOfTotalBytesRead;
            byte[] buffer = new byte[1024];
            // NOTE(review): decoding each 1024-byte chunk with new String(...) can
            // split multi-byte characters at chunk boundaries — confirm responses
            // are effectively ASCII or fix by decoding the full byte stream once.
            while ((numOfTotalBytesRead = input.read(buffer, 0, buffer.length)) != -1) {
                responseStr.append(new String(buffer, 0, numOfTotalBytesRead));
            }
            httpURLConnection.disconnect();
            input.close();
        } catch (MalformedURLException mue) {
            throw mue;
        } catch (IOException ioe) {
            throw ioe;
        } catch (Exception e) {
            throw e;
        }
        base_resource[] result = get_nitro_response(service, responseStr.toString());
        return result;
    }
}
public class MutableInodeDirectory {

    /**
     * Converts the entry to an {@link MutableInodeDirectory}.
     *
     * @param entry the entry to convert
     * @return the {@link MutableInodeDirectory} representation
     */
    public static MutableInodeDirectory fromJournalEntry(InodeDirectoryEntry entry) {
        // If journal entry has no mode set, set default mode for backwards-compatibility.
        MutableInodeDirectory ret = new MutableInodeDirectory(entry.getId())
            .setCreationTimeMs(entry.getCreationTimeMs())
            .setName(entry.getName())
            .setParentId(entry.getParentId())
            .setPersistenceState(PersistenceState.valueOf(entry.getPersistenceState()))
            .setPinned(entry.getPinned())
            .setLastModificationTimeMs(entry.getLastModificationTimeMs(), true)
            .setMountPoint(entry.getMountPoint())
            .setTtl(entry.getTtl())
            .setTtlAction(ProtobufUtils.fromProtobuf(entry.getTtlAction()))
            .setDirectChildrenLoaded(entry.getDirectChildrenLoaded());
        if (entry.hasAcl()) {
            ret.mAcl = ProtoUtils.fromProto(entry.getAcl());
        } else {
            // Backward compatibility: rebuild the ACL from the legacy
            // owner/group/mode fields of the journal entry.
            AccessControlList acl = new AccessControlList();
            acl.setOwningUser(entry.getOwner());
            acl.setOwningGroup(entry.getGroup());
            short mode = entry.hasMode() ? (short) entry.getMode() : Constants.DEFAULT_FILE_SYSTEM_MODE;
            acl.setMode(mode);
            ret.mAcl = acl;
        }
        if (entry.hasDefaultAcl()) {
            ret.mDefaultAcl = (DefaultAccessControlList) ProtoUtils.fromProto(entry.getDefaultAcl());
        } else {
            // Older entries carry no default ACL; fall back to an empty one.
            ret.mDefaultAcl = new DefaultAccessControlList();
        }
        return ret;
    }
}
public class VerySimpleClient {

    /**
     * Entry point. Under a debugger, runs every {@code @Benchmark} method
     * directly in-process (10,000 invocations each) so it can be stepped
     * through; otherwise launches the JMH runner with sample-time measurement.
     * The measurement time is 30s when -DlongTest=true, else 2s.
     */
    public static void main(String[] args) throws Exception {
        if (Jvm.isDebug()) {
            // Debug mode: invoke benchmark methods directly, bypassing JMH forking.
            VerySimpleClient main = new VerySimpleClient();
            main.setUp();
            for (Method m : VerySimpleClient.class.getMethods()) {
                if (m.getAnnotation(Benchmark.class) != null) {
                    for (int i = 0; i < 100; i++) {
                        for (int j = 0; j < 100; j++) {
                            m.invoke(main);
                        }
                    }
                }
            }
            main.tearDown();
        } else {
            int time = Boolean.getBoolean("longTest") ? 30 : 2;
            Options opt = new OptionsBuilder()
                .include(VerySimpleClient.class.getSimpleName())
                .warmupIterations(5)
                // .measurementIterations(5)
                .forks(1)
                .mode(Mode.SampleTime)
                .measurementTime(TimeValue.seconds(time))
                .timeUnit(TimeUnit.NANOSECONDS)
                .build();
            new Runner(opt).run();
        }
    }
}
public class Packet { /** * Queues this { @ link Packet } to one ( or more ) { @ link Client } ( s ) . * < br > < br > * No { @ link Client } will receive this { @ link Packet } until { @ link Client # flush ( ) } is called for that respective * { @ link Client } . * @ param < T > A { @ link Client } or any of its children . * @ param clients A variable amount of { @ link Client } s . */ @ SafeVarargs public final < T extends Client > void write ( T ... clients ) { } }
if ( clients . length == 0 ) { throw new IllegalArgumentException ( "You must send this packet to at least one client!" ) ; } for ( var client : clients ) { write ( client ) ; }
public class RevokeFlowEntitlementRequestMarshaller {

    /**
     * Marshall the given parameter object.
     * <p>
     * Writes the entitlement ARN and flow ARN of the request through the
     * protocol marshaller, wrapping any failure in an {@code SdkClientException}.
     */
    public void marshall(RevokeFlowEntitlementRequest revokeFlowEntitlementRequest, ProtocolMarshaller protocolMarshaller) {
        if (revokeFlowEntitlementRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(revokeFlowEntitlementRequest.getEntitlementArn(), ENTITLEMENTARN_BINDING);
            protocolMarshaller.marshall(revokeFlowEntitlementRequest.getFlowArn(), FLOWARN_BINDING);
        } catch (Exception e) {
            // Surface marshalling failures uniformly as client-side SDK errors.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class GoogleCloudStorageReadChannel {

    /**
     * Opens the underlying stream, sets its position to the {@link #currentPosition}.
     *
     * <p>If the file encoding in GCS is gzip (and therefore the HTTP client will decompress it), the
     * entire file is always requested and we seek to the position requested. If the file encoding is
     * not gzip, only the remaining bytes to be read are requested from GCS.
     *
     * @param bytesToRead number of bytes to read from new stream. Ignored if {@link
     *     GoogleCloudStorageReadOptions#getFadvise()} is equal to {@link Fadvise#SEQUENTIAL}.
     * @throws IOException on IO error
     */
    protected InputStream openStream(long bytesToRead) throws IOException {
        checkArgument(bytesToRead > 0, "bytesToRead should be greater than 0, but was %s", bytesToRead);

        checkState(
            contentChannel == null && contentChannelEnd < 0,
            "contentChannel and contentChannelEnd should be not initialized yet for '%s'",
            resourceIdString);

        if (size == 0) {
            return new ByteArrayInputStream(new byte[0]);
        }

        // Phase 1: decide the byte range to request and build the Range header.
        String rangeHeader;
        if (!metadataInitialized) {
            // First read: we don't know the object size/encoding yet.
            contentChannelPosition = getContentChannelPositionForFirstRead(bytesToRead);
            rangeHeader = "bytes=" + contentChannelPosition + "-";
            if (readOptions.getFadvise() == Fadvise.RANDOM) {
                long maxBytesToRead = Math.max(readOptions.getMinRangeRequestSize(), bytesToRead);
                rangeHeader += (contentChannelPosition + maxBytesToRead - 1);
            }
        } else if (gzipEncoded) {
            // Do not set range for gzip-encoded files - it's not supported.
            rangeHeader = null;
            // Always read gzip-encoded files till the end - they do not support range reads.
            contentChannelPosition = 0;
            contentChannelEnd = size;
        } else {
            if (readOptions.getFadvise() != Fadvise.SEQUENTIAL && isFooterRead()) {
                // Pre-fetch footer if reading end of file.
                contentChannelPosition = Math.max(0, size - readOptions.getMinRangeRequestSize());
            } else {
                contentChannelPosition = currentPosition;
            }

            // Set rangeSize to the size of the file remainder from currentPosition.
            long rangeSize = size - contentChannelPosition;
            if (randomAccess) {
                long randomRangeSize = Math.max(bytesToRead, readOptions.getMinRangeRequestSize());
                // Limit rangeSize to the randomRangeSize.
                rangeSize = Math.min(randomRangeSize, rangeSize);
            }

            contentChannelEnd = contentChannelPosition + rangeSize;
            // Do not read footer again, if it was already pre-fetched.
            if (footerContent != null) {
                contentChannelEnd = Math.min(contentChannelEnd, size - footerContent.length);
            }

            checkState(
                currentPosition < contentChannelEnd,
                "currentPosition (%s) should be less than contentChannelEnd (%s) for '%s'",
                currentPosition, contentChannelEnd, resourceIdString);
            checkState(
                contentChannelPosition <= currentPosition,
                "contentChannelPosition (%s) should be less or equal to currentPosition (%s) for '%s'",
                contentChannelPosition, currentPosition, resourceIdString);

            rangeHeader = "bytes=" + contentChannelPosition + "-";
            if (randomAccess || contentChannelEnd != size) {
                rangeHeader += (contentChannelEnd - 1);
            }
        }
        checkState(
            !metadataInitialized || contentChannelEnd > 0,
            "contentChannelEnd should be initialized already for '%s'",
            resourceIdString);

        // Phase 2: execute the media request.
        Get getObject = createDataRequest(rangeHeader);
        HttpResponse response;
        try {
            response = getObject.executeMedia();
            // TODO(b/110832992): validate response range header against expected/request range
        } catch (IOException e) {
            if (!metadataInitialized && errorExtractor.rangeNotSatisfiable(e) && currentPosition == 0) {
                // We don't know the size yet (metadataInitialized == false) and we're seeking to byte 0,
                // but got 'range not satisfiable'; the object must be empty.
                logger.atInfo().log(
                    "Got 'range not satisfiable' for reading '%s' at position 0; assuming empty.",
                    resourceIdString);
                size = 0;
                return new ByteArrayInputStream(new byte[0]);
            }
            response = handleExecuteMediaException(e, getObject, shouldRetryWithLiveVersion());
        }

        // Phase 3: if this was the first request, initialize metadata from the response.
        if (!metadataInitialized) {
            initMetadata(response.getHeaders());
            checkState(
                metadataInitialized, "metadata should be initialized already for '%s'", resourceIdString);
            if (size == 0) {
                resetContentChannel();
                return new ByteArrayInputStream(new byte[0]);
            }
            if (gzipEncoded) {
                // Initialize `contentChannelEnd` to `size` (initialized to Long.MAX_VALUE in `initMetadata`
                // method for gzipped objects) because value of HTTP Content-Length header is usually
                // smaller than decompressed object size.
                if (currentPosition == 0) {
                    contentChannelEnd = size;
                } else {
                    // We now know the object is gzip-encoded, so the range request was
                    // wrong; retry from scratch reading the whole object.
                    resetContentChannel();
                    return openStream(bytesToRead);
                }
            }
        }

        // Derive contentChannelEnd from the response if it wasn't set above.
        if (contentChannelEnd < 0) {
            String contentRange = response.getHeaders().getContentRange();
            if (contentRange != null) {
                String contentEnd =
                    contentRange.substring(contentRange.lastIndexOf('-') + 1, contentRange.lastIndexOf('/'));
                contentChannelEnd = Long.parseLong(contentEnd) + 1;
            } else {
                contentChannelEnd = response.getHeaders().getContentLength();
            }
        }

        checkState(
            contentChannelEnd > 0,
            "contentChannelEnd should be initialized already for '%s'",
            resourceIdString);

        // Phase 4: if this request covers exactly the tail of the object, cache it
        // as the footer (with retries) and serve from the cached footer instead.
        if (!gzipEncoded
            && readOptions.getFadvise() != Fadvise.SEQUENTIAL
            && contentChannelEnd == size
            && contentChannelEnd - contentChannelPosition <= readOptions.getMinRangeRequestSize()) {
            for (int retriesCount = 0; retriesCount < maxRetries; retriesCount++) {
                try {
                    cacheFooter(response);
                    if (retriesCount != 0) {
                        logger.atInfo().log(
                            "Successfully cached footer after %s retries for '%s'",
                            retriesCount, resourceIdString);
                    }
                    break;
                } catch (IOException e) {
                    logger.atInfo().withCause(e).log(
                        "Failed to prefetch footer (retry #%s/%s) for '%s'",
                        retriesCount + 1, maxRetries, resourceIdString);
                    if (retriesCount == 0) {
                        readBackOff.get().reset();
                    }
                    if (retriesCount == maxRetries) {
                        resetContentChannel();
                        throw e;
                    }
                    try {
                        response = getObject.executeMedia();
                        // TODO(b/110832992): validate response range header against expected/request range
                    } catch (IOException e1) {
                        response = handleExecuteMediaException(e1, getObject, shouldRetryWithLiveVersion());
                    }
                }
            }
            checkState(
                footerContent != null,
                "footerContent should not be null after successful footer prefetch for '%s'",
                resourceIdString);
            resetContentChannel();
            return openFooterStream();
        }

        // Phase 5: wrap the content stream (optional buffering) and skip forward
        // to currentPosition if the range started earlier.
        try {
            InputStream contentStream = response.getContent();
            if (readOptions.getBufferSize() > 0) {
                int bufferSize = readOptions.getBufferSize();
                // limit buffer size to the channel end
                bufferSize =
                    Math.toIntExact(Math.min(bufferSize, contentChannelEnd - contentChannelPosition));
                logger.atFine().log(
                    "Opened stream from %d position with %s range, %d bytesToRead"
                        + " and %d bytes buffer for '%s'",
                    currentPosition, rangeHeader, bytesToRead, bufferSize, resourceIdString);
                contentStream = new BufferedInputStream(contentStream, bufferSize);
            } else {
                logger.atFine().log(
                    "Opened stream from %d position with %s range and %d bytesToRead for '%s'",
                    currentPosition, rangeHeader, bytesToRead, resourceIdString);
            }

            if (contentChannelPosition < currentPosition) {
                long bytesToSkip = currentPosition - contentChannelPosition;
                logger.atFine().log(
                    "Skipping %d bytes from %d position to %d position for '%s'",
                    bytesToSkip, contentChannelPosition, currentPosition, resourceIdString);
                while (bytesToSkip > 0) {
                    long skippedBytes = contentStream.skip(bytesToSkip);
                    logger.atFine().log(
                        "Skipped %d bytes from %d position for '%s'",
                        skippedBytes, contentChannelPosition, resourceIdString);
                    bytesToSkip -= skippedBytes;
                    contentChannelPosition += skippedBytes;
                }
            }

            checkState(
                contentChannelPosition == currentPosition,
                "contentChannelPosition (%s) should be equal to currentPosition (%s) for '%s'",
                contentChannelPosition, currentPosition, resourceIdString);

            return contentStream;
        } catch (IOException e) {
            try {
                response.disconnect();
            } catch (IOException closeException) {
                e.addSuppressed(closeException);
            }
            throw e;
        }
    }
}
public class ExecutorManager { /** * Checks whether the given flow has an active ( running , non - dispatched ) executions { @ inheritDoc } * @ see azkaban . executor . ExecutorManagerAdapter # isFlowRunning ( int , java . lang . String ) */ @ Override public boolean isFlowRunning ( final int projectId , final String flowId ) { } }
boolean isRunning = false ; isRunning = isRunning || isFlowRunningHelper ( projectId , flowId , this . queuedFlows . getAllEntries ( ) ) ; isRunning = isRunning || isFlowRunningHelper ( projectId , flowId , this . runningExecutions . get ( ) . values ( ) ) ; return isRunning ;
public class Util { /** * Closes { @ code serverSocket } , ignoring any checked exceptions . Does nothing if { @ code * serverSocket } is null . */ public static void closeQuietly ( ServerSocket serverSocket ) { } }
if ( serverSocket != null ) { try { serverSocket . close ( ) ; } catch ( RuntimeException rethrown ) { throw rethrown ; } catch ( Exception ignored ) { } }
public class ThriftClientFactory {

    /*
     * (non-Javadoc)
     * @see com.impetus.kundera.loader.GenericClientFactory#instantiateClient(java.lang.String)
     */
    @Override
    protected Client instantiateClient(String persistenceUnit) {
        // Obtain a connection pool according to the configured pooling policy
        // before handing it to the new Thrift client.
        ConnectionPool pool = getPoolUsingPolicy();
        return new ThriftClient(this, indexManager, reader, persistenceUnit, pool, externalProperties, kunderaMetadata, timestampGenerator);
    }
}
public class XmLepResourceService { /** * { @ inheritDoc } */ @ Override public LepResourceDescriptor getResourceDescriptor ( ContextsHolder contextsHolder , LepResourceKey resourceKey ) { } }
Objects . requireNonNull ( resourceKey , "resourceKey can't be null" ) ; Resource scriptResource = getScriptResource ( contextsHolder , resourceKey ) ; if ( ! scriptResource . exists ( ) ) { log . debug ( "No LEP resource for key {}" , resourceKey ) ; return null ; } return getLepResourceDescriptor ( resourceKey , scriptResource ) ;
public class JsonMarshaller { /** * Formats a log level into one of the accepted string representation of a log level . * @ param level log level to format . * @ return log level as a String . */ private String formatLevel ( Event . Level level ) { } }
if ( level == null ) { return null ; } switch ( level ) { case DEBUG : return "debug" ; case FATAL : return "fatal" ; case WARNING : return "warning" ; case INFO : return "info" ; case ERROR : return "error" ; default : logger . error ( "The level '{}' isn't supported, this should NEVER happen, contact Sentry developers" , level . name ( ) ) ; return null ; }
public class DateISO8601Codec { /** * { @ inheritDoc } */ @ Override public Date decode ( String value ) { } }
Calendar calendar = GregorianCalendar . getInstance ( ) ; String s = value . replace ( "Z" , "+00:00" ) ; try { s = s . substring ( 0 , 22 ) + s . substring ( 23 ) ; } catch ( IndexOutOfBoundsException e ) { throw new RuntimeException ( e ) ; } Date date ; try { date = new SimpleDateFormat ( "yyyy-MM-dd'T'HH:mm:ssZ" ) . parse ( s ) ; calendar . setTime ( date ) ; return calendar . getTime ( ) ; } catch ( ParseException e ) { throw new RuntimeException ( e ) ; }
public class DescribeJobDefinitionsRequest { /** * A list of up to 100 job definition names or full Amazon Resource Name ( ARN ) entries . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setJobDefinitions ( java . util . Collection ) } or { @ link # withJobDefinitions ( java . util . Collection ) } if you want * to override the existing values . * @ param jobDefinitions * A list of up to 100 job definition names or full Amazon Resource Name ( ARN ) entries . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeJobDefinitionsRequest withJobDefinitions ( String ... jobDefinitions ) { } }
if ( this . jobDefinitions == null ) { setJobDefinitions ( new java . util . ArrayList < String > ( jobDefinitions . length ) ) ; } for ( String ele : jobDefinitions ) { this . jobDefinitions . add ( ele ) ; } return this ;
public class AnnotationLookup { /** * / * @ Nullable */ public JvmAnnotationReference removeAnnotation ( /* @ NonNull */ JvmAnnotationTarget annotationTarget , /* @ NonNull */ Class < ? extends Annotation > type ) { } }
JvmAnnotationReference result = findAnnotation ( annotationTarget , type ) ; if ( result != null ) { annotationTarget . getAnnotations ( ) . remove ( result ) ; return result ; } return null ;
public class GeometryDeserializer { /** * Parses the JSON as a linestring geometry * @ param coords The coordinates for the linestring , which is a list of coordinates ( which in turn are lists of * two values , x and y ) * @ param crsId * @ return An instance of linestring * @ throws IOException if the given json does not correspond to a linestring or can be parsed as such . */ private LineString asLineString ( List < List > coords , CrsId crsId ) throws IOException { } }
if ( coords == null || coords . size ( ) < 2 ) { throw new IOException ( "A linestring requires a valid series of coordinates (at least two coordinates)" ) ; } PointSequence coordinates = getPointSequence ( coords , crsId ) ; return new LineString ( coordinates ) ;
public class Agg {
    /**
     * Get a {@link Collector} that calculates the <code>LAST</code> function.
     * <p>
     * Note that unlike in (Oracle) SQL, where the <code>LAST</code> function is an
     * ordered set aggregate function that produces a set of results, this collector
     * just produces the last value in the order of stream traversal. For matching
     * behaviour to Oracle's [aggregate function] KEEP (DENSE_RANK LAST ORDER BY ...),
     * use {@link #maxAll(Comparator)} instead.
     */
    public static <T> Collector<T, ?, Optional<T>> last() {
        // Reducing with (v1, v2) -> v2 keeps discarding the accumulated value in
        // favour of the newest element, leaving the final element of the stream.
        return Collectors.reducing((v1, v2) -> v2);
    }
}
public class CmsFile { /** * Sets the contents of this file . < p > * This will also set the date content , but only if the content is already set . < p > * @ param value the content of this file */ public void setContents ( byte [ ] value ) { } }
if ( value == null ) { value = new byte [ ] { } ; } long dateContent = System . currentTimeMillis ( ) ; if ( ( m_fileContent == null ) || ( m_fileContent . length == 0 ) ) { dateContent = m_dateContent ; } m_fileContent = new byte [ value . length ] ; System . arraycopy ( value , 0 , m_fileContent , 0 , value . length ) ; if ( m_fileContent . length > 0 ) { m_length = m_fileContent . length ; } else { m_length = 0 ; } m_dateContent = dateContent ;
public class I18nEngine {
    /**
     * Formatting Pending notification text.
     *
     * Delegates to formatContentText, pulling the sender, content type, text and
     * related user out of the notification's content description, plus the
     * channel flag from the notification itself.
     *
     * @param pendingNotification pending notification
     * @return formatted notification
     */
    @ObjectiveCName("formatNotificationText:")
    public String formatNotificationText(Notification pendingNotification) {
        return formatContentText(
                pendingNotification.getSender(),
                pendingNotification.getContentDescription().getContentType(),
                pendingNotification.getContentDescription().getText(),
                pendingNotification.getContentDescription().getRelatedUser(),
                pendingNotification.isChannel());
    }
}
public class PCA { /** * Return a reduced basis set that covers a certain fraction of the variance of the data * @ param variance The desired fractional variance ( 0 to 1 ) , it will always be greater than the value . * @ return The basis vectors as columns , size < i > N < / i > rows by < i > ndims < / i > columns , where < i > ndims < / i > is less than or equal to < i > N < / i > */ public INDArray reducedBasis ( double variance ) { } }
INDArray vars = Transforms . pow ( eigenvalues , - 0.5 , true ) ; double res = vars . sumNumber ( ) . doubleValue ( ) ; double total = 0.0 ; int ndims = 0 ; for ( int i = 0 ; i < vars . columns ( ) ; i ++ ) { ndims ++ ; total += vars . getDouble ( i ) ; if ( total / res > variance ) break ; } INDArray result = Nd4j . create ( eigenvectors . rows ( ) , ndims ) ; for ( int i = 0 ; i < ndims ; i ++ ) result . putColumn ( i , eigenvectors . getColumn ( i ) ) ; return result ;
public class Util { /** * get the nth element from the path - first is 0. * @ param index of element we want * @ param path from which we extract the element * @ return element or null */ public static String pathElement ( final int index , final String path ) { } }
final String [ ] paths = path . split ( "/" ) ; int idx = index ; if ( ( paths [ 0 ] == null ) || ( paths [ 0 ] . length ( ) == 0 ) ) { // skip empty first part - leading " / " idx ++ ; } if ( idx >= paths . length ) { return null ; } return paths [ idx ] ;
public class dnscnamerec { /** * Use this API to fetch all the dnscnamerec resources that are configured on netscaler . * This uses dnscnamerec _ args which is a way to provide additional arguments while fetching the resources . */ public static dnscnamerec [ ] get ( nitro_service service , dnscnamerec_args args ) throws Exception { } }
dnscnamerec obj = new dnscnamerec ( ) ; options option = new options ( ) ; option . set_args ( nitro_util . object_to_string_withoutquotes ( args ) ) ; dnscnamerec [ ] response = ( dnscnamerec [ ] ) obj . get_resources ( service , option ) ; return response ;
public class BatchListPolicyAttachmentsMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * Binds the policy reference, pagination token and page-size limit of the
     * request to the protocol marshaller.
     *
     * @param batchListPolicyAttachments the request object to marshall; must not be null
     * @param protocolMarshaller the marshaller receiving the bound fields
     * @throws SdkClientException if the argument is null or if marshalling fails
     */
    public void marshall(BatchListPolicyAttachments batchListPolicyAttachments, ProtocolMarshaller protocolMarshaller) {
        if (batchListPolicyAttachments == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(batchListPolicyAttachments.getPolicyReference(), POLICYREFERENCE_BINDING);
            protocolMarshaller.marshall(batchListPolicyAttachments.getNextToken(), NEXTTOKEN_BINDING);
            protocolMarshaller.marshall(batchListPolicyAttachments.getMaxResults(), MAXRESULTS_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client-side exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ZipShort { /** * Helper method to get the value as a java int from two bytes starting at given array offset * @ param bytes the array of bytes * @ param offset the offset to start * @ return the corresponding java int value */ public static int getValue ( byte [ ] bytes , int offset ) { } }
int value = ( bytes [ offset + 1 ] << BYTE_1_SHIFT ) & BYTE_1_MASK ; value += ( bytes [ offset ] & BYTE_MASK ) ; return value ;
public class CloudSchedulerClient { /** * Gets a job . * < p > Sample code : * < pre > < code > * try ( CloudSchedulerClient cloudSchedulerClient = CloudSchedulerClient . create ( ) ) { * JobName name = JobName . of ( " [ PROJECT ] " , " [ LOCATION ] " , " [ JOB ] " ) ; * Job response = cloudSchedulerClient . getJob ( name ) ; * < / code > < / pre > * @ param name Required . * < p > The job name . For example : ` projects / PROJECT _ ID / locations / LOCATION _ ID / jobs / JOB _ ID ` . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ public final Job getJob ( JobName name ) { } }
GetJobRequest request = GetJobRequest . newBuilder ( ) . setName ( name == null ? null : name . toString ( ) ) . build ( ) ; return getJob ( request ) ;
public class SARLQuickfixProvider { /** * Add the fixes with suppress - warning annotations . * @ param issue the issue . * @ param acceptor the resolution acceptor . */ @ Fix ( "*" ) public void fixSuppressWarnings ( Issue issue , IssueResolutionAcceptor acceptor ) { } }
if ( isIgnorable ( issue . getCode ( ) ) ) { SuppressWarningsAddModification . accept ( this , issue , acceptor ) ; }
public class CNFactory { /** * 增加词典 * @ param words * @ param pos */ public static void addDict ( Collection < String > words , String pos ) { } }
for ( String w : words ) { dict . add ( w , pos ) ; } setDict ( ) ;
public class HashUtils { /** * Hashes a string using the SHA - 224 algorithm . * @ since 1.1 * @ param data the string to hash * @ param charset the charset of the string * @ return the SHA - 224 hash of the string * @ throws NoSuchAlgorithmException the algorithm is not supported by existing providers */ public static byte [ ] sha224Hash ( String data , Charset charset ) throws NoSuchAlgorithmException { } }
return sha224Hash ( data . getBytes ( charset ) ) ;