signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ProtobufIDLProxy { /** * Creates the enum classes . * @ param enumTypes the enum types * @ param packageMapping the package mapping * @ param generateSouceOnly the generate souce only * @ param sourceOutputDir the source output dir * @ param compiledClass the compiled class * @ param mappedUniName the mapped uni name * @ param isUniName the is uni name * @ return the list */ private static List < Class < ? > > createEnumClasses ( Map < String , EnumElement > enumTypes , Map < String , String > packageMapping , boolean generateSouceOnly , File sourceOutputDir , Set < String > compiledClass , Map < String , String > mappedUniName , boolean isUniName ) { } }
List < Class < ? > > ret = new ArrayList < Class < ? > > ( ) ; Set < String > enumNames = new HashSet < String > ( ) ; Collection < EnumElement > enums = enumTypes . values ( ) ; for ( EnumElement enumType : enums ) { String name = enumType . name ( ) ; if ( enumNames . contains ( name ) ) { continue ; } enumNames . add ( name ) ; String packageName = packageMapping . get ( name ) ; Class cls = checkClass ( packageName , enumType , mappedUniName , isUniName ) ; if ( cls != null ) { ret . add ( cls ) ; continue ; } CodeDependent codeDependent = createCodeByType ( enumType , true , packageName , mappedUniName , isUniName ) ; compiledClass . add ( codeDependent . name ) ; compiledClass . add ( packageName + PACKAGE_SPLIT_CHAR + codeDependent . name ) ; if ( ! generateSouceOnly ) { Class < ? > newClass = JDKCompilerHelper . getJdkCompiler ( ) . compile ( codeDependent . getClassName ( ) , codeDependent . code , ProtobufIDLProxy . class . getClassLoader ( ) , null , - 1 ) ; ret . add ( newClass ) ; } else { // need to output source code to target path writeSourceCode ( codeDependent , sourceOutputDir ) ; } } return ret ;
public class DefaultRuntimeSpringConfiguration { /** * Initialises the ApplicationContext instance . */ protected void initialiseApplicationContext ( ) { } }
if ( context != null ) { return ; } context = createApplicationContext ( parent ) ; if ( parent != null && classLoader == null ) { trySettingClassLoaderOnContextIfFoundInParent ( parent ) ; } else if ( classLoader != null ) { setClassLoaderOnContext ( classLoader ) ; } Assert . notNull ( context , "ApplicationContext cannot be null" ) ;
public class ExpressionUtil { /** * write out expression without LNT * @ param value * @ param bc * @ param mode * @ throws TransformerException */ public static void writeOutSilent ( Expression value , BytecodeContext bc , int mode ) throws TransformerException { } }
Position start = value . getStart ( ) ; Position end = value . getEnd ( ) ; value . setStart ( null ) ; value . setEnd ( null ) ; value . writeOut ( bc , mode ) ; value . setStart ( start ) ; value . setEnd ( end ) ;
public class UpdateRateBasedRuleRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( UpdateRateBasedRuleRequest updateRateBasedRuleRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( updateRateBasedRuleRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( updateRateBasedRuleRequest . getRuleId ( ) , RULEID_BINDING ) ; protocolMarshaller . marshall ( updateRateBasedRuleRequest . getChangeToken ( ) , CHANGETOKEN_BINDING ) ; protocolMarshaller . marshall ( updateRateBasedRuleRequest . getUpdates ( ) , UPDATES_BINDING ) ; protocolMarshaller . marshall ( updateRateBasedRuleRequest . getRateLimit ( ) , RATELIMIT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class PartitionBalanceUtils { /** * Prints the details of cluster xml in various formats . Some information is * repeated in different forms . This is intentional so that it is easy to * find the specific view of the cluster xml that you want . * @ param cluster * @ return pretty string of cluster balance */ public static String verboseClusterDump ( final Cluster cluster ) { } }
// Builds a multi-section, human-readable report of the cluster layout.
// Sections: per-zone partition and node counts, node-ids per zone, per-node
// partition counts, and — only for clusters with more than one zone — a
// zone-partition summary (compressed partition lists, contiguous-run lengths,
// and hot partitions due to contiguity, threshold 5).
StringBuilder builder = new StringBuilder ( ) ; builder . append ( "CLUSTER XML SUMMARY\n" ) ; Map < Integer , Integer > zoneIdToPartitionCount = Maps . newHashMap ( ) ; Map < Integer , Integer > zoneIdToNodeCount = Maps . newHashMap ( ) ; for ( Zone zone : cluster . getZones ( ) ) { zoneIdToPartitionCount . put ( zone . getId ( ) , 0 ) ; zoneIdToNodeCount . put ( zone . getId ( ) , 0 ) ; } for ( Node node : cluster . getNodes ( ) ) { zoneIdToPartitionCount . put ( node . getZoneId ( ) , zoneIdToPartitionCount . get ( node . getZoneId ( ) ) + node . getNumberOfPartitions ( ) ) ; zoneIdToNodeCount . put ( node . getZoneId ( ) , zoneIdToNodeCount . get ( node . getZoneId ( ) ) + 1 ) ; } builder . append ( "\n" ) ; builder . append ( "Number of partitions per zone:\n" ) ; for ( Zone zone : cluster . getZones ( ) ) { builder . append ( "\tZone: " + zone . getId ( ) + " - " + zoneIdToPartitionCount . get ( zone . getId ( ) ) + "\n" ) ; } builder . append ( "\n" ) ; builder . append ( "Number of nodes per zone:\n" ) ; for ( Zone zone : cluster . getZones ( ) ) { builder . append ( "\tZone: " + zone . getId ( ) + " - " + zoneIdToNodeCount . get ( zone . getId ( ) ) + "\n" ) ; } builder . append ( "\n" ) ; builder . append ( "Nodes in each zone:\n" ) ; for ( Zone zone : cluster . getZones ( ) ) { builder . append ( "\tZone: " + zone . getId ( ) + " - " + cluster . getNodeIdsInZone ( zone . getId ( ) ) + "\n" ) ; } builder . append ( "\n" ) ; builder . append ( "Number of partitions per node:\n" ) ; for ( Node node : cluster . getNodes ( ) ) { builder . append ( "\tNode ID: " + node . getId ( ) + " - " + node . getNumberOfPartitions ( ) + " (" + node . getHost ( ) + ")\n" ) ; } builder . append ( "\n" ) ; if ( cluster . getZones ( ) . size ( ) > 1 ) { builder . append ( "ZONE-PARTITION SUMMARY:\n" ) ; builder . append ( "\n" ) ; builder . append ( "Partitions in each zone:\n" ) ; for ( Zone zone : cluster . getZones ( ) ) { builder . append ( "\tZone: " + zone . 
getId ( ) + " - " + compressedListOfPartitionsInZone ( cluster , zone . getId ( ) ) + "\n" ) ; } builder . append ( "\n" ) ; builder . append ( "Contiguous partition run lengths in each zone ('{run length : count}'):\n" ) ; for ( Zone zone : cluster . getZones ( ) ) { builder . append ( "\tZone: " + zone . getId ( ) + " - " + getPrettyMapOfContiguousPartitionRunLengths ( cluster , zone . getId ( ) ) + "\n" ) ; } builder . append ( "\n" ) ; builder . append ( "The following nodes have hot partitions:\n" ) ; builder . append ( getHotPartitionsDueToContiguity ( cluster , 5 ) ) ; builder . append ( "\n" ) ; } return builder . toString ( ) ;
public class SpiLoadUtil {
    /**
     * Collects the elements of the given {@code Iterable} into a {@code List},
     * preserving iteration order. Conveniently turns the iterables returned by
     * the other methods in this class into lists.
     *
     * @param findServices the iterable to drain; must not be null
     * @param <T>          the element type
     * @return a new mutable list containing every element yielded by the iterable
     * @see #findServices(Class)
     * @see #findServices(Class, ClassLoader)
     */
    public static <T> List<T> readAllFromIterator(Iterable<T> findServices) {
        List<T> list = new ArrayList<T>();
        // Iterable.forEach replaces the original's unbraced single-statement
        // for loop with a single, explicit collection operation.
        findServices.forEach(list::add);
        return list;
    }
}
public class IfcHeaderImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) @ Override public EList < String > getAuthor ( ) { } }
return ( EList < String > ) eGet ( StorePackage . Literals . IFC_HEADER__AUTHOR , true ) ;
public class GridActivity { /** * 刷新数据 */ private void refresh ( ) { } }
mDataList = new ArrayList < > ( ) ; for ( int i = 0 ; i < 100 ; i ++ ) { Group group = new Group ( ) ; group . setName ( "我是爸爸,我的号是 " + i ) ; group . setMemberList ( new ArrayList < GroupMember > ( ) ) ; for ( int j = 0 ; j < 10 ; j ++ ) { GroupMember member = new GroupMember ( ) ; member . setName ( "我是儿子,我的号是 " + j ) ; group . getMemberList ( ) . add ( member ) ; } mDataList . add ( group ) ; } mAdapter . setGroupList ( mDataList ) ; mAdapter . notifyDataSetChanged ( ) ;
public class NotABundleResourceURLConnection { /** * Simple little helper to get a unique string key for any bundle . * Currently uses the string form of the url for META - INF / MANIFEST . MF */ private final static String getManifestKeyForBundle ( Bundle owningBundle ) { } }
URL manifestKeyURL = owningBundle . getEntry ( MANIFESTPATH ) ; if ( manifestKeyURL == null ) { // ' bundle ' did not have a manifest . throw new IllegalArgumentException ( owningBundle . getSymbolicName ( ) ) ; } String manifestKey = manifestKeyURL . toExternalForm ( ) ; return manifestKey ;
public class LLogicalOpDelta { /** * < editor - fold desc = " object " > */ public static boolean argEquals ( LLogicalOpDelta the , Object that ) { } }
return Null . < LLogicalOpDelta > equals ( the , that , ( one , two ) -> { if ( one . getClass ( ) != two . getClass ( ) ) { return false ; } LLogicalOpDelta other = ( LLogicalOpDelta ) two ; return LBiObjBoolTriple . argEquals ( one . function , one . deltaFunction , one . lastValue ( ) , other . function , other . deltaFunction , other . lastValue ( ) ) ; } ) ;
public class SecurityPolicyClient { /** * Deletes the specified policy . * < p > Sample code : * < pre > < code > * try ( SecurityPolicyClient securityPolicyClient = SecurityPolicyClient . create ( ) ) { * ProjectGlobalSecurityPolicyName securityPolicy = ProjectGlobalSecurityPolicyName . of ( " [ PROJECT ] " , " [ SECURITY _ POLICY ] " ) ; * Operation response = securityPolicyClient . deleteSecurityPolicy ( securityPolicy ) ; * < / code > < / pre > * @ param securityPolicy Name of the security policy to delete . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation deleteSecurityPolicy ( ProjectGlobalSecurityPolicyName securityPolicy ) { } }
DeleteSecurityPolicyHttpRequest request = DeleteSecurityPolicyHttpRequest . newBuilder ( ) . setSecurityPolicy ( securityPolicy == null ? null : securityPolicy . toString ( ) ) . build ( ) ; return deleteSecurityPolicy ( request ) ;
public class CSSReader { /** * Read the CSS from the passed String using a character stream . An eventually * contained < code > @ charset < / code > rule is ignored . * @ param sCSS * The source string containing the CSS to be parsed . May not be * < code > null < / code > . * @ param eVersion * The CSS version to use . May not be < code > null < / code > . * @ param aCustomErrorHandler * An optional custom error handler that can be used to collect the * recoverable parsing errors . May be < code > null < / code > . * @ return < code > null < / code > if reading failed , the CSS declarations * otherwise . * @ since 3.7.3 */ @ Nullable public static CascadingStyleSheet readFromString ( @ Nonnull final String sCSS , @ Nonnull final ECSSVersion eVersion , @ Nullable final ICSSParseErrorHandler aCustomErrorHandler ) { } }
return readFromStringReader ( sCSS , new CSSReaderSettings ( ) . setCSSVersion ( eVersion ) . setCustomErrorHandler ( aCustomErrorHandler ) ) ;
public class PredicateExpression { /** * Matches an array value if it contains all the elements of the argument array * @ param rhs The arguments * @ return PredicateExpression : $ all rhs */ public static PredicateExpression all ( Object ... rhs ) { } }
PredicateExpression ex = new PredicateExpression ( "$all" , rhs ) ; if ( rhs . length == 1 ) { ex . single = true ; } return ex ;
public class ClassLocator { /** * Checks whether the " otherclass " is a subclass of the given " superclass " . * @ param superclass the superclass to check against * @ param otherclass this class is checked whether it is a subclass * of the the superclass * @ return TRUE if " otherclass " is a true subclass */ public static boolean isSubclass ( String superclass , String otherclass ) { } }
String key ; key = superclass + "-" + otherclass ; if ( m_CheckSubClass . containsKey ( key ) ) return m_CheckSubClass . get ( key ) ; try { return isSubclass ( Class . forName ( superclass ) , Class . forName ( otherclass ) ) ; } catch ( Throwable t ) { return false ; }
public class DateArrayProperty {
    // ----- private methods -----

    /**
     * Converts an array of epoch-millisecond values into an array of {@link Date}s,
     * preserving order and length.
     *
     * @param source the timestamps to convert; elements must be non-null
     * @return a {@code Date[]} of the same length and order as {@code source}
     */
    private Date[] convertLongArrayToDateArray(final Long[] source) {
        // Fill the result array directly: removes the original's intermediate
        // ArrayList and the redundant (Date[]) cast on toArray.
        final Date[] result = new Date[source.length];
        for (int i = 0; i < source.length; i++) {
            result[i] = new Date(source[i]);
        }
        return result;
    }
}
public class JDBCResultSetMetaData { /** * < ! - - start generic documentation - - > * Gets the designated column ' s number of digits to right of the decimal point . * 0 is returned for data types where the scale is not applicable . * < ! - - end generic documentation - - > * < ! - - start Release - specific documentation - - > * < div class = " ReleaseSpecificDocumentation " > * < h3 > HSQLDB - Specific Information : < / h3 > < p > * Starting with 1.8.0 , HSQLDB reports the declared * scale for table columns . < p > * From 1.9.0 , HSQLDB , reports the correct scale for * computed columns according to the SQL Standard . < p > * < pre > * sql . enforce _ strict _ size * < / pre > * For datetime and interval types such as Timestamp or Time , the * fractional second precision is reported . * < / div > * < ! - - end release - specific documentation - - > * @ param column the first column is 1 , the second is 2 , . . . * @ return scale * @ exception SQLException if a database access error occurs */ public int getScale ( int column ) throws SQLException { } }
Type type = resultMetaData . columnTypes [ -- column ] ; return type . scale ;
public class XLogger { /** * Log an exception being caught . The generated log event uses Level ERROR . * @ param throwable * the exception being caught . */ public void catching ( Throwable throwable ) { } }
if ( instanceofLAL ) { ( ( LocationAwareLogger ) logger ) . log ( CATCHING_MARKER , FQCN , LocationAwareLogger . ERROR_INT , "catching" , null , throwable ) ; }
public class ProcessExecutor { /** * Handles the process ' standard error stream . * @ param stderr The error stream reader . * @ throws IOException If reading stderr failed . */ protected void handleError ( InputStream stderr ) throws IOException { } }
byte [ ] buffer = new byte [ 4 * 1024 ] ; int b ; while ( ( b = stderr . read ( buffer ) ) > 0 ) { err . write ( buffer , 0 , b ) ; }
public class dnsview_gslbservice_binding { /** * Use this API to fetch filtered set of dnsview _ gslbservice _ binding resources . * filter string should be in JSON format . eg : " port : 80 , servicetype : HTTP " . */ public static dnsview_gslbservice_binding [ ] get_filtered ( nitro_service service , String viewname , String filter ) throws Exception { } }
dnsview_gslbservice_binding obj = new dnsview_gslbservice_binding ( ) ; obj . set_viewname ( viewname ) ; options option = new options ( ) ; option . set_filter ( filter ) ; dnsview_gslbservice_binding [ ] response = ( dnsview_gslbservice_binding [ ] ) obj . getfiltered ( service , option ) ; return response ;
public class PlanNode { /** * Set the node ' s value for the supplied property . * @ param propertyId the property identifier * @ param value the value , or null if the property is to be removed * @ return the previous value that was overwritten by this call , or null if there was prior value */ public Object setProperty ( Property propertyId , Object value ) { } }
if ( value == null ) { // Removing this property . . . return nodeProperties != null ? nodeProperties . remove ( propertyId ) : null ; } // Otherwise , we ' re adding the property if ( nodeProperties == null ) nodeProperties = new TreeMap < Property , Object > ( ) ; return nodeProperties . put ( propertyId , value ) ;
public class BatchTransactionServiceImpl { /** * ( non - Javadoc ) * @ see * com . ibm . jbatch . container . services . IBatchServiceBase # init ( com . ibm . batch * . container . IBatchConfig ) */ @ Override public void init ( IBatchConfig batchConfig ) throws BatchContainerServiceException { } }
logger . entering ( CLASSNAME , "init" , batchConfig ) ; this . batchConfig = batchConfig ; logger . exiting ( CLASSNAME , "init" ) ;
public class Cypher { /** * TODO : add in alpha06 * @ Context * ProcedureTransaction procedureTransaction ; */ @ Procedure @ Description ( "apoc.cypher.run(fragment, params) yield value - executes reading fragment with the given parameters" ) public Stream < MapResult > run ( @ Name ( "cypher" ) String statement , @ Name ( "params" ) Map < String , Object > params ) { } }
if ( params == null ) params = Collections . emptyMap ( ) ; return db . execute ( withParamMapping ( statement , params . keySet ( ) ) , params ) . stream ( ) . map ( MapResult :: new ) ;
public class CoreJBossASClient {
    /**
     * Adds a new subsystem (along with some optional settings) to the core
     * system. If settings is null the subsystem is created without additional
     * settings. Equivalent CLI: /subsystem=&lt;name&gt;:add()
     *
     * @param name     the name of the new subsystem
     * @param settings a node with additional settings used to initialize the subsystem; may be null
     * @throws Exception any error, including a non-success response
     */
    public void addSubsystem(String name, ModelNode settings) throws Exception {
        final Address address = Address.root().add(SUBSYSTEM, name);
        final ModelNode response = execute(createRequest(ADD, address, settings));
        if (!isSuccess(response)) {
            throw new FailureException(response, "Failed to add new subsystem [" + name + "]");
        }
    }
}
public class DataSetUtils { /** * < b > showDataSet < / b > < br > * public void showDataSet ( int mtLv , String itemCode , DataSet ds , < br > * int in _ Digits , int ot _ Digits , int r _ End _ I , int c _ End _ I ) < br > * Shows content of DataSet . < br > * @ param mtLv - method level * @ param itemCode - item = DataSet * @ param ds - DataSet * @ param in _ Digits - input digits * @ param ot _ Digits - output digits * @ param r _ End _ I - rows end index * @ param c _ End _ I - columns end index */ public void showDataSet ( int mtLv , String itemCode , DataSet ds , int in_Digits , int ot_Digits , int r_End_I , int c_End_I ) { } }
// Logs a formatted dump of the DataSet: header info (counts, shapes of the
// feature and label arrays), then up to r_End_I+1 rows with up to c_End_I+1
// input and output columns each, columns printed in reverse index order.
// A null DataSet is reported and the method returns early; negative digit
// counts are clamped to 0.
// NOTE(review): ot_INDA (the labels) is explicitly allowed to be null — it is
// null-checked and logged just above — yet the subsequent guard
// `in_INDA.rows() != ot_INDA.rows()` dereferences it unconditionally and will
// throw a NullPointerException when the DataSet has no labels. The row loop
// below does guard with `ot_INDA != null`, so the fix is to add the same
// null check to the row-count comparison.
oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += BTools . 
mtLv ++ ; String oinfo = "" ; String methodName = moduleCode + "." + "showDataSet" ; if ( ds == null ) { oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += methodName + ": " ; oinfo += "\"" + itemCode + "\": " ; oinfo += " == null !!!; " ; oinfo += BTools . getSLcDtTm ( ) ; sis . info ( oinfo ) ; return ; } oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += methodName + ": " ; oinfo += "\"" + itemCode + "\": " ; oinfo += "in_Digits: " + in_Digits + "; " ; oinfo += "ot_Digits: " + ot_Digits + "; " ; sis . info ( oinfo ) ; oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += BTools . getMtLvISS ( ) ; oinfo += "r_End_I: " + r_End_I + "; " ; oinfo += "c_End_I: " + c_End_I + "; " ; oinfo += BTools . getSLcDtTm ( ) ; sis . info ( oinfo ) ; oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += BTools . getMtLvISS ( ) ; oinfo += "ds: " ; oinfo += ".numInputs: " + ds . numInputs ( ) + "; " ; oinfo += ".numOutcomes: " + ds . numOutcomes ( ) + "; " ; oinfo += ".numExamples: " + ds . numExamples ( ) + "; " ; oinfo += ".hasMaskArrays: " + BTools . getSBln ( ds . hasMaskArrays ( ) ) + "; " ; sis . info ( oinfo ) ; if ( in_Digits < 0 ) in_Digits = 0 ; if ( ot_Digits < 0 ) ot_Digits = 0 ; INDArray in_INDA ; // I = Input INDArray ot_INDA ; // O = Output in_INDA = ds . getFeatures ( ) ; ot_INDA = ds . getLabels ( ) ; oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += BTools . getMtLvISS ( ) ; oinfo += "in_INDA: " ; oinfo += ".rows: " + in_INDA . rows ( ) + "; " ; oinfo += ".columns: " + in_INDA . columns ( ) + "; " ; oinfo += ".rank: " + in_INDA . rank ( ) + "; " ; oinfo += ".shape: " + BTools . getSIntA ( ArrayUtil . toInts ( in_INDA . shape ( ) ) ) + "; " ; oinfo += ".length: " + in_INDA . length ( ) + "; " ; oinfo += ".size( 0 ): " + in_INDA . size ( 0 ) + "; " ; sis . info ( oinfo ) ; if ( ot_INDA != null ) { oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += BTools . 
getMtLvISS ( ) ; oinfo += "ot_INDA: " ; oinfo += ".rows: " + ot_INDA . rows ( ) + "; " ; oinfo += ".columns: " + ot_INDA . columns ( ) + "; " ; oinfo += ".rank: " + ot_INDA . rank ( ) + "; " ; oinfo += ".shape: " + BTools . getSIntA ( ArrayUtil . toInts ( ot_INDA . shape ( ) ) ) + "; " ; oinfo += ".length: " + ot_INDA . length ( ) + "; " ; oinfo += ".size( 0 ): " + ot_INDA . size ( 0 ) + "; " ; sis . info ( oinfo ) ; } else { oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += BTools . getMtLvISS ( ) ; oinfo += "ot_INDA == null ! " ; sis . info ( oinfo ) ; } if ( in_INDA . rows ( ) != ot_INDA . rows ( ) ) { oinfo = "===" ; oinfo += methodName + ": " ; oinfo += "in_INDA.rows() != ot_INDA.rows() !!! ; " ; oinfo += BTools . getSLcDtTm ( ) ; sis . info ( oinfo ) ; return ; } boolean wasShownTitle = false ; InfoLine il ; InfoValues iv ; double j_Dbl = - 1 ; // FIXME : int cast int i_CharsCount = BTools . getIndexCharsCount ( ( int ) in_INDA . rows ( ) - 1 ) ; oinfo = "" ; oinfo += BTools . getMtLvESS ( mtLv ) ; oinfo += BTools . getMtLvISS ( ) ; oinfo += "Data: j: IN->I0; " ; sis . info ( oinfo ) ; for ( int i = 0 ; i < in_INDA . rows ( ) ; i ++ ) { if ( i > r_End_I ) break ; il = new InfoLine ( ) ; iv = new InfoValues ( "i" , "" ) ; il . ivL . add ( iv ) ; iv . vsL . add ( BTools . getSInt ( i , i_CharsCount ) ) ; iv = new InfoValues ( "" , "" , "" ) ; il . ivL . add ( iv ) ; iv . vsL . add ( "" ) ; int c_I = 0 ; for ( int j = ( int ) in_INDA . columns ( ) - 1 ; j >= 0 ; j -- ) { if ( c_I > c_End_I ) break ; j_Dbl = in_INDA . getDouble ( i , j ) ; iv = new InfoValues ( "In" , "j" , BTools . getSInt ( j ) ) ; il . ivL . add ( iv ) ; iv . vsL . add ( BTools . getSDbl ( j_Dbl , in_Digits , true , in_Digits + 4 ) ) ; c_I ++ ; } iv = new InfoValues ( "" , "" , "" ) ; il . ivL . add ( iv ) ; iv . vsL . add ( "" ) ; c_I = 0 ; if ( ot_INDA != null ) { // FIXME : int cast for ( int j = ( int ) ot_INDA . 
columns ( ) - 1 ; j >= 0 ; j -- ) { if ( c_I > c_End_I ) break ; j_Dbl = ot_INDA . getDouble ( i , j ) ; iv = new InfoValues ( "Ot" , "j" , BTools . getSInt ( j ) ) ; il . ivL . add ( iv ) ; iv . vsL . add ( BTools . getSDbl ( j_Dbl , ot_Digits , true , ot_Digits + 4 ) ) ; c_I ++ ; } } if ( ! wasShownTitle ) { oinfo = il . getTitleLine ( mtLv , 0 ) ; sis . info ( oinfo ) ; oinfo = il . getTitleLine ( mtLv , 1 ) ; sis . info ( oinfo ) ; oinfo = il . getTitleLine ( mtLv , 2 ) ; sis . info ( oinfo ) ; // oinfo = il . getTitleLine ( mtLv , 3 ) ; sis . info ( oinfo ) ; // oinfo = il . getTitleLine ( mtLv , 4 ) ; sis . info ( oinfo ) ; wasShownTitle = true ; } oinfo = il . getValuesLine ( mtLv ) ; sis . info ( oinfo ) ; }
public class AccountManager {
    /**
     * Creates a new account using the specified username and password. Every
     * account attribute the server reports as required (e.g. email address,
     * phone number) is submitted with a blank value, which the server may or
     * may not accept — prefer letting the end user populate the required
     * attributes with real values.
     *
     * @param username the username
     * @param password the password
     * @throws XMPPErrorException
     * @throws NoResponseException
     * @throws NotConnectedException
     * @throws InterruptedException
     */
    public void createAccount(Localpart username, String password)
            throws NoResponseException, XMPPErrorException, NotConnectedException, InterruptedException {
        // Map every required attribute to a blank placeholder value.
        Map<String, String> attributes = new HashMap<>();
        for (String attributeName : getAccountAttributes()) {
            attributes.put(attributeName, "");
        }
        createAccount(username, password, attributes);
    }
}
public class GeometryUtil { /** * Returns the dimension of a molecule ( width / height ) . * @ param atomCon of which the dimension should be returned * @ return array containing { width , height } */ public static double [ ] get2DDimension ( IAtomContainer atomCon ) { } }
double [ ] minmax = getMinMax ( atomCon ) ; double maxX = minmax [ 2 ] ; double maxY = minmax [ 3 ] ; double minX = minmax [ 0 ] ; double minY = minmax [ 1 ] ; return new double [ ] { maxX - minX , maxY - minY } ;
public class Smarts2MQLVisitor { /** * logical bonds */ public Object visit ( ASTImplicitHighAndBond node , Object data ) { } }
// Visits an implicit high-precedence AND bond node by delegating straight to
// its first (and only consumed) child.
return node . jjtGetChild ( 0 ) . jjtAccept ( this , data ) ;
public class OWLLiteralImplFloat_CustomFieldSerializer { /** * Deserializes the content of the object from the * { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } . * @ param streamReader the { @ link com . google . gwt . user . client . rpc . SerializationStreamReader } to read the * object ' s content from * @ param instance the object instance to deserialize * @ throws com . google . gwt . user . client . rpc . SerializationException * if the deserialization operation is not * successful */ @ Override public void deserializeInstance ( SerializationStreamReader streamReader , OWLLiteralImplFloat instance ) throws SerializationException { } }
// Delegates to the static deserialize(...) helper — the conventional GWT
// custom-field-serializer pattern.
deserialize ( streamReader , instance ) ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcLightSourceDirectional ( ) { } }
if ( ifcLightSourceDirectionalEClass == null ) { ifcLightSourceDirectionalEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 346 ) ; } return ifcLightSourceDirectionalEClass ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcLibraryInformation ( ) { } }
if ( ifcLibraryInformationEClass == null ) { ifcLibraryInformationEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 338 ) ; } return ifcLibraryInformationEClass ;
public class UCaseProps { /** * Get the full lowercase mapping for c . * @ param c Character to be mapped . * @ param iter Character iterator , used for context - sensitive mappings . * See ContextIterator for details . * If iter = = null then a context - independent result is returned . * @ param out If the mapping result is a string , then it is appended to out . * @ param caseLocale Case locale value from ucase _ getCaseLocale ( ) . * @ return Output code point or string length , see MAX _ STRING _ LENGTH . * @ see ContextIterator * @ see # MAX _ STRING _ LENGTH * @ hide draft / provisional / internal are hidden on Android */ public final int toFullLower ( int c , ContextIterator iter , Appendable out , int caseLocale ) { } }
// Implementation outline: the common case (no exception bits in the trie
// props) applies a simple delta to uppercase/titlecase characters. The
// exception path first handles the hardcoded conditional special cases from
// SpecialCasing.txt (Lithuanian dot-above insertion, Turkish/Azeri dotted and
// dotless I, Greek final sigma), then full string mappings from the exceptions
// table, then a simple EXC_LOWER slot value. A return value of 0 from the
// Turkish dot-above branch means "remove the character"; string mappings
// return the appended string's length; otherwise the mapped code point is
// returned, bit-inverted (~result) when the mapping is the identity.
int result , props ; result = c ; props = trie . get ( c ) ; if ( ! propsHasException ( props ) ) { if ( getTypeFromProps ( props ) >= UPPER ) { result = c + getDelta ( props ) ; } } else { int excOffset = getExceptionsOffset ( props ) , excOffset2 ; int excWord = exceptions . charAt ( excOffset ++ ) ; int full ; excOffset2 = excOffset ; if ( ( excWord & EXC_CONDITIONAL_SPECIAL ) != 0 ) { /* use hardcoded conditions and mappings */ /* * Test for conditional mappings first * ( otherwise the unconditional default mappings are always taken ) , * then test for characters that have unconditional mappings in SpecialCasing . txt , * then get the UnicodeData . txt mappings . */ if ( caseLocale == LOC_LITHUANIAN && /* base characters , find accents above */ ( ( ( c == 0x49 || c == 0x4a || c == 0x12e ) && isFollowedByMoreAbove ( iter ) ) || /* precomposed with accent above , no need to find one */ ( c == 0xcc || c == 0xcd || c == 0x128 ) ) ) { /* # Lithuanian # Lithuanian retains the dot in a lowercase i when followed by accents . # Introduce an explicit dot above when lowercasing capital I ' s and J ' s # whenever there are more accents above . # ( of the accents used in Lithuanian : grave , acute , tilde above , and ogonek ) 0049 ; 0069 0307 ; 0049 ; 0049 ; lt More _ Above ; # LATIN CAPITAL LETTER I 004A ; 006A 0307 ; 004A ; 004A ; lt More _ Above ; # LATIN CAPITAL LETTER J 012E ; 012F 0307 ; 012E ; 012E ; lt More _ Above ; # LATIN CAPITAL LETTER I WITH OGONEK 00CC ; 0069 0307 0300 ; 00CC ; 00CC ; lt ; # LATIN CAPITAL LETTER I WITH GRAVE 00CD ; 0069 0307 0301 ; 00CD ; 00CD ; lt ; # LATIN CAPITAL LETTER I WITH ACUTE 0128 ; 0069 0307 0303 ; 0128 ; 0128 ; lt ; # LATIN CAPITAL LETTER I WITH TILDE */ try { switch ( c ) { case 0x49 : /* LATIN CAPITAL LETTER I */ out . append ( iDot ) ; return 2 ; case 0x4a : /* LATIN CAPITAL LETTER J */ out . append ( jDot ) ; return 2 ; case 0x12e : /* LATIN CAPITAL LETTER I WITH OGONEK */ out . 
append ( iOgonekDot ) ; return 2 ; case 0xcc : /* LATIN CAPITAL LETTER I WITH GRAVE */ out . append ( iDotGrave ) ; return 3 ; case 0xcd : /* LATIN CAPITAL LETTER I WITH ACUTE */ out . append ( iDotAcute ) ; return 3 ; case 0x128 : /* LATIN CAPITAL LETTER I WITH TILDE */ out . append ( iDotTilde ) ; return 3 ; default : return 0 ; /* will not occur */ } } catch ( IOException e ) { throw new ICUUncheckedIOException ( e ) ; } /* # Turkish and Azeri */ } else if ( caseLocale == LOC_TURKISH && c == 0x130 ) { /* # I and i - dotless ; I - dot and i are case pairs in Turkish and Azeri # The following rules handle those cases . 0130 ; 0069 ; 0130 ; 0130 ; tr # LATIN CAPITAL LETTER I WITH DOT ABOVE 0130 ; 0069 ; 0130 ; 0130 ; az # LATIN CAPITAL LETTER I WITH DOT ABOVE */ return 0x69 ; } else if ( caseLocale == LOC_TURKISH && c == 0x307 && isPrecededBy_I ( iter ) ) { /* # When lowercasing , remove dot _ above in the sequence I + dot _ above , which will turn into i . # This matches the behavior of the canonically equivalent I - dot _ above 0307 ; ; 0307 ; 0307 ; tr After _ I ; # COMBINING DOT ABOVE 0307 ; ; 0307 ; 0307 ; az After _ I ; # COMBINING DOT ABOVE */ return 0 ; /* remove the dot ( continue without output ) */ } else if ( caseLocale == LOC_TURKISH && c == 0x49 && ! isFollowedByDotAbove ( iter ) ) { /* # When lowercasing , unless an I is before a dot _ above , it turns into a dotless i . 0049 ; 0131 ; 0049 ; 0049 ; tr Not _ Before _ Dot ; # LATIN CAPITAL LETTER I 0049 ; 0131 ; 0049 ; 0049 ; az Not _ Before _ Dot ; # LATIN CAPITAL LETTER I */ return 0x131 ; } else if ( c == 0x130 ) { /* # Preserve canonical equivalence for I with dot . Turkic is handled below . 0130 ; 0069 0307 ; 0130 ; 0130 ; # LATIN CAPITAL LETTER I WITH DOT ABOVE */ try { out . append ( iDot ) ; return 2 ; } catch ( IOException e ) { throw new ICUUncheckedIOException ( e ) ; } } else if ( c == 0x3a3 && ! 
isFollowedByCasedLetter ( iter , 1 ) && isFollowedByCasedLetter ( iter , - 1 ) /* -1 = preceded */ ) { /* greek capital sigma maps depending on surrounding cased letters ( see SpecialCasing . txt ) */ /* # Special case for final form of sigma 03A3 ; 03C2 ; 03A3 ; 03A3 ; Final _ Sigma ; # GREEK CAPITAL LETTER SIGMA */ return 0x3c2 ; /* greek small final sigma */ } else { /* no known conditional special case mapping , use a normal mapping */ } } else if ( hasSlot ( excWord , EXC_FULL_MAPPINGS ) ) { long value = getSlotValueAndOffset ( excWord , EXC_FULL_MAPPINGS , excOffset ) ; full = ( int ) value & FULL_LOWER ; if ( full != 0 ) { /* start of full case mapping strings */ excOffset = ( int ) ( value >> 32 ) + 1 ; try { // append the lowercase mapping out . append ( exceptions , excOffset , excOffset + full ) ; /* return the string length */ return full ; } catch ( IOException e ) { throw new ICUUncheckedIOException ( e ) ; } } } if ( hasSlot ( excWord , EXC_LOWER ) ) { result = getSlotValue ( excWord , EXC_LOWER , excOffset2 ) ; } } return ( result == c ) ? ~ result : result ;
public class SessionDataManager { /** * { @ inheritDoc } */ public int getChildNodesCount ( NodeData parent ) throws RepositoryException { } }
int childsCount = changesLog . getChildNodesCount ( parent . getIdentifier ( ) ) + ( isNew ( parent . getIdentifier ( ) ) ? 0 : transactionableManager . getChildNodesCount ( parent ) ) ; if ( childsCount < 0 ) { throw new InvalidItemStateException ( "Node's child nodes were changed in another Session " + parent . getQPath ( ) . getAsString ( ) ) ; } return childsCount ;
public class IdentityRateProvider { /** * ( non - Javadoc ) * @ see * javax . money . convert . ExchangeRateProvider # getReversed ( javax . money . convert * . ExchangeRate ) */ @ Override public ExchangeRate getReversed ( ExchangeRate rate ) { } }
if ( rate . getContext ( ) . getProviderName ( ) . equals ( CONTEXT . getProviderName ( ) ) ) { return new ExchangeRateBuilder ( rate . getContext ( ) ) . setTerm ( rate . getBaseCurrency ( ) ) . setBase ( rate . getCurrency ( ) ) . setFactor ( new DefaultNumberValue ( BigDecimal . ONE ) ) . build ( ) ; } return null ;
public class InternalXtypeParser { /** * InternalXtype . g : 264:1 : ruleJvmParameterizedTypeReference returns [ EObject current = null ] : ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) ; */ public final EObject ruleJvmParameterizedTypeReference ( ) throws RecognitionException { } }
// NOTE(review): ANTLR-generated parser method for the grammar rule JvmParameterizedTypeReference,
// emitted from InternalXtype.g. Generated code: do not hand-edit -- regenerate from the grammar
// instead. The embedded "InternalXtype.g:NNN" markers below are the generator's grammar-position
// comments, flattened here together with the code; the structure parses a qualified type name,
// an optional '<' type-argument list '>', and zero or more '.' inner-type segments, each with its
// own optional type-argument list.
EObject current = null ; Token otherlv_1 = null ; Token otherlv_3 = null ; Token otherlv_5 = null ; Token otherlv_7 = null ; Token otherlv_9 = null ; Token otherlv_11 = null ; Token otherlv_13 = null ; EObject lv_arguments_2_0 = null ; EObject lv_arguments_4_0 = null ; EObject lv_arguments_10_0 = null ; EObject lv_arguments_12_0 = null ; enterRule ( ) ; try { // InternalXtype . g : 270:2 : ( ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) ) // InternalXtype . g : 271:2 : ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) { // InternalXtype . g : 271:2 : ( ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . 
' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? ) // InternalXtype . g : 272:3 : ( ( ruleQualifiedName ) ) ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? { // InternalXtype . g : 272:3 : ( ( ruleQualifiedName ) ) // InternalXtype . g : 273:4 : ( ruleQualifiedName ) { // InternalXtype . g : 273:4 : ( ruleQualifiedName ) // InternalXtype . g : 274:5 : ruleQualifiedName { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getTypeJvmTypeCrossReference_0_0 ( ) ) ; } pushFollow ( FOLLOW_9 ) ; ruleQualifiedName ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalXtype . g : 288:3 : ( ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . 
' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * ) ? int alt10 = 2 ; int LA10_0 = input . LA ( 1 ) ; if ( ( LA10_0 == 16 ) && ( synpred2_InternalXtype ( ) ) ) { alt10 = 1 ; } switch ( alt10 ) { case 1 : // InternalXtype . g : 289:4 : ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 5 = ' > ' ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * { // InternalXtype . g : 289:4 : ( ( ' < ' ) = > otherlv _ 1 = ' < ' ) // InternalXtype . g : 290:5 : ( ' < ' ) = > otherlv _ 1 = ' < ' { otherlv_1 = ( Token ) match ( input , 16 , FOLLOW_10 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_1 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getLessThanSignKeyword_1_0 ( ) ) ; } } // InternalXtype . g : 296:4 : ( ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) ) // InternalXtype . g : 297:5 : ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) { // InternalXtype . g : 297:5 : ( lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference ) // InternalXtype . g : 298:6 : lv _ arguments _ 2_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_1_0 ( ) ) ; } pushFollow ( FOLLOW_11 ) ; lv_arguments_2_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . 
failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_2_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXtype . g : 315:4 : ( otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) ) * loop6 : do { int alt6 = 2 ; int LA6_0 = input . LA ( 1 ) ; if ( ( LA6_0 == 13 ) ) { alt6 = 1 ; } switch ( alt6 ) { case 1 : // InternalXtype . g : 316:5 : otherlv _ 3 = ' , ' ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) { otherlv_3 = ( Token ) match ( input , 13 , FOLLOW_10 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_3 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getCommaKeyword_1_2_0 ( ) ) ; } // InternalXtype . g : 320:5 : ( ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) ) // InternalXtype . g : 321:6 : ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) { // InternalXtype . g : 321:6 : ( lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference ) // InternalXtype . g : 322:7 : lv _ arguments _ 4_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_11 ) ; lv_arguments_4_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . 
getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_4_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop6 ; } } while ( true ) ; otherlv_5 = ( Token ) match ( input , 17 , FOLLOW_12 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_5 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getGreaterThanSignKeyword_1_3 ( ) ) ; } // InternalXtype . g : 344:4 : ( ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? ) * loop9 : do { int alt9 = 2 ; int LA9_0 = input . LA ( 1 ) ; if ( ( LA9_0 == 18 ) && ( synpred3_InternalXtype ( ) ) ) { alt9 = 1 ; } switch ( alt9 ) { case 1 : // InternalXtype . g : 345:5 : ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) ( ( ruleValidID ) ) ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? { // InternalXtype . g : 345:5 : ( ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) ) // InternalXtype . g : 346:6 : ( ( ( ) ' . ' ) ) = > ( ( ) otherlv _ 7 = ' . ' ) { // InternalXtype . g : 352:6 : ( ( ) otherlv _ 7 = ' . ' ) // InternalXtype . g : 353:7 : ( ) otherlv _ 7 = ' . ' { // InternalXtype . g : 353:7 : ( ) // InternalXtype . g : 354:8: { if ( state . backtracking == 0 ) { current = forceCreateModelElementAndSet ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getJvmInnerTypeReferenceOuterAction_1_4_0_0_0 ( ) , current ) ; } } otherlv_7 = ( Token ) match ( input , 18 , FOLLOW_13 ) ; if ( state . failed ) return current ; if ( state . 
backtracking == 0 ) { newLeafNode ( otherlv_7 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getFullStopKeyword_1_4_0_0_1 ( ) ) ; } } } // InternalXtype . g : 366:5 : ( ( ruleValidID ) ) // InternalXtype . g : 367:6 : ( ruleValidID ) { // InternalXtype . g : 367:6 : ( ruleValidID ) // InternalXtype . g : 368:7 : ruleValidID { if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElement ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } } if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getTypeJvmTypeCrossReference_1_4_1_0 ( ) ) ; } pushFollow ( FOLLOW_14 ) ; ruleValidID ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { afterParserOrEnumRuleCall ( ) ; } } } // InternalXtype . g : 382:5 : ( ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' ) ? int alt8 = 2 ; int LA8_0 = input . LA ( 1 ) ; if ( ( LA8_0 == 16 ) && ( synpred4_InternalXtype ( ) ) ) { alt8 = 1 ; } switch ( alt8 ) { case 1 : // InternalXtype . g : 383:6 : ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * otherlv _ 13 = ' > ' { // InternalXtype . g : 383:6 : ( ( ' < ' ) = > otherlv _ 9 = ' < ' ) // InternalXtype . g : 384:7 : ( ' < ' ) = > otherlv _ 9 = ' < ' { otherlv_9 = ( Token ) match ( input , 16 , FOLLOW_10 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_9 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getLessThanSignKeyword_1_4_2_0 ( ) ) ; } } // InternalXtype . g : 390:6 : ( ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) ) // InternalXtype . 
g : 391:7 : ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) { // InternalXtype . g : 391:7 : ( lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference ) // InternalXtype . g : 392:8 : lv _ arguments _ 10_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_11 ) ; lv_arguments_10_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_10_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } // InternalXtype . g : 409:6 : ( otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) ) * loop7 : do { int alt7 = 2 ; int LA7_0 = input . LA ( 1 ) ; if ( ( LA7_0 == 13 ) ) { alt7 = 1 ; } switch ( alt7 ) { case 1 : // InternalXtype . g : 410:7 : otherlv _ 11 = ' , ' ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) { otherlv_11 = ( Token ) match ( input , 13 , FOLLOW_10 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_11 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getCommaKeyword_1_4_2_2_0 ( ) ) ; } // InternalXtype . g : 414:7 : ( ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) ) // InternalXtype . g : 415:8 : ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) { // InternalXtype . g : 415:8 : ( lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference ) // InternalXtype . g : 416:9 : lv _ arguments _ 12_0 = ruleJvmArgumentTypeReference { if ( state . backtracking == 0 ) { newCompositeNode ( grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . 
getArgumentsJvmArgumentTypeReferenceParserRuleCall_1_4_2_2_1_0 ( ) ) ; } pushFollow ( FOLLOW_11 ) ; lv_arguments_12_0 = ruleJvmArgumentTypeReference ( ) ; state . _fsp -- ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { if ( current == null ) { current = createModelElementForParent ( grammarAccess . getJvmParameterizedTypeReferenceRule ( ) ) ; } add ( current , "arguments" , lv_arguments_12_0 , "org.eclipse.xtext.xbase.Xtype.JvmArgumentTypeReference" ) ; afterParserOrEnumRuleCall ( ) ; } } } } break ; default : break loop7 ; } } while ( true ) ; otherlv_13 = ( Token ) match ( input , 17 , FOLLOW_12 ) ; if ( state . failed ) return current ; if ( state . backtracking == 0 ) { newLeafNode ( otherlv_13 , grammarAccess . getJvmParameterizedTypeReferenceAccess ( ) . getGreaterThanSignKeyword_1_4_2_3 ( ) ) ; } } break ; } } break ; default : break loop9 ; } } while ( true ) ; } break ; } } } if ( state . backtracking == 0 ) { leaveRule ( ) ; } } catch ( RecognitionException re ) { recover ( input , re ) ; appendSkippedTokens ( ) ; } finally { } return current ;
public class XmlPrintStream { /** * Open an XML element with the given name , and attributes . A call to closeElement ( ) will output * the appropriate XML closing tag . This class remembers the tag names . * The String parameters are taken to be alternatively names and values . Any odd value * at the end of the list is added as a valueless attribute . * @ param name Name of the element . * @ param attributes Attributes in name value pairs . */ public void openElement ( String name , String ... attributes ) { } }
elementStack . push ( name ) ; startElement ( name , attributes ) ; println ( ">" ) ;
public class UdpClient { /** * Inject default attribute to the future child { @ link Channel } connections . They * will be available via { @ link Channel # attr ( AttributeKey ) } . * @ param key the attribute key * @ param value the attribute value * @ param < T > the attribute type * @ return a new { @ link UdpClient } * @ see Bootstrap # attr ( AttributeKey , Object ) */ public final < T > UdpClient attr ( AttributeKey < T > key , T value ) { } }
Objects . requireNonNull ( key , "key" ) ; Objects . requireNonNull ( value , "value" ) ; return bootstrap ( b -> b . attr ( key , value ) ) ;
public class FailOverWrapper { /** * Causes the executing thread to pause for a period of time . * @ param time in ms */ private static void pause ( final long time ) { } }
final long startTime = System . currentTimeMillis ( ) ; do { try { final long sleepTime = time - ( System . currentTimeMillis ( ) - startTime ) ; Thread . sleep ( sleepTime > 0 ? sleepTime : 10 ) ; } catch ( InterruptedException e ) { // don ' t care } } while ( ( System . currentTimeMillis ( ) - startTime ) < time ) ;
public class CmsOrgUnitsAdminList { /** * Performs a forward to the overview of the single organizational unit the current user * is allowed to administrate . < p > * @ throws ServletException in case of errors during forwarding * @ throws IOException in case of errors during forwarding * @ throws CmsException in case of errors during getting orgunits */ public void forwardToSingleAdminOU ( ) throws ServletException , IOException , CmsException { } }
List < CmsOrganizationalUnit > orgUnits = getOrgUnits ( ) ; if ( orgUnits . isEmpty ( ) ) { OpenCms . getWorkplaceManager ( ) . getToolManager ( ) . jspForwardTool ( this , "/" , null ) ; return ; } Map < String , String [ ] > params = new HashMap < String , String [ ] > ( ) ; params . put ( A_CmsOrgUnitDialog . PARAM_OUFQN , new String [ ] { orgUnits . get ( 0 ) . getName ( ) } ) ; params . put ( CmsDialog . PARAM_ACTION , new String [ ] { CmsDialog . DIALOG_INITIAL } ) ; OpenCms . getWorkplaceManager ( ) . getToolManager ( ) . jspForwardTool ( this , getForwardToolPath ( ) , params ) ;
public class Matrix4d { /** * Set this matrix to be a symmetric perspective projection frustum transformation for a right - handed coordinate system * using OpenGL ' s NDC z range of < code > [ - 1 . . + 1 ] < / code > . * In order to apply the perspective projection transformation to an existing transformation , * use { @ link # perspectiveRect ( double , double , double , double ) perspectiveRect ( ) } . * @ see # perspectiveRect ( double , double , double , double ) * @ param width * the width of the near frustum plane * @ param height * the height of the near frustum plane * @ param zNear * near clipping plane distance . If the special value { @ link Float # POSITIVE _ INFINITY } is used , the near clipping plane will be at positive infinity . * In that case , < code > zFar < / code > may not also be { @ link Float # POSITIVE _ INFINITY } . * @ param zFar * far clipping plane distance . If the special value { @ link Float # POSITIVE _ INFINITY } is used , the far clipping plane will be at positive infinity . * In that case , < code > zNear < / code > may not also be { @ link Float # POSITIVE _ INFINITY } . * @ return this */ public Matrix4d setPerspectiveRect ( double width , double height , double zNear , double zFar ) { } }
return setPerspectiveRect ( width , height , zNear , zFar , false ) ;
public class UnitResponse { /** * Standard json formation without data lost . */ public String toJSONString ( ) { } }
if ( context . isPretty ( ) ) { return JSON . toJSONStringWithDateFormat ( this , Constant . DATE_SERIALIZE_FORMAT , SerializerFeature . PrettyFormat ) ; } else { return JSONObject . toJSONStringWithDateFormat ( this , Constant . DATE_SERIALIZE_FORMAT ) ; }
public class ListRobotApplicationsResult { /** * A list of robot application summaries that meet the criteria of the request . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setRobotApplicationSummaries ( java . util . Collection ) } or * { @ link # withRobotApplicationSummaries ( java . util . Collection ) } if you want to override the existing values . * @ param robotApplicationSummaries * A list of robot application summaries that meet the criteria of the request . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListRobotApplicationsResult withRobotApplicationSummaries ( RobotApplicationSummary ... robotApplicationSummaries ) { } }
if ( this . robotApplicationSummaries == null ) { setRobotApplicationSummaries ( new java . util . ArrayList < RobotApplicationSummary > ( robotApplicationSummaries . length ) ) ; } for ( RobotApplicationSummary ele : robotApplicationSummaries ) { this . robotApplicationSummaries . add ( ele ) ; } return this ;
public class ReplayRelay { /** * Creates an unbounded replay relay . * The internal buffer is backed by an { @ link ArrayList } and starts with an initial capacity of 16 . Once the * number of items reaches this capacity , it will grow as necessary ( usually by 50 % ) . However , as the * number of items grows , this causes frequent array reallocation and copying , and may hurt performance * and latency . This can be avoided with the { @ link # create ( int ) } overload which takes an initial capacity * parameter and can be tuned to reduce the array reallocation frequency as needed . */ @ CheckReturnValue @ NonNull public static < T > ReplayRelay < T > create ( ) { } }
return new ReplayRelay < T > ( new UnboundedReplayBuffer < T > ( 16 ) ) ;
public class Module { /** * Add a path to import from this module . * @ param path The path to add . * @ return this module descriptor . */ public Module withImportIncludePath ( String path ) { } }
checkList ( this . imports , INCLUDE ) ; this . imports . get ( INCLUDE ) . add ( path ) ; return this ;
import java.io.*;
import java.lang.*;
import java.util.*;
import java.math.*;

class ExtractIntegers {
    /**
     * Extract only the integer values from a list of mixed data types.
     * <pre>
     * extractIntegers(["a", 3.14, 5])              -> [5]
     * extractIntegers([1, 2, 3, "abc", {}, []])    -> [1, 2, 3]
     * </pre>
     *
     * @param inputValues list of arbitrarily typed objects (may contain nulls)
     * @return a new list containing, in their original order, every {@link Integer} element
     */
    public static ArrayList<Integer> extractIntegers(List<Object> inputValues) {
        ArrayList<Integer> onlyIntegers = new ArrayList<>();
        for (Iterator<Object> it = inputValues.iterator(); it.hasNext(); ) {
            Object candidate = it.next();
            // instanceof is false for null, so null elements are skipped safely.
            if (candidate instanceof Integer) {
                onlyIntegers.add((Integer) candidate);
            }
        }
        return onlyIntegers;
    }
}
public class ZipBuilder { /** * Recursively adds the contents of the given directory and all subdirectories to the given ZIP * output stream . * @ param root * an ancestor of { @ code directory } , used to determine the relative path within the * archive * @ param directory * current directory to be added * @ param zos * ZIP output stream * @ throws IOException */ private void addDirectory ( File root , File directory , String targetPath , ZipOutputStream zos ) throws IOException { } }
String prefix = targetPath ; if ( ! prefix . isEmpty ( ) && ! prefix . endsWith ( "/" ) ) { prefix += "/" ; } // directory entries are required , or else bundle classpath may be // broken if ( ! directory . equals ( root ) ) { String path = normalizePath ( root , directory ) ; ZipEntry jarEntry = new ZipEntry ( prefix + path + "/" ) ; jarOutputStream . putNextEntry ( jarEntry ) ; } File [ ] children = directory . listFiles ( ) ; // loop through dirList , and zip the files for ( File child : children ) { if ( child . isDirectory ( ) ) { addDirectory ( root , child , prefix , jarOutputStream ) ; } else { addFile ( root , child , prefix , jarOutputStream ) ; } }
public class PresentsSession {
/**
 * Queues a message for delivery to the client.
 *
 * @param msg the message to send downstream.
 * @param expect the connection the caller expects this session to be using, or null to skip
 *        that check.
 * @return true if the message was posted (or buffered in the currently open compound batch),
 *         false if it was dropped.
 */
protected boolean postMessage ( DownstreamMessage msg , PresentsConnection expect ) { } }
PresentsConnection conn = getConnection ( ) ;
// Make sure that the connection they expect us to be using is the one we're using; there are
// circumstances where sufficient delay between request and response gives the client time to
// drop their original connection and establish a new one, opening the door to major confusion.
if ( expect != null && conn != expect ) { return false ; }
// If a compound batch is currently open, buffer the message there instead of writing it out.
if ( _compound != null ) { _compound . msgs . add ( msg ) ; return true ; }
// Make sure we have a connection at all before posting.
if ( conn != null ) { conn . postMessage ( msg ) ; _messagesOut ++ ; // count 'em up!
return true ; }
// No connection: the message is dropped. Don't log dropped messages unless we're dropping a
// lot of them (meaning something is still queueing messages up for this dead client even
// though it shouldn't be).
if ( ++ _messagesDropped % 50 == 0 ) { log . warning ( "Dropping many messages?" , "client" , this , "count" , _messagesDropped , "msg" , msg ) ; }
// Make darned sure we don't have any remaining subscriptions; clear them aggressively once
// enough messages have been dropped. (A stale-subscription warning used to be logged here.)
if ( _subscrips . size ( ) > 0 ) { clearSubscrips ( _messagesDropped > 10 ) ; }
return false ;
public class AsyncMutateInBuilder { /** * Perform several { @ link Mutation mutation } operations inside a single existing { @ link JsonDocument JSON document } * and watch for durability requirements . * The list of mutations and paths to mutate in the JSON is added through builder methods like * { @ link # arrayInsert ( String , Object ) } . * Multi - mutations are applied as a whole , atomically at the document level . That means that if one of the mutations * fails , none of the mutations are applied . Otherwise , all mutations can be considered successful and the whole * operation will receive a { @ link DocumentFragment } with the updated cas ( and optionally { @ link MutationToken } ) . * The subdocument API has the benefit of only transmitting the fragment of the document you want to mutate * on the wire , instead of the whole document . * This Observable most notable error conditions are : * - The enclosing document does not exist : { @ link DocumentDoesNotExistException } * - The enclosing document is not JSON : { @ link DocumentNotJsonException } * - No mutation was defined through the builder API : { @ link IllegalArgumentException } * - A mutation spec couldn ' t be encoded and the whole operation was cancelled : { @ link TranscodingException } * - The multi - mutation failed : { @ link MultiMutationException } * - The durability constraint could not be fulfilled because of a temporary or persistent problem : { @ link DurabilityException } * - CAS was provided but optimistic locking failed : { @ link CASMismatchException } * When receiving a { @ link MultiMutationException } , one can inspect the exception to find the zero - based index and * error { @ link ResponseStatus status code } of the first failing { @ link Mutation } . Subsequent mutations may have * also failed had they been attempted , but a single spec failing causes the whole operation to be cancelled . 
* Other top - level error conditions are similar to those encountered during a document - level { @ link AsyncBucket # replace ( Document ) } . * A { @ link DurabilityException } typically happens if the given amount of replicas needed to fulfill the durability * requirement cannot be met because either the bucket does not have enough replicas configured or they are not * available in a failover event . As an example , if one replica is configured and { @ link ReplicateTo # TWO } is used , * the observable is errored with a { @ link DurabilityException } . The same can happen if one replica is configured , * but one node has been failed over and not yet rebalanced ( hence , on a subset of the partitions there is no * replica available ) . * * It is important to understand that the original execute has already happened , so the actual * execute and the watching for durability requirements are two separate tasks internally . * * * @ param persistTo the persistence requirement to watch . * @ param replicateTo the replication requirement to watch . * @ param timeout the specific timeout to apply for the operation . * @ param timeUnit the time unit for the timeout . * @ return an { @ link Observable } of a single { @ link DocumentFragment } ( if successful ) containing updated cas metadata . * Note that some individual results could also bear a value , like counter operations . */ public Observable < DocumentFragment < Mutation > > execute ( final PersistTo persistTo , final ReplicateTo replicateTo , final long timeout , final TimeUnit timeUnit ) { } }
// Perform the actual multi-mutation first; durability observation is chained onto its result.
Observable < DocumentFragment < Mutation > > mutationResult = execute ( timeout , timeUnit ) ;
// Fast path: no durability requirement to watch, return the mutation result as-is.
if ( persistTo == PersistTo . NONE && replicateTo == ReplicateTo . NONE ) { return mutationResult ; }
// Otherwise, for each successful mutation poll replication/persistence via Observe.call(...),
// map the successful observation back to the original fragment, translate any observe failure
// into a DurabilityException, and re-apply the caller's timeout to the combined operation.
return mutationResult . flatMap ( new Func1 < DocumentFragment < Mutation > , Observable < DocumentFragment < Mutation > > > ( ) { @ Override public Observable < DocumentFragment < Mutation > > call ( final DocumentFragment < Mutation > fragment ) { Observable < DocumentFragment < Mutation > > result = Observe . call ( core , bucketName , docId , fragment . cas ( ) , false , fragment . mutationToken ( ) , persistTo . value ( ) , replicateTo . value ( ) , environment . observeIntervalDelay ( ) , environment . retryStrategy ( ) ) . map ( new Func1 < Boolean , DocumentFragment < Mutation > > ( ) { @ Override public DocumentFragment < Mutation > call ( Boolean aBoolean ) { return fragment ; } } ) . onErrorResumeNext ( new Func1 < Throwable , Observable < DocumentFragment < Mutation > > > ( ) { @ Override public Observable < DocumentFragment < Mutation > > call ( Throwable throwable ) { return Observable . error ( new DurabilityException ( "Durability requirement failed: " + throwable . getMessage ( ) , throwable ) ) ; } } ) ; if ( timeout > 0 ) { result = result . timeout ( timeout , timeUnit , environment . scheduler ( ) ) ; } return result ; } } ) ;
public class JavacParser {
/**
 * EnumBody = "{" { EnumeratorDeclarationList } [","]
 *            [ ";" { ClassBodyDeclaration } ] "}"
 *
 * Parses an enum body starting at the opening brace: the comma-separated enum constants, an
 * optional trailing comma, and (after an optional ';') ordinary class-body declarations.
 *
 * @param enumName the name of the enum being parsed, passed through to the constant and
 *        member parsers.
 * @return the parsed enum members (constants plus any class-body declarations).
 */
List < JCTree > enumBody ( Name enumName ) { } }
accept ( LBRACE ) ;
ListBuffer < JCTree > defs = new ListBuffer < > ( ) ;
// A body beginning with a comma is an empty enumerator list followed by its trailing comma.
if ( token . kind == COMMA ) { nextToken ( ) ; }
// Otherwise, unless the body immediately closes or jumps to member declarations with ';',
// parse the comma-separated enum constants.
else if ( token . kind != RBRACE && token . kind != SEMI ) { defs . append ( enumeratorDeclaration ( enumName ) ) ; while ( token . kind == COMMA ) { nextToken ( ) ;
// A trailing comma directly before '}' or ';' is allowed.
if ( token . kind == RBRACE || token . kind == SEMI ) break ; defs . append ( enumeratorDeclaration ( enumName ) ) ; }
// After the constants only ',' (handled above), '}' or ';' may follow; report anything else.
if ( token . kind != SEMI && token . kind != RBRACE ) { defs . append ( syntaxError ( token . pos , "expected3" , COMMA , RBRACE , SEMI ) ) ; nextToken ( ) ; } }
// An optional ';' introduces regular class-body declarations (fields, methods, ...).
if ( token . kind == SEMI ) { nextToken ( ) ; while ( token . kind != RBRACE && token . kind != EOF ) { defs . appendList ( classOrInterfaceBodyDeclaration ( enumName , false ) ) ; if ( token . pos <= endPosTable . errorEndPos ) {
// error recovery: skip ahead to a plausible member boundary
skip ( false , true , true , false ) ; } } }
accept ( RBRACE ) ;
return defs . toList ( ) ;
public class ResourceUtils { /** * If the provided resource is a classpath resource , running inside an embedded container , * and if the container is running in a non - exploded form , classpath resources become non - accessible . * So , this method will attempt to move resources out of classpath and onto a physical location * outside the context , typically in the " cas " directory of the temp system folder . * @ param resource the resource * @ param isDirectory the if the resource is a directory , in which case entries will be copied over . * @ param containsName the resource name pattern * @ return the file */ @ SneakyThrows public static Resource prepareClasspathResourceIfNeeded ( final Resource resource , final boolean isDirectory , final String containsName ) { } }
LOGGER . trace ( "Preparing possible classpath resource [{}]" , resource ) ; if ( resource == null ) { LOGGER . debug ( "No resource defined to prepare. Returning null" ) ; return null ; } if ( org . springframework . util . ResourceUtils . isFileURL ( resource . getURL ( ) ) ) { return resource ; } val url = org . springframework . util . ResourceUtils . extractArchiveURL ( resource . getURL ( ) ) ; val file = org . springframework . util . ResourceUtils . getFile ( url ) ; val casDirectory = new File ( FileUtils . getTempDirectory ( ) , "cas" ) ; val destination = new File ( casDirectory , resource . getFilename ( ) ) ; if ( isDirectory ) { LOGGER . trace ( "Creating resource directory [{}]" , destination ) ; FileUtils . forceMkdir ( destination ) ; FileUtils . cleanDirectory ( destination ) ; } else if ( destination . exists ( ) ) { LOGGER . trace ( "Deleting resource directory [{}]" , destination ) ; FileUtils . forceDelete ( destination ) ; } LOGGER . trace ( "Processing file [{}]" , file ) ; try ( val jFile = new JarFile ( file ) ) { val e = jFile . entries ( ) ; while ( e . hasMoreElements ( ) ) { val entry = e . nextElement ( ) ; val name = entry . getName ( ) ; LOGGER . trace ( "Comparing [{}] against [{}] and pattern [{}]" , name , resource . getFilename ( ) , containsName ) ; if ( name . contains ( resource . getFilename ( ) ) && RegexUtils . find ( containsName , name ) ) { try ( val stream = jFile . getInputStream ( entry ) ) { var copyDestination = destination ; if ( isDirectory ) { val entryFileName = new File ( name ) ; copyDestination = new File ( destination , entryFileName . getName ( ) ) ; } LOGGER . trace ( "Copying resource entry [{}] to [{}]" , name , copyDestination ) ; try ( val writer = Files . newBufferedWriter ( copyDestination . toPath ( ) , StandardCharsets . UTF_8 ) ) { IOUtils . copy ( stream , writer , StandardCharsets . UTF_8 ) ; } } } } } return new FileSystemResource ( destination ) ;
public class PTSaxton2006 { /** * For calculating SLDUL * @ param soilParas should include 1 . Sand weight percentage by layer * ( [ 0,100 ] % ) , 2 . Clay weight percentage by layer ( [ 0,100 ] % ) , 3 . Organic * matter weight percentage by layer ( [ 0,100 ] % ) , ( = SLOC * 1.72) * @ return Soil water , drained upper limit , fraction */ public static String getSLDUL ( String [ ] soilParas ) { } }
if ( soilParas != null && soilParas . length >= 3 ) { return divide ( calcMoisture33Kpa ( soilParas [ 0 ] , soilParas [ 1 ] , soilParas [ 2 ] ) , "100" , 3 ) ; } else { return null ; }
public class OperationDataUpdater { /** * { @ inheritDoc } */ @ Override protected JSONObject extractData ( MBeanInfo pMBeanInfo , String pOperation ) { } }
// Builds a JSON description of the MBean's operations (args, return type, description).
// Overloaded operations share a name: the first occurrence is stored as a plain map, a second
// occurrence upgrades the value to a JSONArray holding both. A null pOperation selects all.
// NOTE(review): the map-vs-list upgrade below is order-sensitive; code left byte-identical.
JSONObject opMap = new JSONObject ( ) ; for ( MBeanOperationInfo opInfo : pMBeanInfo . getOperations ( ) ) { if ( pOperation == null || opInfo . getName ( ) . equals ( pOperation ) ) { JSONObject map = new JSONObject ( ) ; JSONArray argList = new JSONArray ( ) ; for ( MBeanParameterInfo paramInfo : opInfo . getSignature ( ) ) { JSONObject args = new JSONObject ( ) ; args . put ( DESCRIPTION . getKey ( ) , paramInfo . getDescription ( ) ) ; args . put ( NAME . getKey ( ) , paramInfo . getName ( ) ) ; args . put ( TYPE . getKey ( ) , paramInfo . getType ( ) ) ; argList . add ( args ) ; } map . put ( ARGS . getKey ( ) , argList ) ; map . put ( RETURN_TYPE . getKey ( ) , opInfo . getReturnType ( ) ) ; map . put ( DESCRIPTION . getKey ( ) , opInfo . getDescription ( ) ) ; Object ops = opMap . get ( opInfo . getName ( ) ) ; if ( ops != null ) { if ( ops instanceof List ) {
// If it is already a list , simply add it to the end
( ( List ) ops ) . add ( map ) ; } else if ( ops instanceof Map ) {
// If it is a map , add a list with two elements
// ( the old one and the new one )
JSONArray opList = new JSONArray ( ) ; opList . add ( ops ) ; opList . add ( map ) ; opMap . put ( opInfo . getName ( ) , opList ) ; } else { throw new IllegalArgumentException ( "Internal: list, addOperations: Expected Map or List, not " + ops . getClass ( ) ) ; } } else {
// No value set yet , simply add the map as plain value
opMap . put ( opInfo . getName ( ) , map ) ; } } } return opMap ;
public class PropertiesLoader { /** * Load properties from a file . * @ param file * File name . If { @ code location } is { @ link FileLocation # CLASSPATH * CLASSPATH } and if { @ code file } does not start with { @ code " / " } , * { @ code " / " } is prepended . * @ param location * Location from which the file is loaded . * @ return * { @ link Properties Properties } loaded from the file . * { @ code null } is returned on failure . * @ throws IllegalArgumentException * { @ code file } is { @ code null } , or { @ code location } is { @ code null } . */ public static TypedProperties load ( String file , FileLocation location ) { } }
// If file is null . if ( file == null ) { // file must be specified . throw new IllegalArgumentException ( "file is null." ) ; } // If location is null . if ( location == null ) { // location must be specified . throw new IllegalArgumentException ( "location is null." ) ; } InputStream in = null ; try { // Open the file . in = open ( file , location ) ; if ( in == null ) { // Failed to open the file . return null ; } // Build Properties from the input stream . Properties properties = load ( in ) ; // Wrap the properties . return new PropertiesWrapper ( properties ) ; } catch ( IOException e ) { // Failed to open the file , or Properties . load ( ) failed . return null ; } finally { // Close the input stream silently . close ( in ) ; }
public class TranscriptionJobSummaryMarshaller { /** * Marshall the given parameter object . */ public void marshall ( TranscriptionJobSummary transcriptionJobSummary , ProtocolMarshaller protocolMarshaller ) { } }
// Generated-style marshaller: each field is written via its static *_BINDING descriptor.
// The field order below matches the wire contract; code left byte-identical.
if ( transcriptionJobSummary == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( transcriptionJobSummary . getTranscriptionJobName ( ) , TRANSCRIPTIONJOBNAME_BINDING ) ; protocolMarshaller . marshall ( transcriptionJobSummary . getCreationTime ( ) , CREATIONTIME_BINDING ) ; protocolMarshaller . marshall ( transcriptionJobSummary . getCompletionTime ( ) , COMPLETIONTIME_BINDING ) ; protocolMarshaller . marshall ( transcriptionJobSummary . getLanguageCode ( ) , LANGUAGECODE_BINDING ) ; protocolMarshaller . marshall ( transcriptionJobSummary . getTranscriptionJobStatus ( ) , TRANSCRIPTIONJOBSTATUS_BINDING ) ; protocolMarshaller . marshall ( transcriptionJobSummary . getFailureReason ( ) , FAILUREREASON_BINDING ) ; protocolMarshaller . marshall ( transcriptionJobSummary . getOutputLocationType ( ) , OUTPUTLOCATIONTYPE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class RandomProjectedNeighborsAndDensities { /** * Compute list of neighbors for each point from sets resulting from * projection * @ return list of neighbors for each point */ public DataStore < ? extends DBIDs > getNeighs ( ) { } }
// For each projection split set: the set's middle point gains every member of the set as a
// neighbor, and every member gains the middle point — producing a symmetric-ish neighborhood.
// NOTE(review): DBID iterator/var reuse below is deliberate (ELKI idiom); code left byte-identical.
final DBIDs ids = points . getDBIDs ( ) ;
// init lists
WritableDataStore < ModifiableDBIDs > neighs = DataStoreUtil . makeStorage ( ids , DataStoreFactory . HINT_HOT , ModifiableDBIDs . class ) ; for ( DBIDIter it = ids . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { neighs . put ( it , DBIDUtil . newHashSet ( ) ) ; } FiniteProgress splitp = LOG . isVerbose ( ) ? new FiniteProgress ( "Processing splits for neighborhoods" , splitsets . size ( ) , LOG ) : null ;
// go through all sets
Iterator < ArrayDBIDs > it1 = splitsets . iterator ( ) ; DBIDVar v = DBIDUtil . newVar ( ) ; while ( it1 . hasNext ( ) ) { ArrayDBIDs pinSet = it1 . next ( ) ; final int indoff = pinSet . size ( ) >> 1 ;
// middle point of projection
pinSet . assignVar ( indoff , v ) ;
// add all points as neighbors to middle point
neighs . get ( v ) . addDBIDs ( pinSet ) ;
// and the middle point to all other points in set
for ( DBIDIter it = pinSet . iter ( ) ; it . valid ( ) ; it . advance ( ) ) { neighs . get ( it ) . add ( v ) ; } LOG . incrementProcessed ( splitp ) ; } LOG . ensureCompleted ( splitp ) ; return neighs ;
public class DatabaseDAODefaultImpl { public DbHistory [ ] get_device_property_history ( Database database , String deviceName , String propname ) throws DevFailed { } }
if ( ! database . isAccess_checked ( ) ) checkAccess ( database ) ; DeviceData argIn = new DeviceData ( ) ; argIn . insert ( new String [ ] { deviceName , propname } ) ; DeviceData argOut = command_inout ( database , "DbGetDevicePropertyHist" , argIn ) ; List < DbHistory > dbHistories = convertPropertyHistory ( argOut . extractStringArray ( ) , false ) ; DbHistory [ ] array = new DbHistory [ dbHistories . size ( ) ] ; for ( int i = 0 ; i < array . length ; i ++ ) array [ i ] = dbHistories . get ( i ) ; return array ;
public class AnnotationTypeBuilder { /** * Build the signature of the current annotation type . * @ param node the XML element that specifies which components to document * @ param annotationInfoTree the content tree to which the documentation will be added */ public void buildAnnotationTypeSignature ( XMLNode node , Content annotationInfoTree ) { } }
writer . addAnnotationTypeSignature ( utils . modifiersToString ( annotationType , true ) , annotationInfoTree ) ;
public class BigtableTableAdminClient { /** * Drops rows by the specified key prefix and tableId asynchronously * < p > Please note that this method is considered part of the admin API and is rate limited . * < p > Sample code : * < pre > { @ code * ApiFuture < Void > dropFuture = client . dropRowRangeAsync ( " my - table " , " prefix " ) ; * ApiFutures . addCallback ( * dropFuture , * new ApiFutureCallback < Void > ( ) { * public void onSuccess ( Void tableNames ) { * System . out . println ( " Successfully dropped row range . " ) ; * public void onFailure ( Throwable t ) { * t . printStackTrace ( ) ; * MoreExecutors . directExecutor ( ) * } < / pre > */ @ SuppressWarnings ( "WeakerAccess" ) public ApiFuture < Void > dropRowRangeAsync ( String tableId , String rowKeyPrefix ) { } }
return dropRowRangeAsync ( tableId , ByteString . copyFromUtf8 ( rowKeyPrefix ) ) ;
public class PythonReceiver { /** * = = = = = Setup = = = = = */ @ SuppressWarnings ( "unchecked" ) public void open ( File inputFile ) throws IOException { } }
// Prepares the memory-mapped input file used to exchange data with the Python process:
// recreates the file, pre-sizes it by writing the last byte, then maps it READ_WRITE.
// NOTE(review): mkdirs()/delete()/createNewFile() return values are ignored — presumably any
// real failure surfaces via the subsequent RandomAccessFile I/O; confirm before changing.
deserializer = ( Deserializer < OUT > ) ( readAsByteArray ? new ByteArrayDeserializer ( ) : new TupleDeserializer ( ) ) ; inputFile . getParentFile ( ) . mkdirs ( ) ; if ( inputFile . exists ( ) ) { inputFile . delete ( ) ; } inputFile . createNewFile ( ) ; inputRAF = new RandomAccessFile ( inputFile , "rw" ) ; inputRAF . setLength ( mappedFileSizeBytes ) ; inputRAF . seek ( mappedFileSizeBytes - 1 ) ; inputRAF . writeByte ( 0 ) ; inputRAF . seek ( 0 ) ; inputChannel = inputRAF . getChannel ( ) ; fileBuffer = inputChannel . map ( FileChannel . MapMode . READ_WRITE , 0 , mappedFileSizeBytes ) ;
public class S3TaskClientImpl { /** * { @ inheritDoc } */ @ Override public GetUrlTaskResult getHlsUrl ( String spaceId , String contentId ) throws ContentStoreException { } }
final GetHlsUrlTaskParameters params = new GetHlsUrlTaskParameters ( ) ; params . setSpaceId ( spaceId ) ; params . setContentId ( contentId ) ; return GetUrlTaskResult . deserialize ( contentStore . performTask ( StorageTaskConstants . GET_HLS_URL_TASK_NAME , params . serialize ( ) ) ) ;
public class AbstractDBOpenHelper { /** * helpers */ protected final boolean createIndex ( SQLiteDatabase db , String table , boolean unique , String firstColumn , String ... otherColumns ) { } }
ArrayList < String > statements = new ArrayList < String > ( ) ; statements . add ( PersistUtils . getCreateIndex ( table , unique , firstColumn , otherColumns ) ) ; return executeStatements ( db , statements ) ;
public class Options { /** * Returns an immutable set of the given options for a move . */ public static ImmutableSet < CopyOption > getMoveOptions ( CopyOption ... options ) { } }
return ImmutableSet . copyOf ( Lists . asList ( LinkOption . NOFOLLOW_LINKS , options ) ) ;
public class OpenSslSessionStats { /** * Returns the current number of sessions in the internal session cache . */ public long number ( ) { } }
Lock readerLock = context . ctxLock . readLock ( ) ; readerLock . lock ( ) ; try { return SSLContext . sessionNumber ( context . ctx ) ; } finally { readerLock . unlock ( ) ; }
public class XPathBuilder { /** * < p > < b > Used for finding element process ( to generate xpath address ) < / b > < / p > * < p > Result Example : < / p > * < pre > * / / * [ contains ( @ class , ' x - grid - panel ' ) ] [ last ( ) ] * < / pre > * @ param position { @ link Position } * @ param < T > the element which calls this method * @ return this element */ @ SuppressWarnings ( "unchecked" ) public < T extends XPathBuilder > T setPosition ( final Position position ) { } }
this . position = position . getValue ( ) ; return ( T ) this ;
public class CreateUserRequest { /** * check if the request contains a display name * @ param formItemList * form item list extracted * @ param createUserResponse * response object * @ return display name < br > * < b > null < / b > if there was no display name passed */ private static String checkDisplayName ( final FormItemList formItemList , final CreateUserResponse createUserResponse ) { } }
final String displayName = formItemList . getField ( ProtocolConstants . Parameters . Create . User . DISPLAY_NAME ) ; if ( displayName != null ) { return displayName ; } else { createUserResponse . displayNameMissing ( ) ; } return null ;
public class Ref { /** * Create a relative ref url from the specified ref * @ param ref the ref url to be made relative * @ return the relative ref url or null if the specified ref was not valid */ public static String getRelativeRef ( String ref ) { } }
Matcher matcher = match ( ref ) ; return matcher != null ? String . format ( "/%s/%s" , matcher . group ( 1 ) , matcher . group ( 2 ) ) : null ;
public class PostExecutionInterceptorContext { /** * Makes sure that the call tree can ' t be { @ linkplain # excludeCallTree excluded } * Note : if the call tree has not been collected , calling this method won ' t restore it . * See also { @ link PreExecutionInterceptorContext # mustCollectCallTree } * @ param reason the reason why the call tree should always be preserved ( debug message ) * @ return < code > this < / code > for chaining */ public PostExecutionInterceptorContext mustPreserveCallTree ( String reason ) { } }
if ( getSpanContext ( ) . getCallTree ( ) == null ) { logger . info ( "Can't preserve the call tree because it has not been collected" ) ; } logger . debug ( "Must preserve call tree because {}" , reason ) ; mustPreserveCallTree = true ; excludeCallTree = false ; return this ;
public class PersonDirectoryConfiguration { /** * Merges attributes from the request with those from other DAOs . */ @ Bean ( name = "requestAttributeMergingDao" ) @ Qualifier ( "uPortalInternal" ) public IPersonAttributeDao getRequestAttributeMergingDao ( ) { } }
final MergingPersonAttributeDaoImpl rslt = new MergingPersonAttributeDaoImpl ( ) ; rslt . setUsernameAttributeProvider ( getUsernameAttributeProvider ( ) ) ; rslt . setMerger ( new ReplacingAttributeAdder ( ) ) ; final List < IPersonAttributeDao > daos = new ArrayList < > ( ) ; daos . add ( getRequestAttributesDao ( ) ) ; daos . add ( getCachingPersonAttributeDao ( ) ) ; rslt . setPersonAttributeDaos ( daos ) ; return rslt ;
public class Horizon { /** * Sets the value of the current pitch and normalizes it into [-90, 90] , * flipping roll by 180 degrees when the instrument goes past vertical . * @ param PITCH the new pitch in degrees */ public void setPitch ( final double PITCH ) { } }
// Pitch wraps at +/-90: past vertical the value folds back and roll is rotated 180
// degrees exactly once per inversion (guarded by the upsidedown flag).
// NOTE(review): oldPitch is only updated in the non-inverted branch — presumably
// intentional so it tracks the last "normal" attitude; confirm before changing.
this . pitch = PITCH % 180 ; if ( pitch > 90 ) { pitch = 90 - ( pitch - 90 ) ; if ( ! upsidedown ) { setRoll ( roll - 180 ) ; } upsidedown = true ; } else if ( pitch < - 90 ) { pitch = - 90 + ( - 90 - pitch ) ; if ( ! upsidedown ) { setRoll ( roll + 180 ) ; } upsidedown = true ; } else { upsidedown = false ; this . oldPitch = pitch ; } fireStateChanged ( ) ; repaint ( ) ;
public class PaginationAutoMapInterceptor { /** * 生成特定数据库的分页语句 * @ param sql * @ param page * @ return */ private String buildPageSql ( String sql , Page page ) { } }
if ( page == null || dialect == null || dialect . equals ( "" ) ) { return sql ; } StringBuilder sb = new StringBuilder ( ) ; int startRow = page . getOffset ( ) ; if ( "mysql" . equals ( dialect ) ) { sb . append ( sql ) ; sb . append ( " limit " ) . append ( startRow ) . append ( "," ) . append ( page . getLimit ( ) ) ; } else if ( "hsqldb" . equals ( dialect ) ) { sb . append ( "select limit " ) ; sb . append ( startRow ) ; sb . append ( " " ) ; sb . append ( page . getLimit ( ) ) ; sb . append ( " " ) ; sb . append ( sql . substring ( 6 ) ) ; } else if ( "oracle" . equals ( dialect ) ) { sb . append ( "select * from (select tmp_tb.*,ROWNUM row_id from (" ) ; sb . append ( sql ) ; sb . append ( ") tmp_tb where ROWNUM<=" ) ; sb . append ( startRow + page . getLimit ( ) ) ; sb . append ( ") where row_id>" ) ; sb . append ( startRow ) ; } else { throw new IllegalArgumentException ( "SelectInterceptor error:does not support " + dialect ) ; } return sb . toString ( ) ;
public class UpdatableResultSet { /** * { inheritDoc } . */ public void updateString ( String columnLabel , String value ) throws SQLException { } }
updateString ( findColumn ( columnLabel ) , value ) ;
public class MediaSpec { /** * Converts a resolution from a CSS length to ' dpi ' . * @ param spec the CSS resolution specification * @ return the resolution in ' dpi ' or { @ code null } when the unit is invalid */ protected Float dpiResolution ( TermResolution spec ) { } }
float nval = spec . getValue ( ) ; TermLength . Unit unit = spec . getUnit ( ) ; switch ( unit ) { case dpi : return nval ; case dpcm : return nval * 2.54f ; case dppx : return nval * getResolution ( ) ; default : return null ; }
public class BufferingLogOutputStream { /** * Flush any pending data in the { @ link # logBuffer } * @ throws IOException If closing the stream fails . */ @ Override public void close ( ) throws IOException { } }
// first close the parent so we get all remaining data super . close ( ) ; // then ensure that any remaining buffer is logged synchronized ( logBuffer ) { if ( logBuffer . length ( ) > 0 ) { log . info ( logBuffer . toString ( ) ) ; logBuffer . setLength ( 0 ) ; lastFlush = System . currentTimeMillis ( ) ; } }
public class JobAgentsInner { /** * Updates a job agent . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param jobAgentName The name of the job agent to be updated . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable for the request */ public Observable < ServiceResponse < JobAgentInner > > updateWithServiceResponseAsync ( String resourceGroupName , String serverName , String jobAgentName ) { } }
// Generated Azure client method: validates all required parameters, builds an empty
// JobAgentUpdate (tags deliberately null for this overload), issues the PATCH and lets
// the AzureClient poll the long-running operation to completion.
// NOTE(review): validation order and messages are part of the generated contract; left byte-identical.
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( serverName == null ) { throw new IllegalArgumentException ( "Parameter serverName is required and cannot be null." ) ; } if ( jobAgentName == null ) { throw new IllegalArgumentException ( "Parameter jobAgentName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } if ( this . client . apiVersion ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.apiVersion() is required and cannot be null." ) ; } final Map < String , String > tags = null ; JobAgentUpdate parameters = new JobAgentUpdate ( ) ; parameters . withTags ( null ) ; Observable < Response < ResponseBody > > observable = service . update ( resourceGroupName , serverName , jobAgentName , this . client . subscriptionId ( ) , this . client . apiVersion ( ) , this . client . acceptLanguage ( ) , parameters , this . client . userAgent ( ) ) ; return client . getAzureClient ( ) . getPutOrPatchResultAsync ( observable , new TypeToken < JobAgentInner > ( ) { } . getType ( ) ) ;
public class AbstractQueryGenerator { /** * Generate SqlQuerySpec with given DocumentQuery and query head . * @ param query DocumentQuery represent one query method . * @ param queryHead * @ return The SqlQuerySpec for DocumentClient . */ protected SqlQuerySpec generateQuery ( @ NonNull DocumentQuery query , @ NonNull String queryHead ) { } }
Assert . hasText ( queryHead , "query head should have text." ) ; final Pair < String , List < Pair < String , Object > > > queryBody = generateQueryBody ( query ) ; final String queryString = String . join ( " " , queryHead , queryBody . getValue0 ( ) , generateQueryTail ( query ) ) ; final List < Pair < String , Object > > parameters = queryBody . getValue1 ( ) ; final SqlParameterCollection sqlParameters = new SqlParameterCollection ( ) ; sqlParameters . addAll ( parameters . stream ( ) . map ( p -> new SqlParameter ( "@" + p . getValue0 ( ) , toDocumentDBValue ( p . getValue1 ( ) ) ) ) . collect ( Collectors . toList ( ) ) ) ; return new SqlQuerySpec ( queryString , sqlParameters ) ;
public class ModelsImpl { /** * Adds a composite entity extractor to the application . * @ param appId The application ID . * @ param versionId The version ID . * @ param compositeModelCreateObject A model containing the name and children of the new entity extractor . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < UUID > addCompositeEntityAsync ( UUID appId , String versionId , CompositeEntityModel compositeModelCreateObject , final ServiceCallback < UUID > serviceCallback ) { } }
return ServiceFuture . fromResponse ( addCompositeEntityWithServiceResponseAsync ( appId , versionId , compositeModelCreateObject ) , serviceCallback ) ;
public class NodeOverrides { /** * The node property overrides for the job . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setNodePropertyOverrides ( java . util . Collection ) } or * { @ link # withNodePropertyOverrides ( java . util . Collection ) } if you want to override the existing values . * @ param nodePropertyOverrides * The node property overrides for the job . * @ return Returns a reference to this object so that method calls can be chained together . */ public NodeOverrides withNodePropertyOverrides ( NodePropertyOverride ... nodePropertyOverrides ) { } }
if ( this . nodePropertyOverrides == null ) { setNodePropertyOverrides ( new java . util . ArrayList < NodePropertyOverride > ( nodePropertyOverrides . length ) ) ; } for ( NodePropertyOverride ele : nodePropertyOverrides ) { this . nodePropertyOverrides . add ( ele ) ; } return this ;
public class FavoriteUpdater { /** * Set favorite to the logged in user . If no user , no action is done */ public void add ( DbSession dbSession , ComponentDto componentDto , @ Nullable Integer userId , boolean failIfTooManyFavorites ) { } }
// No-op for anonymous users. Rejects a duplicate favorite on the same component, then
// enforces a hard cap of 100 favorites per user and component qualifier: over the cap this
// either fails (failIfTooManyFavorites) or silently skips. Code left byte-identical.
// NOTE(review): the 100 limit is hard-coded here and repeated in the error message.
if ( userId == null ) { return ; } List < PropertyDto > existingFavoriteOnComponent = dbClient . propertiesDao ( ) . selectByQuery ( PropertyQuery . builder ( ) . setKey ( PROP_FAVORITE_KEY ) . setUserId ( userId ) . setComponentId ( componentDto . getId ( ) ) . build ( ) , dbSession ) ; checkArgument ( existingFavoriteOnComponent . isEmpty ( ) , "Component '%s' is already a favorite" , componentDto . getDbKey ( ) ) ; List < PropertyDto > existingFavorites = dbClient . propertiesDao ( ) . selectByKeyAndUserIdAndComponentQualifier ( dbSession , PROP_FAVORITE_KEY , userId , componentDto . qualifier ( ) ) ; if ( existingFavorites . size ( ) >= 100 ) { checkArgument ( ! failIfTooManyFavorites , "You cannot have more than 100 favorites on components with qualifier '%s'" , componentDto . qualifier ( ) ) ; return ; } dbClient . propertiesDao ( ) . saveProperty ( dbSession , new PropertyDto ( ) . setKey ( PROP_FAVORITE_KEY ) . setResourceId ( componentDto . getId ( ) ) . setUserId ( userId ) ) ;
public class TransactionCache {
    /**
     * Returns a previously built concept.
     *
     * @param id  The id of the concept
     * @param <X> The type of the concept
     * @return The cached concept
     */
    public <X extends Concept> X getCachedConcept(ConceptId id) {
        // Cast is unavoidable: the cache stores heterogeneous Concept subtypes.
        @SuppressWarnings("unchecked")
        final X cached = (X) conceptCache.get(id);
        return cached;
    }
}
public class GenRestBuilderProcessor { /** * Prints a warning message * @ param e The element which has caused the error . Can be null * @ param msg The error message * @ param args if the error message contains % s , % d etc . placeholders this arguments will be used * to replace them */ public void warn ( Element e , String msg , Object ... args ) { } }
messager . printMessage ( Diagnostic . Kind . WARNING , String . format ( msg , args ) , e ) ;
public class ULocale { /** * < strong > [ icu ] < / strong > Converts the specified keyword ( BCP 47 Unicode locale extension key , or * legacy key ) to the legacy key . For example , legacy key " collation " is * returned for the input BCP 47 Unicode locale extension key " co " . * @ param keyword the input locale keyword ( either BCP 47 Unicode locale * extension key or legacy key ) . * @ return the well - formed legacy key , or null if the specified * keyword cannot be mapped to a well - formed legacy key . * @ see # toUnicodeLocaleKey ( String ) */ public static String toLegacyKey ( String keyword ) { } }
String legacyKey = KeyTypeData . toLegacyKey ( keyword ) ; if ( legacyKey == null ) { // Checks if the specified locale key is well - formed with the legacy locale syntax . // Note : // Neither ICU nor LDML / CLDR provides the definition of keyword syntax . // However , a key should not contain ' = ' obviously . For now , all existing // keys are using ASCII alphabetic letters only . We won ' t add any new key // that is not compatible with the BCP 47 syntax . Therefore , we assume // a valid key consist from [ 0-9a - zA - Z ] , no symbols . if ( keyword . matches ( "[0-9a-zA-Z]+" ) ) { legacyKey = AsciiUtil . toLowerString ( keyword ) ; } } return legacyKey ;
public class DMNStyle { /** * Sets the value of the strokeColor property . * @ param value * allowed object is * { @ link Color } */ public void setStrokeColor ( org . kie . dmn . model . api . dmndi . Color value ) { } }
// Plain bean setter; no defensive copy — the caller-supplied Color instance is shared.
this . strokeColor = value ;
public class LRImporter { /** * Get a result from an obtain request * If the resumption token is not null , it will override the other parameters for ths request * @ param requestID the " request _ id " value to use for this request * @ param byResourceID the " by _ resource _ id " value to use for this request * @ param byDocID the " by _ doc _ id " value to use for this request * @ param idsOnly the " ids _ only " value to use for this request * @ param resumptionToken the " resumption _ token " value to use for this request * @ return the result from this request */ private LRResult getObtainJSONData ( String requestID , Boolean byResourceID , Boolean byDocID , Boolean idsOnly , String resumptionToken ) throws LRException { } }
String path = getObtainRequestPath ( requestID , byResourceID , byDocID , idsOnly , resumptionToken ) ; JSONObject json = getJSONFromPath ( path ) ; return new LRResult ( json ) ;
public class EnvironmentVariableMarshaller { /** * Marshall the given parameter object . */ public void marshall ( EnvironmentVariable environmentVariable , ProtocolMarshaller protocolMarshaller ) { } }
// Generated-style marshaller: writes key, value and the secure flag via their static
// *_BINDING descriptors. Field order matches the wire contract; code left byte-identical.
if ( environmentVariable == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( environmentVariable . getKey ( ) , KEY_BINDING ) ; protocolMarshaller . marshall ( environmentVariable . getValue ( ) , VALUE_BINDING ) ; protocolMarshaller . marshall ( environmentVariable . getSecure ( ) , SECURE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class WrapperIterator { /** * Tests if this iterator contains more elements . < p > * @ return < code > true < / code > if this iterator contains more elements ; * < code > false < / code > otherwise . */ public boolean hasNext ( ) { } }
// Two modes: a chained pair of iterators (it1 then it2, each nulled out once exhausted,
// with recursion to fall through from it1 to it2), or a plain element array where the
// notNull flag skips null slots and the array is released once fully consumed.
// NOTE(review): exhaustion is detected here (not in next()); code left byte-identical.
// for chained iterators
if ( chained ) { if ( it1 == null ) { if ( it2 == null ) { return false ; } if ( it2 . hasNext ( ) ) { return true ; } it2 = null ; return false ; } else { if ( it1 . hasNext ( ) ) { return true ; } it1 = null ; return hasNext ( ) ; } }
// for other iterators
if ( elements == null ) { return false ; } for ( ; notNull && i < elements . length && elements [ i ] == null ; i ++ ) { } if ( i < elements . length ) { return true ; } else {
// release elements for garbage collection
elements = null ; return false ; }
public class VictimsConfig { /** * Get the configured cache directory . If the directory does not exist , it * will be created . * @ return * @ throws VictimsException */ public static File home ( ) throws VictimsException { } }
File directory = new File ( getPropertyValue ( Key . HOME ) ) ; if ( ! directory . exists ( ) ) { try { FileUtils . forceMkdir ( directory ) ; } catch ( IOException e ) { throw new VictimsException ( "Could not create home directory." , e ) ; } } return directory ;
public class IndexTermReader { /** * This method is used to create a target which refers to current topic . * @ return instance of IndexTermTarget created */ private IndexTermTarget genTarget ( ) { } }
final IndexTermTarget target = new IndexTermTarget ( ) ; String fragment ; if ( topicIdStack . peek ( ) == null ) { fragment = null ; } else { fragment = topicIdStack . peek ( ) ; } if ( title != null ) { target . setTargetName ( title ) ; } else { target . setTargetName ( targetFile ) ; } if ( fragment != null ) { target . setTargetURI ( setFragment ( targetFile , fragment ) ) ; } else { target . setTargetURI ( targetFile ) ; } return target ;
public class API { /** * Validates that the mandatory parameters of the given { @ code ApiElement } are present , throwing an { @ code ApiException } if * not . * @ param params the parameters of the API request . * @ param element the API element to validate . * @ throws ApiException if any of the mandatory parameters is missing . */ private void validateMandatoryParams ( JSONObject params , ApiElement element ) throws ApiException { } }
if ( element == null ) { return ; } List < String > mandatoryParams = element . getMandatoryParamNames ( ) ; if ( mandatoryParams != null ) { for ( String param : mandatoryParams ) { if ( ! params . has ( param ) || params . getString ( param ) . length ( ) == 0 ) { throw new ApiException ( ApiException . Type . MISSING_PARAMETER , param ) ; } } }
public class DecimalFormat {
    /**
     * Appends an affix pattern to the given StringBuffer. Localize unquoted specials.
     * <b>Note:</b> This implementation does not support new String localized symbols.
     *
     * Output must round-trip through the pattern parser, so literal text is
     * quote-protected and special pattern characters are translated to (or
     * from) the localized symbols as requested.
     */
    private void appendAffixPattern(StringBuffer buffer, boolean isNegative, boolean isPrefix, boolean localized) {
        // Select the stored affix *pattern* for the requested corner
        // (negative/positive x prefix/suffix).
        String affixPat = null;
        if (isPrefix) {
            affixPat = isNegative ? negPrefixPattern : posPrefixPattern;
        } else {
            affixPat = isNegative ? negSuffixPattern : posSuffixPattern;
        }
        // When there is a null affix pattern, we use the affix itself.
        if (affixPat == null) {
            String affix = null;
            if (isPrefix) {
                affix = isNegative ? negativePrefix : positivePrefix;
            } else {
                affix = isNegative ? negativeSuffix : positiveSuffix;
            }
            // Do this crudely for now: Wrap everything in quotes.
            buffer.append(QUOTE);
            for (int i = 0; i < affix.length(); ++i) {
                char ch = affix.charAt(i);
                if (ch == QUOTE) {
                    // Double any embedded quote so it survives re-parsing.
                    buffer.append(ch);
                }
                buffer.append(ch);
            }
            buffer.append(QUOTE);
            return;
        }
        if (!localized) {
            // Non-localized form: the stored pattern is emitted verbatim.
            buffer.append(affixPat);
        } else {
            int i, j;
            for (i = 0; i < affixPat.length(); ++i) {
                char ch = affixPat.charAt(i);
                switch (ch) {
                case QUOTE:
                    // Copy a quoted literal span through unchanged, including
                    // both delimiting quotes; an unterminated quote is an error.
                    j = affixPat.indexOf(QUOTE, i + 1);
                    if (j < 0) {
                        throw new IllegalArgumentException("Malformed affix pattern: " + affixPat);
                    }
                    buffer.append(affixPat.substring(i, j + 1));
                    i = j;
                    continue;
                // Translate unquoted special pattern chars to their localized
                // symbols.
                case PATTERN_PER_MILLE:
                    ch = symbols.getPerMill();
                    break;
                case PATTERN_PERCENT:
                    ch = symbols.getPercent();
                    break;
                case PATTERN_MINUS_SIGN:
                    ch = symbols.getMinusSign();
                    break;
                }
                // check if char is same as any other symbol; if the localized
                // symbol collides with a separator it must be quoted to stay
                // literal.
                if (ch == symbols.getDecimalSeparator() || ch == symbols.getGroupingSeparator()) {
                    buffer.append(QUOTE);
                    buffer.append(ch);
                    buffer.append(QUOTE);
                } else {
                    buffer.append(ch);
                }
            }
        }
    }
}
public class LogConditionalObjectiveFunction {
    /**
     * Calculate conditional likelihood for datasets with real-valued features.
     * Currently this can calculate CL only (no support for SCL).
     * TODO: sum-conditional obj. fun. with RVFs.
     *
     * Side effects: sets the fields {@code value} (negative conditional log
     * likelihood plus prior) and {@code derivative} (its gradient w.r.t.
     * {@code x}), and lazily caches {@code derivativeNumerator}.
     */
    protected void rvfcalculate(double[] x) {
        value = 0.0;
        // The empirical-count term of the gradient does not depend on x, so
        // compute it once and cache it across calls.
        if (derivativeNumerator == null) {
            derivativeNumerator = new double[x.length];
            for (int d = 0; d < data.length; d++) {
                int[] features = data[d];
                for (int f = 0; f < features.length; f++) {
                    // Weight index for (feature, gold label) of datum d.
                    int i = indexOf(features[f], labels[d]);
                    if (dataweights == null) {
                        derivativeNumerator[i] -= values[d][f];
                    } else {
                        derivativeNumerator[i] -= dataweights[d] * values[d][f];
                    }
                }
            }
        }
        // Start the gradient from the (negated) empirical counts; the model
        // expectations are added in the loop below.
        copy(derivative, derivativeNumerator);
        double[] sums = new double[numClasses];
        double[] probs = new double[numClasses];
        for (int d = 0; d < data.length; d++) {
            int[] features = data[d];
            // activation: per-class scores sums[c] = sum_f x[i(f,c)] * value[d][f]
            Arrays.fill(sums, 0.0);
            for (int c = 0; c < numClasses; c++) {
                for (int f = 0; f < features.length; f++) {
                    int i = indexOf(features[f], c);
                    sums[c] += x[i] * values[d][f];
                }
            }
            // expectation: log-sum-exp normalizer over classes (fast routine
            // replacing an explicit logAdd loop).
            double total = ArrayMath.logSum(sums);
            for (int c = 0; c < numClasses; c++) {
                // Model probability of class c for datum d (optionally
                // scaled by the datum weight), folded into the gradient.
                probs[c] = Math.exp(sums[c] - total);
                if (dataweights != null) {
                    probs[c] *= dataweights[d];
                }
                for (int f = 0; f < features.length; f++) {
                    int i = indexOf(features[f], c);
                    derivative[i] += probs[c] * values[d][f];
                }
            }
            // Log likelihood contribution of the gold label for datum d.
            double dV = sums[labels[d]] - total;
            if (dataweights != null) {
                dV *= dataweights[d];
            }
            value -= dV;
        }
        // Add the prior's contribution to both value and derivative.
        value += prior.compute(x, derivative);
    }
}
public class PTable { /** * Bump the use count . * This doesn ' t have to be synchronized because getPTable in PDatabase is . * @ param pTableOwner The table owner to add . * @ return The new use count . */ public int addPTableOwner ( ThinPhysicalTableOwner pTableOwner ) { } }
if ( pTableOwner != null ) { m_lTimeLastUsed = System . currentTimeMillis ( ) ; m_setPTableOwners . add ( pTableOwner ) ; pTableOwner . setPTable ( this ) ; } return m_setPTableOwners . size ( ) ;
public class JobLauncherFactory {
    /**
     * Create a new {@link JobLauncher}.
     * This method will never return a {@code null}.
     *
     * @param sysProps system configuration properties
     * @param jobProps job configuration properties
     * @return newly created {@link JobLauncher}
     */
    public static @Nonnull JobLauncher newJobLauncher(Properties sysProps, Properties jobProps) throws Exception {
        // Delegates to the three-argument overload; presumably the null third
        // argument selects a default launcher — TODO confirm against that
        // overload's handling of null.
        return newJobLauncher(sysProps, jobProps, null);
    }
}
public class ApplicationSession { /** * Refresh this session ' s application schema from the database . Since the * application ' s { @ link ApplicationDefinition } is cached , it could be out of date * if the schema has been modified . This method fetches the latest version and * returns it . An exception is thrown if the application has been deleted or any * other error occurs . * @ return Latest version of this session ' s application as an * { @ link ApplicationDefinition } , which is also cahced . * @ see # getAppDef ( ) */ public ApplicationDefinition refreshSchema ( ) { } }
try { // Send a GET request to " / _ applications / { application } StringBuilder uri = new StringBuilder ( "/_applications/" ) ; uri . append ( Utils . urlEncode ( m_appDef . getAppName ( ) ) ) ; RESTResponse response = m_restClient . sendRequest ( HttpMethod . GET , uri . toString ( ) ) ; m_logger . debug ( "listApplication() response: {}" , response . toString ( ) ) ; throwIfErrorResponse ( response ) ; m_appDef . parse ( getUNodeResult ( response ) ) ; return m_appDef ; } catch ( Exception e ) { throw new RuntimeException ( e ) ; }
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EEnum getIfcDuctFittingTypeEnum ( ) { } }
if ( ifcDuctFittingTypeEnumEEnum == null ) { ifcDuctFittingTypeEnumEEnum = ( EEnum ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 819 ) ; } return ifcDuctFittingTypeEnumEEnum ;
public class LoopingDuplicationTaskProducer {
    /*
     * (non-Javadoc)
     * @see org.duracloud.mill.ltp.LoopingTaskProducer#loadMorselQueueFromSource(java.util.Queue)
     */
    @Override
    protected void loadMorselQueueFromSource(Queue<DuplicationMorsel> morselQueue) {
        // Generate one morsel per (account, space, store-policy) combination,
        // drawn from each active account's primary storage provider.
        for (String account : this.policyManager.getDuplicationAccounts()) {
            DuplicationPolicy policy = this.policyManager.getDuplicationPolicy(account);
            try {
                final CredentialsRepo credRepo = getCredentialsRepo();
                if (!credRepo.isAccountActive(account)) {
                    continue;
                }
                AccountCredentials accountCreds = credRepo.getAccountCredentials(account);
                for (StorageProviderCredentials cred : accountCreds.getProviderCredentials()) {
                    // Only the primary provider's spaces seed duplication work.
                    if (!cred.isPrimary()) {
                        continue;
                    }
                    StorageProvider provider = getStorageProvider(cred);
                    Iterator<String> spaces = provider.getSpaces();
                    while (spaces.hasNext()) {
                        String spaceId = spaces.next();
                        for (DuplicationStorePolicy storePolicy : policy.getDuplicationStorePolicies(spaceId)) {
                            morselQueue.add(new DuplicationMorsel(account, spaceId, null, storePolicy));
                        }
                    }
                }
            } catch (AccountCredentialsNotFoundException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
public class InternalSARLParser {
    /**
     * ANTLR-generated entry rule (InternalSARL.g:14416:1):
     * entryRuleXBasicForLoopExpression returns [EObject current = null] :
     * iv_ruleXBasicForLoopExpression = ruleXBasicForLoopExpression EOF ;
     *
     * Parses a single XBasicForLoopExpression followed by end-of-input.
     *
     * @return the parsed model object, or {@code null} on failure or while
     *         backtracking
     */
    public final EObject entryRuleXBasicForLoopExpression() throws RecognitionException {
        EObject current = null;

        EObject iv_ruleXBasicForLoopExpression = null;

        try {
            // iv_ruleXBasicForLoopExpression = ruleXBasicForLoopExpression EOF
            {
                if (state.backtracking == 0) {
                    // Only build parse-tree nodes on a committed (non-speculative) parse.
                    newCompositeNode(grammarAccess.getXBasicForLoopExpressionRule());
                }
                pushFollow(FOLLOW_1);
                iv_ruleXBasicForLoopExpression = ruleXBasicForLoopExpression();
                state._fsp--;
                // In backtracking mode failures return instead of throwing.
                if (state.failed) return current;
                if (state.backtracking == 0) {
                    current = iv_ruleXBasicForLoopExpression;
                }
                // An entry rule must consume the entire input.
                match(input, EOF, FOLLOW_2);
                if (state.failed) return current;
            }
        } catch (RecognitionException re) {
            // Standard ANTLR error recovery: resynchronize and keep the
            // tokens that were skipped.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class GetDimensionValuesResult { /** * The filters that you used to filter your request . Some dimensions are available only for a specific context . * If you set the context to < code > COST _ AND _ USAGE < / code > , you can use the following dimensions for searching : * < ul > * < li > * AZ - The Availability Zone . An example is < code > us - east - 1a < / code > . * < / li > * < li > * DATABASE _ ENGINE - The Amazon Relational Database Service database . Examples are Aurora or MySQL . * < / li > * < li > * INSTANCE _ TYPE - The type of Amazon EC2 instance . An example is < code > m4 . xlarge < / code > . * < / li > * < li > * LEGAL _ ENTITY _ NAME - The name of the organization that sells you AWS services , such as Amazon Web Services . * < / li > * < li > * LINKED _ ACCOUNT - The description in the attribute map that includes the full name of the member account . The * value field contains the AWS ID of the member account . * < / li > * < li > * OPERATING _ SYSTEM - The operating system . Examples are Windows or Linux . * < / li > * < li > * OPERATION - The action performed . Examples include < code > RunInstance < / code > and < code > CreateBucket < / code > . * < / li > * < li > * PLATFORM - The Amazon EC2 operating system . Examples are Windows or Linux . * < / li > * < li > * PURCHASE _ TYPE - The reservation type of the purchase to which this usage is related . Examples include On - Demand * Instances and Standard Reserved Instances . * < / li > * < li > * SERVICE - The AWS service such as Amazon DynamoDB . * < / li > * < li > * USAGE _ TYPE - The type of usage . An example is DataTransfer - In - Bytes . The response for the * < code > GetDimensionValues < / code > operation includes a unit attribute . Examples include GB and Hrs . * < / li > * < li > * USAGE _ TYPE _ GROUP - The grouping of common usage types . An example is Amazon EC2 : CloudWatch – Alarms . The * response for this operation includes a unit attribute . 
* < / li > * < li > * RECORD _ TYPE - The different types of charges such as RI fees , usage costs , tax refunds , and credits . * < / li > * < / ul > * If you set the context to < code > RESERVATIONS < / code > , you can use the following dimensions for searching : * < ul > * < li > * AZ - The Availability Zone . An example is < code > us - east - 1a < / code > . * < / li > * < li > * CACHE _ ENGINE - The Amazon ElastiCache operating system . Examples are Windows or Linux . * < / li > * < li > * DEPLOYMENT _ OPTION - The scope of Amazon Relational Database Service deployments . Valid values are * < code > SingleAZ < / code > and < code > MultiAZ < / code > . * < / li > * < li > * INSTANCE _ TYPE - The type of Amazon EC2 instance . An example is < code > m4 . xlarge < / code > . * < / li > * < li > * LINKED _ ACCOUNT - The description in the attribute map that includes the full name of the member account . The * value field contains the AWS ID of the member account . * < / li > * < li > * PLATFORM - The Amazon EC2 operating system . Examples are Windows or Linux . * < / li > * < li > * REGION - The AWS Region . * < / li > * < li > * SCOPE ( Utilization only ) - The scope of a Reserved Instance ( RI ) . Values are regional or a single Availability * Zone . * < / li > * < li > * TAG ( Coverage only ) - The tags that are associated with a Reserved Instance ( RI ) . * < / li > * < li > * TENANCY - The tenancy of a resource . Examples are shared or dedicated . * < / li > * < / ul > * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDimensionValues ( java . util . Collection ) } or { @ link # withDimensionValues ( java . util . Collection ) } if you * want to override the existing values . * @ param dimensionValues * The filters that you used to filter your request . Some dimensions are available only for a specific * context . 
< / p > * If you set the context to < code > COST _ AND _ USAGE < / code > , you can use the following dimensions for searching : * < ul > * < li > * AZ - The Availability Zone . An example is < code > us - east - 1a < / code > . * < / li > * < li > * DATABASE _ ENGINE - The Amazon Relational Database Service database . Examples are Aurora or MySQL . * < / li > * < li > * INSTANCE _ TYPE - The type of Amazon EC2 instance . An example is < code > m4 . xlarge < / code > . * < / li > * < li > * LEGAL _ ENTITY _ NAME - The name of the organization that sells you AWS services , such as Amazon Web Services . * < / li > * < li > * LINKED _ ACCOUNT - The description in the attribute map that includes the full name of the member account . * The value field contains the AWS ID of the member account . * < / li > * < li > * OPERATING _ SYSTEM - The operating system . Examples are Windows or Linux . * < / li > * < li > * OPERATION - The action performed . Examples include < code > RunInstance < / code > and < code > CreateBucket < / code > . * < / li > * < li > * PLATFORM - The Amazon EC2 operating system . Examples are Windows or Linux . * < / li > * < li > * PURCHASE _ TYPE - The reservation type of the purchase to which this usage is related . Examples include * On - Demand Instances and Standard Reserved Instances . * < / li > * < li > * SERVICE - The AWS service such as Amazon DynamoDB . * < / li > * < li > * USAGE _ TYPE - The type of usage . An example is DataTransfer - In - Bytes . The response for the * < code > GetDimensionValues < / code > operation includes a unit attribute . Examples include GB and Hrs . * < / li > * < li > * USAGE _ TYPE _ GROUP - The grouping of common usage types . An example is Amazon EC2 : CloudWatch – Alarms . The * response for this operation includes a unit attribute . * < / li > * < li > * RECORD _ TYPE - The different types of charges such as RI fees , usage costs , tax refunds , and credits . 
* < / li > * < / ul > * If you set the context to < code > RESERVATIONS < / code > , you can use the following dimensions for searching : * < ul > * < li > * AZ - The Availability Zone . An example is < code > us - east - 1a < / code > . * < / li > * < li > * CACHE _ ENGINE - The Amazon ElastiCache operating system . Examples are Windows or Linux . * < / li > * < li > * DEPLOYMENT _ OPTION - The scope of Amazon Relational Database Service deployments . Valid values are * < code > SingleAZ < / code > and < code > MultiAZ < / code > . * < / li > * < li > * INSTANCE _ TYPE - The type of Amazon EC2 instance . An example is < code > m4 . xlarge < / code > . * < / li > * < li > * LINKED _ ACCOUNT - The description in the attribute map that includes the full name of the member account . * The value field contains the AWS ID of the member account . * < / li > * < li > * PLATFORM - The Amazon EC2 operating system . Examples are Windows or Linux . * < / li > * < li > * REGION - The AWS Region . * < / li > * < li > * SCOPE ( Utilization only ) - The scope of a Reserved Instance ( RI ) . Values are regional or a single * Availability Zone . * < / li > * < li > * TAG ( Coverage only ) - The tags that are associated with a Reserved Instance ( RI ) . * < / li > * < li > * TENANCY - The tenancy of a resource . Examples are shared or dedicated . * < / li > * @ return Returns a reference to this object so that method calls can be chained together . */ public GetDimensionValuesResult withDimensionValues ( DimensionValuesWithAttributes ... dimensionValues ) { } }
if ( this . dimensionValues == null ) { setDimensionValues ( new java . util . ArrayList < DimensionValuesWithAttributes > ( dimensionValues . length ) ) ; } for ( DimensionValuesWithAttributes ele : dimensionValues ) { this . dimensionValues . add ( ele ) ; } return this ;
public class GoogleHadoopFileSystemBase { /** * Returns home directory of the current user . * Note : This directory is only used for Hadoop purposes . * It is not the same as a user ' s OS home directory . */ @ Override public Path getHomeDirectory ( ) { } }
Path result = new Path ( getFileSystemRoot ( ) , getHomeDirectorySubpath ( ) ) ; logger . atFine ( ) . log ( "GHFS.getHomeDirectory:=> %s" , result ) ; return result ;
public class ProductSegmentation {
    /**
     * Sets the operatingSystemSegment value for this ProductSegmentation.
     *
     * @param operatingSystemSegment
     *        The operating system segmentation. {@link OperatingSystemTargeting#isTargeted}
     *        must be {@code true}.
     *        We only allow segment by Operating_System, not Operating_System_Version
     *        (will be ignored).
     *        <p>This attribute is optional.
     */
    public void setOperatingSystemSegment(com.google.api.ads.admanager.axis.v201808.OperatingSystemTargeting operatingSystemSegment) {
        // Plain generated-style setter; no validation is performed here.
        this.operatingSystemSegment = operatingSystemSegment;
    }
}
public class RestClientUtil { /** * 创建或者更新索引文档 * @ param indexName * @ param indexType * @ param params * @ return * @ throws ElasticSearchException */ public String addDateMapDocument ( String indexName , String indexType , Map params , ClientOptions clientOptions ) throws ElasticSearchException { } }
return addMapDocument ( this . indexNameBuilder . getIndexName ( indexName ) , indexType , params , clientOptions ) ;
public class SoftMaxRegression {
    /**
     * {@inheritDoc}
     */
    @Override
    protected void _predict(Dataframe newData) {
        // Delegate to the shared parallel prediction routine, using the
        // storage engine and concurrency settings held by the knowledge base.
        _predictDatasetParallel(newData, knowledgeBase.getStorageEngine(), knowledgeBase.getConfiguration().getConcurrencyConfiguration());
    }
}