signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CodeScriptAction { /** * 保存 */ public String save ( ) { } }
Integer codeScriptId = getInt ( "codeScript.id" ) ; CodeScript codeScript = null ; if ( null == codeScriptId ) { codeScript = new CodeScript ( ) ; } else { codeScript = ( CodeScript ) entityDao . get ( CodeScript . class , codeScriptId ) ; } codeScript . setUpdatedAt ( new Date ( System . currentTimeMillis ( ) ) ) ; Model . populate ( codeScript , Params . sub ( "codeScript" ) ) ; entityDao . saveOrUpdate ( codeScript ) ; return redirect ( "search" , "info.save.success" ) ;
public class PropertyValueConverter { /** * Converts a string value from the database to the best matching java * primitive type . */ @ Override public Object convertToEntityAttribute ( String dbData ) { } }
if ( "true" . equalsIgnoreCase ( dbData ) ) { return true ; } else if ( "false" . equalsIgnoreCase ( dbData ) ) { return false ; } else if ( NumberUtils . isParsable ( dbData ) ) { if ( NumberUtils . isDigits ( dbData ) || ( dbData . startsWith ( "-" ) && NumberUtils . isDigits ( dbData . substring ( 1 ) ) ) ) { return Long . parseLong ( dbData ) ; } else { return Double . parseDouble ( dbData ) ; } } return dbData ;
public class Workbook { /** * Creates a new workbook object . * @ param format The format of the workbook ( XLS or XLSX ) * @ param os The output stream to write the workbook to * @ return The new workbook created * @ throws IOException if the file cannot be written */ public static Workbook createWorkbook ( FileFormat format , OutputStream os ) throws IOException { } }
return createWorkbook ( format , os , null ) ;
public class AmazonSimpleWorkflowClient { /** * Returns information about the specified workflow execution including its type and some statistics . * < note > * This operation is eventually consistent . The results are best effort and may not exactly reflect recent updates * and changes . * < / note > * < b > Access Control < / b > * You can use IAM policies to control this action ' s access to Amazon SWF resources as follows : * < ul > * < li > * Use a < code > Resource < / code > element with the domain name to limit the action to only specified domains . * < / li > * < li > * Use an < code > Action < / code > element to allow or deny permission to call this action . * < / li > * < li > * You cannot use an IAM policy to constrain this action ' s parameters . * < / li > * < / ul > * If the caller doesn ' t have sufficient permissions to invoke the action , or the parameter values fall outside the * specified constraints , the action fails . The associated event attribute ' s < code > cause < / code > parameter is set to * < code > OPERATION _ NOT _ PERMITTED < / code > . For details and example IAM policies , see < a * href = " http : / / docs . aws . amazon . com / amazonswf / latest / developerguide / swf - dev - iam . html " > Using IAM to Manage Access to * Amazon SWF Workflows < / a > in the < i > Amazon SWF Developer Guide < / i > . * @ param describeWorkflowExecutionRequest * @ return Result of the DescribeWorkflowExecution operation returned by the service . * @ throws UnknownResourceException * Returned when the named resource cannot be found with in the scope of this operation ( region or domain ) . * This could happen if the named resource was never created or is no longer available for this operation . * @ throws OperationNotPermittedException * Returned when the caller doesn ' t have sufficient permissions to invoke the action . * @ sample AmazonSimpleWorkflow . DescribeWorkflowExecution * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / swf - 2012-01-25 / DescribeWorkflowExecution " target = " _ top " > AWS * API Documentation < / a > */ @ Override public WorkflowExecutionDetail describeWorkflowExecution ( DescribeWorkflowExecutionRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeWorkflowExecution ( request ) ;
public class ToStringBuilder { /** * < p > Append to the < code > toString < / code > a < code > double < / code > * array . < / p > * < p > A boolean parameter controls the level of detail to show . * Setting < code > true < / code > will output the array in full . Setting * < code > false < / code > will output a summary , typically the size of * the array . < / p > * @ param fieldName the field name * @ param array the array to add to the < code > toString < / code > * @ param fullDetail < code > true < / code > for detail , < code > false < / code > * for summary info * @ return this */ public ToStringBuilder append ( final String fieldName , final double [ ] array , final boolean fullDetail ) { } }
style . append ( buffer , fieldName , array , Boolean . valueOf ( fullDetail ) ) ; return this ;
public class MethodWriter {
    /**
     * Resizes and replaces the temporary instructions inserted by
     * {@link Label#resolve} for wide forward jumps, while keeping jump offsets
     * and instruction addresses consistent. This may require to resize other
     * existing instructions, or even to introduce new instructions: for
     * example, increasing the size of an instruction by 2 at the middle of a
     * method can increases the offset of an IFEQ instruction from 32766 to
     * 32768, in which case IFEQ 32766 must be replaced with IFNEQ 8 GOTO_W
     * 32765. This, in turn, may require to increase the size of another jump
     * instruction, and so on... All these operations are handled automatically
     * by this method.
     *
     * <i>This method must be called after all the method that is being built
     * has been visited</i>. In particular, the {@link Label Label} objects used
     * to construct the method are no longer valid after this method has been
     * called.
     */
    private void resizeInstructions() {
        byte[] b = code.data; // bytecode of the method
        int u, v, label; // indexes in b
        int i, j; // loop indexes
        /*
         * 1st step: As explained above, resizing an instruction may require to
         * resize another one, which may require to resize yet another one, and
         * so on. The first step of the algorithm consists in finding all the
         * instructions that need to be resized, without modifying the code.
         * This is done by the following "fix point" algorithm:
         *
         * Parse the code to find the jump instructions whose offset will need
         * more than 2 bytes to be stored (the future offset is computed from
         * the current offset and from the number of bytes that will be inserted
         * or removed between the source and target instructions). For each such
         * instruction, adds an entry in (a copy of) the indexes and sizes
         * arrays (if this has not already been done in a previous iteration!).
         *
         * If at least one entry has been added during the previous step, go
         * back to the beginning, otherwise stop.
         *
         * In fact the real algorithm is complicated by the fact that the size
         * of TABLESWITCH and LOOKUPSWITCH instructions depends on their
         * position in the bytecode (because of padding). In order to ensure the
         * convergence of the algorithm, the number of bytes to be added or
         * removed from these instructions is over estimated during the previous
         * loop, and computed exactly only after the loop is finished (this
         * requires another pass to parse the bytecode of the method).
         */
        int[] allIndexes = new int[0]; // copy of indexes
        int[] allSizes = new int[0]; // copy of sizes
        boolean[] resize; // instructions to be resized
        int newOffset; // future offset of a jump instruction
        resize = new boolean[code.length];
        // 3 = loop again, 2 = loop ended, 1 = last pass, 0 = done
        int state = 3;
        do {
            if (state == 3) {
                state = 2;
            }
            u = 0;
            while (u < b.length) {
                int opcode = b[u] & 0xFF; // opcode of current instruction
                int insert = 0; // bytes to be added after this instruction
                switch (ClassWriter.TYPE[opcode]) {
                case ClassWriter.NOARG_INSN:
                case ClassWriter.IMPLVAR_INSN:
                    u += 1;
                    break;
                case ClassWriter.LABEL_INSN:
                    if (opcode > 201) {
                        // converts temporary opcodes 202 to 217, 218 and
                        // 219 to IFEQ ... JSR (inclusive), IFNULL and
                        // IFNONNULL
                        opcode = opcode < 218 ? opcode - 49 : opcode - 20;
                        label = u + readUnsignedShort(b, u + 1);
                    } else {
                        label = u + readShort(b, u + 1);
                    }
                    newOffset = getNewOffset(allIndexes, allSizes, u, label);
                    if (newOffset < Short.MIN_VALUE || newOffset > Short.MAX_VALUE) {
                        // offset no longer fits in 16 bits: this jump must be widened
                        if (!resize[u]) {
                            if (opcode == Opcodes.GOTO || opcode == Opcodes.JSR) {
                                // two additional bytes will be required to
                                // replace this GOTO or JSR instruction with
                                // a GOTO_W or a JSR_W
                                insert = 2;
                            } else {
                                // five additional bytes will be required to
                                // replace this IFxxx <l> instruction with
                                // IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx
                                // is the "opposite" opcode of IFxxx (i.e.,
                                // IFNE for IFEQ) and where <l'> designates
                                // the instruction just after the GOTO_W.
                                insert = 5;
                            }
                            resize[u] = true;
                        }
                    }
                    u += 3;
                    break;
                case ClassWriter.LABELW_INSN:
                    u += 5;
                    break;
                case ClassWriter.TABL_INSN:
                    if (state == 1) {
                        // true number of bytes to be added (or removed)
                        // from this instruction = (future number of padding
                        // bytes - current number of padding byte) -
                        // previously over estimated variation =
                        // = ((3 - newOffset % 4) - (3 - u % 4)) - u % 4
                        // = (-newOffset % 4 + u % 4) - u % 4
                        // = -(newOffset & 3)
                        newOffset = getNewOffset(allIndexes, allSizes, 0, u);
                        insert = -(newOffset & 3);
                    } else if (!resize[u]) {
                        // over estimation of the number of bytes to be
                        // added to this instruction = 3 - current number
                        // of padding bytes = 3 - (3 - u % 4) = u % 4 = u & 3
                        insert = u & 3;
                        resize[u] = true;
                    }
                    // skips instruction
                    u = u + 4 - (u & 3);
                    u += 4 * (readInt(b, u + 8) - readInt(b, u + 4) + 1) + 12;
                    break;
                case ClassWriter.LOOK_INSN:
                    if (state == 1) {
                        // like TABL_INSN
                        newOffset = getNewOffset(allIndexes, allSizes, 0, u);
                        insert = -(newOffset & 3);
                    } else if (!resize[u]) {
                        // like TABL_INSN
                        insert = u & 3;
                        resize[u] = true;
                    }
                    // skips instruction
                    u = u + 4 - (u & 3);
                    u += 8 * readInt(b, u + 4) + 8;
                    break;
                case ClassWriter.WIDE_INSN:
                    opcode = b[u + 1] & 0xFF;
                    if (opcode == Opcodes.IINC) {
                        u += 6;
                    } else {
                        u += 4;
                    }
                    break;
                case ClassWriter.VAR_INSN:
                case ClassWriter.SBYTE_INSN:
                case ClassWriter.LDC_INSN:
                    u += 2;
                    break;
                case ClassWriter.SHORT_INSN:
                case ClassWriter.LDCW_INSN:
                case ClassWriter.FIELDORMETH_INSN:
                case ClassWriter.TYPE_INSN:
                case ClassWriter.IINC_INSN:
                    u += 3;
                    break;
                case ClassWriter.ITFMETH_INSN:
                case ClassWriter.INDYMETH_INSN:
                    u += 5;
                    break;
                // case ClassWriter.MANA_INSN:
                default:
                    u += 4;
                    break;
                }
                if (insert != 0) {
                    // adds a new (u, insert) entry in the allIndexes and
                    // allSizes arrays
                    int[] newIndexes = new int[allIndexes.length + 1];
                    int[] newSizes = new int[allSizes.length + 1];
                    System.arraycopy(allIndexes, 0, newIndexes, 0, allIndexes.length);
                    System.arraycopy(allSizes, 0, newSizes, 0, allSizes.length);
                    newIndexes[allIndexes.length] = u;
                    newSizes[allSizes.length] = insert;
                    allIndexes = newIndexes;
                    allSizes = newSizes;
                    if (insert > 0) {
                        // something grew: re-run the fix point loop
                        state = 3;
                    }
                }
            }
            if (state < 3) {
                --state;
            }
        } while (state != 0);
        // 2nd step:
        // copies the bytecode of the method into a new bytevector, updates the
        // offsets, and inserts (or removes) bytes as requested.
        ByteVector newCode = new ByteVector(code.length);
        u = 0;
        while (u < code.length) {
            int opcode = b[u] & 0xFF;
            switch (ClassWriter.TYPE[opcode]) {
            case ClassWriter.NOARG_INSN:
            case ClassWriter.IMPLVAR_INSN:
                newCode.putByte(opcode);
                u += 1;
                break;
            case ClassWriter.LABEL_INSN:
                if (opcode > 201) {
                    // changes temporary opcodes 202 to 217 (inclusive), 218
                    // and 219 to IFEQ ... JSR (inclusive), IFNULL and
                    // IFNONNULL
                    opcode = opcode < 218 ? opcode - 49 : opcode - 20;
                    label = u + readUnsignedShort(b, u + 1);
                } else {
                    label = u + readShort(b, u + 1);
                }
                newOffset = getNewOffset(allIndexes, allSizes, u, label);
                if (resize[u]) {
                    // replaces GOTO with GOTO_W, JSR with JSR_W and IFxxx
                    // <l> with IFNOTxxx <l'> GOTO_W <l>, where IFNOTxxx is
                    // the "opposite" opcode of IFxxx (i.e., IFNE for IFEQ)
                    // and where <l'> designates the instruction just after
                    // the GOTO_W.
                    if (opcode == Opcodes.GOTO) {
                        newCode.putByte(200); // GOTO_W
                    } else if (opcode == Opcodes.JSR) {
                        newCode.putByte(201); // JSR_W
                    } else {
                        newCode.putByte(opcode <= 166 ? ((opcode + 1) ^ 1) - 1 : opcode ^ 1);
                        newCode.putShort(8); // jump offset
                        newCode.putByte(200); // GOTO_W
                        // newOffset now computed from start of GOTO_W
                        newOffset -= 3;
                    }
                    newCode.putInt(newOffset);
                } else {
                    newCode.putByte(opcode);
                    newCode.putShort(newOffset);
                }
                u += 3;
                break;
            case ClassWriter.LABELW_INSN:
                label = u + readInt(b, u + 1);
                newOffset = getNewOffset(allIndexes, allSizes, u, label);
                newCode.putByte(opcode);
                newCode.putInt(newOffset);
                u += 5;
                break;
            case ClassWriter.TABL_INSN:
                // skips 0 to 3 padding bytes
                v = u;
                u = u + 4 - (v & 3);
                // reads and copies instruction
                newCode.putByte(Opcodes.TABLESWITCH);
                newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
                label = v + readInt(b, u);
                u += 4;
                newOffset = getNewOffset(allIndexes, allSizes, v, label);
                newCode.putInt(newOffset);
                j = readInt(b, u);
                u += 4;
                newCode.putInt(j);
                j = readInt(b, u) - j + 1;
                u += 4;
                newCode.putInt(readInt(b, u - 4));
                for (; j > 0; --j) {
                    label = v + readInt(b, u);
                    u += 4;
                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
                    newCode.putInt(newOffset);
                }
                break;
            case ClassWriter.LOOK_INSN:
                // skips 0 to 3 padding bytes
                v = u;
                u = u + 4 - (v & 3);
                // reads and copies instruction
                newCode.putByte(Opcodes.LOOKUPSWITCH);
                newCode.putByteArray(null, 0, (4 - newCode.length % 4) % 4);
                label = v + readInt(b, u);
                u += 4;
                newOffset = getNewOffset(allIndexes, allSizes, v, label);
                newCode.putInt(newOffset);
                j = readInt(b, u);
                u += 4;
                newCode.putInt(j);
                for (; j > 0; --j) {
                    newCode.putInt(readInt(b, u));
                    u += 4;
                    label = v + readInt(b, u);
                    u += 4;
                    newOffset = getNewOffset(allIndexes, allSizes, v, label);
                    newCode.putInt(newOffset);
                }
                break;
            case ClassWriter.WIDE_INSN:
                opcode = b[u + 1] & 0xFF;
                if (opcode == Opcodes.IINC) {
                    newCode.putByteArray(b, u, 6);
                    u += 6;
                } else {
                    newCode.putByteArray(b, u, 4);
                    u += 4;
                }
                break;
            case ClassWriter.VAR_INSN:
            case ClassWriter.SBYTE_INSN:
            case ClassWriter.LDC_INSN:
                newCode.putByteArray(b, u, 2);
                u += 2;
                break;
            case ClassWriter.SHORT_INSN:
            case ClassWriter.LDCW_INSN:
            case ClassWriter.FIELDORMETH_INSN:
            case ClassWriter.TYPE_INSN:
            case ClassWriter.IINC_INSN:
                newCode.putByteArray(b, u, 3);
                u += 3;
                break;
            case ClassWriter.ITFMETH_INSN:
            case ClassWriter.INDYMETH_INSN:
                newCode.putByteArray(b, u, 5);
                u += 5;
                break;
            // case MANA_INSN:
            default:
                newCode.putByteArray(b, u, 4);
                u += 4;
                break;
            }
        }
        // updates the stack map frame labels
        if (compute == FRAMES) {
            Label l = labels;
            while (l != null) {
                /*
                 * Detects the labels that are just after an IF instruction that
                 * has been resized with the IFNOT GOTO_W pattern. These labels
                 * are now the target of a jump instruction (the IFNOT
                 * instruction). Note that we need the original label position
                 * here. getNewOffset must therefore never have been called for
                 * this label.
                 */
                u = l.position - 3;
                if (u >= 0 && resize[u]) {
                    l.status |= Label.TARGET;
                }
                getNewOffset(allIndexes, allSizes, l);
                l = l.successor;
            }
            // Update the offsets in the uninitialized types
            for (i = 0; i < cw.typeTable.length; ++i) {
                Item item = cw.typeTable[i];
                if (item != null && item.type == ClassWriter.TYPE_UNINIT) {
                    item.intVal = getNewOffset(allIndexes, allSizes, 0, item.intVal);
                }
            }
            // The stack map frames are not serialized yet, so we don't need
            // to update them. They will be serialized in visitMaxs.
        } else if (frameCount > 0) {
            /*
             * Resizing an existing stack map frame table is really hard. Not
             * only the table must be parsed to update the offets, but new
             * frames may be needed for jump instructions that were inserted by
             * this method. And updating the offsets or inserting frames can
             * change the format of the following frames, in case of packed
             * frames. In practice the whole table must be recomputed. For this
             * the frames are marked as potentially invalid. This will cause the
             * whole class to be reread and rewritten with the COMPUTE_FRAMES
             * option (see the ClassWriter.toByteArray method). This is not very
             * efficient but is much easier and requires much less code than any
             * other method I can think of.
             */
            cw.invalidFrames = true;
        }
        // updates the exception handler block labels
        Handler h = firstHandler;
        while (h != null) {
            getNewOffset(allIndexes, allSizes, h.start);
            getNewOffset(allIndexes, allSizes, h.end);
            getNewOffset(allIndexes, allSizes, h.handler);
            h = h.next;
        }
        // updates the instructions addresses in the
        // local var and line number tables
        for (i = 0; i < 2; ++i) {
            ByteVector bv = i == 0 ? localVar : localVarType;
            if (bv != null) {
                b = bv.data;
                u = 0;
                while (u < bv.length) {
                    // entry layout: start_pc (2), length (2), ... ; stride 10 bytes
                    label = readUnsignedShort(b, u);
                    newOffset = getNewOffset(allIndexes, allSizes, 0, label);
                    writeShort(b, u, newOffset);
                    label += readUnsignedShort(b, u + 2);
                    newOffset = getNewOffset(allIndexes, allSizes, 0, label) - newOffset;
                    writeShort(b, u + 2, newOffset);
                    u += 10;
                }
            }
        }
        if (lineNumber != null) {
            b = lineNumber.data;
            u = 0;
            while (u < lineNumber.length) {
                writeShort(b, u, getNewOffset(allIndexes, allSizes, 0, readUnsignedShort(b, u)));
                u += 4;
            }
        }
        // updates the labels of the other attributes
        Attribute attr = cattrs;
        while (attr != null) {
            Label[] labels = attr.getLabels();
            if (labels != null) {
                for (i = labels.length - 1; i >= 0; --i) {
                    getNewOffset(allIndexes, allSizes, labels[i]);
                }
            }
            attr = attr.next;
        }
        // replaces old bytecodes with new ones
        code = newCode;
    }
}
public class CmsEditSiteForm { /** * Reads parameter from form . < p > * @ return a Map with Parameter information . */ private Map < String , String > getParameter ( ) { } }
Map < String , String > ret = new TreeMap < String , String > ( ) ; for ( Component c : m_parameter ) { if ( c instanceof CmsRemovableFormRow < ? > ) { String [ ] parameterStringArray = ( ( String ) ( ( CmsRemovableFormRow < ? extends AbstractField < ? > > ) c ) . getInput ( ) . getValue ( ) ) . split ( "=" ) ; ret . put ( parameterStringArray [ 0 ] , parameterStringArray [ 1 ] ) ; } } return ret ;
public class Particle { /** * Setter to set the current positions to be the local best positions . */ public void setParticleLocalBeststoCurrent ( ) { } }
for ( int i = 0 ; i < locations . length ; i ++ ) { particleLocalBests [ i ] = locations [ i ] ; }
public class LongSummaryStatistics { /** * Combines the state of another { @ code LongSummaryStatistics } into this * one . * @ param other another { @ code LongSummaryStatistics } * @ throws NullPointerException if { @ code other } is null */ public void combine ( LongSummaryStatistics other ) { } }
count += other . count ; sum += other . sum ; min = Math . min ( min , other . min ) ; max = Math . max ( max , other . max ) ;
public class CompositeDateFormat { /** * { @ inheritDoc } */ @ Override public StringBuffer format ( final Date date , final StringBuffer toAppendTo , final FieldPosition fieldPosition ) { } }
return ResqueDateFormatThreadLocal . getInstance ( ) . format ( date , toAppendTo , fieldPosition ) ;
public class DisasterRecoveryConfigurationsInner { /** * Creates or updates a disaster recovery configuration . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param disasterRecoveryConfigurationName The name of the disaster recovery configuration to be created / updated . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < DisasterRecoveryConfigurationInner > createOrUpdateAsync ( String resourceGroupName , String serverName , String disasterRecoveryConfigurationName , final ServiceCallback < DisasterRecoveryConfigurationInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( createOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , disasterRecoveryConfigurationName ) , serviceCallback ) ;
public class NetworkWatchersInner { /** * Updates a network watcher tags . * @ param resourceGroupName The name of the resource group . * @ param networkWatcherName The name of the network watcher . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the NetworkWatcherInner object */ public Observable < ServiceResponse < NetworkWatcherInner > > updateTagsWithServiceResponseAsync ( String resourceGroupName , String networkWatcherName ) { } }
if ( resourceGroupName == null ) { throw new IllegalArgumentException ( "Parameter resourceGroupName is required and cannot be null." ) ; } if ( networkWatcherName == null ) { throw new IllegalArgumentException ( "Parameter networkWatcherName is required and cannot be null." ) ; } if ( this . client . subscriptionId ( ) == null ) { throw new IllegalArgumentException ( "Parameter this.client.subscriptionId() is required and cannot be null." ) ; } final String apiVersion = "2018-04-01" ; final Map < String , String > tags = null ; TagsObject parameters = new TagsObject ( ) ; parameters . withTags ( null ) ; return service . updateTags ( resourceGroupName , networkWatcherName , this . client . subscriptionId ( ) , apiVersion , this . client . acceptLanguage ( ) , parameters , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < NetworkWatcherInner > > > ( ) { @ Override public Observable < ServiceResponse < NetworkWatcherInner > > call ( Response < ResponseBody > response ) { try { ServiceResponse < NetworkWatcherInner > clientResponse = updateTagsDelegate ( response ) ; return Observable . just ( clientResponse ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class CachedConnectionManagerImpl {
    /**
     * {@inheritDoc}
     *
     * Called when a user transaction has started. For every transactional
     * connection manager in the current context this merges connection
     * listeners sharing the same credential, enlists one listener per
     * credential in the started transaction, and returns the emptied
     * duplicate listeners to their pool. Failures are rethrown as a
     * {@link SystemException} with the original exception as cause.
     */
    public void userTransactionStarted() throws SystemException {
        Context context = currentContext();
        log.tracef("user tx started, context: %s", context);
        if (context != null) {
            for (org.ironjacamar.core.connectionmanager.ConnectionManager cm : context.getConnectionManagers()) {
                // Only connection managers with transaction support participate.
                if (cm.getTransactionSupport() != TransactionSupportLevel.NoTransaction) {
                    List<org.ironjacamar.core.connectionmanager.listener.ConnectionListener> cls = context.getConnectionListeners(cm);
                    if (!cls.isEmpty()) {
                        // One surviving listener per credential; duplicates get merged into it.
                        Map<Credential, org.ironjacamar.core.connectionmanager.listener.ConnectionListener> enlistmentMap = new HashMap<>();
                        // Listeners emptied by merging, to be released after enlistment.
                        List<org.ironjacamar.core.connectionmanager.listener.ConnectionListener> cleanup = new ArrayList<>();
                        try {
                            for (org.ironjacamar.core.connectionmanager.listener.ConnectionListener cl : cls) {
                                if (enlistmentMap.get(cl.getCredential()) == null) {
                                    enlistmentMap.put(cl.getCredential(), cl);
                                } else {
                                    // Merge: move this listener's connections onto the listener
                                    // already registered for the same credential.
                                    org.ironjacamar.core.connectionmanager.listener.ConnectionListener existing = enlistmentMap.get(cl.getCredential());
                                    for (Object c : cl.getConnections()) {
                                        existing.getManagedConnection().associateConnection(c);
                                        existing.addConnection(c);
                                        context.switchConnectionListener(c, cl, existing);
                                    }
                                    cl.clearConnections();
                                    cleanup.add(cl);
                                }
                            }
                            // Enlist ConnectionListener's
                            for (org.ironjacamar.core.connectionmanager.listener.ConnectionListener cl : enlistmentMap.values()) {
                                if (Tracer.isEnabled()) {
                                    for (Object c : cl.getConnections()) {
                                        Tracer.ccmUserTransaction(cl.getManagedConnectionPool().getPool().getConfiguration().getId(), cl.getManagedConnectionPool(), cl, c, context.toString());
                                    }
                                }
                                cm.transactionStarted(cl);
                            }
                            // Do cleanup
                            for (org.ironjacamar.core.connectionmanager.listener.ConnectionListener cl : cleanup) {
                                context.removeConnectionListener(cm, cl);
                                cm.returnConnectionListener(cl, false);
                            }
                        } catch (Exception e) {
                            // SystemException has no (message, cause) constructor,
                            // so attach the cause explicitly to preserve it.
                            SystemException se = new SystemException();
                            se.initCause(e);
                            throw se;
                        }
                    }
                }
            }
        }
    }
}
public class MembershipHandlerImpl { /** * { @ inheritDoc } */ public Membership findMembership ( String id ) throws Exception { } }
Session session = service . getStorageSession ( ) ; try { return findMembership ( session , id ) . membership ; } finally { session . logout ( ) ; }
public class GenerateCompatibilityGraph { /** * Generate Compatibility Graph Nodes * @ return * @ throws IOException */ protected int compatibilityGraphNodes ( ) throws IOException { } }
compGraphNodes . clear ( ) ; List < IAtom > basicAtomVecA = null ; List < IAtom > basicAtomVecB = null ; IAtomContainer reactant = source ; IAtomContainer product = target ; basicAtomVecA = reduceAtomSet ( reactant ) ; basicAtomVecB = reduceAtomSet ( product ) ; List < List < Integer > > labelListMolA = labelAtoms ( reactant ) ; List < List < Integer > > labelListMolB = labelAtoms ( product ) ; int molANodes = 0 ; int countNodes = 1 ; for ( List < Integer > labelA : labelListMolA ) { int molBNodes = 0 ; for ( List < Integer > labelB : labelListMolB ) { if ( labelA . equals ( labelB ) ) { compGraphNodes . add ( reactant . indexOf ( basicAtomVecA . get ( molANodes ) ) ) ; compGraphNodes . add ( product . indexOf ( basicAtomVecB . get ( molBNodes ) ) ) ; compGraphNodes . add ( countNodes ++ ) ; } molBNodes ++ ; } molANodes ++ ; } return 0 ;
import java.util.*;

class Main {
    /**
     * Finds the first character that appears more than once in the given
     * string, scanning left to right.
     *
     * Examples:
     *   findFirstDuplicateCharacter("abcabc")  -> "a"
     *   findFirstDuplicateCharacter("abc")     -> "None"
     *   findFirstDuplicateCharacter("123123")  -> "1"
     *
     * @param inputStr the string in which to find the first duplicate character
     * @return the first repeated character as a one-character string, or
     *         "None" if no repetitions are found
     */
    public static String findFirstDuplicateCharacter(String inputStr) {
        // A Set suffices: we only need membership, not counts
        // (the previous Map's values were never read).
        Set<Character> seen = new HashSet<>();
        for (char c : inputStr.toCharArray()) {
            if (!seen.add(c)) {
                // add() returns false when the character was already present.
                return Character.toString(c);
            }
        }
        return "None";
    }

    public static void main(String[] args) {
        System.out.println(findFirstDuplicateCharacter("abcabc"));
        System.out.println(findFirstDuplicateCharacter("abc")); // "None"
        System.out.println(findFirstDuplicateCharacter("123123"));
    }
}
public class LocationHelper {
    /**
     * Remove the listener to receive location updates.
     *
     * If no fix was obtained while listening, falls back to the last known
     * GPS (then network) location. No-op when localization was never started.
     */
    @RequiresPermission(anyOf = {Manifest.permission.ACCESS_COARSE_LOCATION, Manifest.permission.ACCESS_FINE_LOCATION})
    public synchronized void stopLocalization() {
        if (started) {
            // Cancel the scheduled timeout alarm and detach this listener.
            AlarmUtils.cancelAlarm(getCancelPendingIntent());
            locationManager.removeUpdates(this);
            if (location == null) {
                // No fix received: fall back to cached providers, GPS first.
                location = locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
                if (location == null) {
                    location = locationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
                }
                locationTime = DateUtils.nowMillis();
            }
            started = false;
            LOGGER.info("Localization stopped");
        }
        // NOTE(review): this chunk appears truncated — the closing braces of the
        // method and class are missing in the visible source.
public class BaseCustomDfuImpl { /** * Writes the Init packet to the characteristic . This method is SYNCHRONOUS and wait until the * { @ link android . bluetooth . BluetoothGattCallback # onCharacteristicWrite ( android . bluetooth . BluetoothGatt , android . bluetooth . BluetoothGattCharacteristic , int ) } * will be called or the device gets disconnected . If connection state will change , * or an error will occur , an exception will be thrown . * @ param characteristic the characteristic to write to . Should be the DFU PACKET . * @ param buffer the init packet as a byte array . * @ param size the init packet size . * @ throws DeviceDisconnectedException Thrown when the device will disconnect in the middle of the transmission . * @ throws DfuException Thrown if DFU error occur . * @ throws UploadAbortedException Thrown if DFU operation was aborted by user . */ private void writeInitPacket ( final BluetoothGattCharacteristic characteristic , final byte [ ] buffer , final int size ) throws DeviceDisconnectedException , DfuException , UploadAbortedException { } }
if ( mAborted ) throw new UploadAbortedException ( ) ; byte [ ] locBuffer = buffer ; if ( buffer . length != size ) { locBuffer = new byte [ size ] ; System . arraycopy ( buffer , 0 , locBuffer , 0 , size ) ; } mReceivedData = null ; mError = 0 ; mInitPacketInProgress = true ; characteristic . setWriteType ( BluetoothGattCharacteristic . WRITE_TYPE_NO_RESPONSE ) ; characteristic . setValue ( locBuffer ) ; logi ( "Sending init packet (Value = " + parse ( locBuffer ) + ")" ) ; mService . sendLogBroadcast ( DfuBaseService . LOG_LEVEL_VERBOSE , "Writing to characteristic " + characteristic . getUuid ( ) ) ; mService . sendLogBroadcast ( DfuBaseService . LOG_LEVEL_DEBUG , "gatt.writeCharacteristic(" + characteristic . getUuid ( ) + ")" ) ; mGatt . writeCharacteristic ( characteristic ) ; // We have to wait for confirmation try { synchronized ( mLock ) { while ( ( mInitPacketInProgress && mConnected && mError == 0 ) || mPaused ) mLock . wait ( ) ; } } catch ( final InterruptedException e ) { loge ( "Sleeping interrupted" , e ) ; } if ( ! mConnected ) throw new DeviceDisconnectedException ( "Unable to write Init DFU Parameters: device disconnected" ) ; if ( mError != 0 ) throw new DfuException ( "Unable to write Init DFU Parameters" , mError ) ;
public class AtomicVariableWidthArray { /** * Gets an array containing all the values in the array . The returned values are not guaranteed to be from the same time instant . < br > If an array is provided and it is the correct length , then * that array will be used as the destination array . * @ param array the provided array * @ return an array containing the values in the array */ public final int [ ] getArray ( int [ ] array ) { } }
if ( array == null || array . length != length ( ) ) { array = new int [ length ( ) ] ; } for ( int i = 0 ; i < length ( ) ; i ++ ) { array [ i ] = get ( i ) ; } return array ;
public class StringHelper {
    /**
     * Split the provided string by the provided separator and invoke the consumer
     * for each matched element.
     *
     * @param sSep
     *        The separator to use. May not be <code>null</code>.
     * @param sElements
     *        The concatenated String to convert. May be <code>null</code> or empty.
     * @param aConsumer
     *        The non-<code>null</code> consumer that is invoked for each exploded
     *        element
     */
    public static void explode(@Nonnull final String sSep, @Nullable final String sElements, @Nonnull final Consumer<? super String> aConsumer) {
        // Delegates with a max-items argument of -1, i.e. no limit on the
        // number of exploded elements — TODO confirm against the overload.
        explode(sSep, sElements, -1, aConsumer);
    }
}
public class DispatcherServlet { /** * Do HTTP response . * @ param context { @ link RequestContext } */ public static void result ( final RequestContext context ) { } }
final HttpServletResponse response = context . getResponse ( ) ; if ( response . isCommitted ( ) ) { // Response sends redirect or error return ; } AbstractResponseRenderer renderer = context . getRenderer ( ) ; if ( null == renderer ) { renderer = new Http404Renderer ( ) ; } renderer . render ( context ) ;
public class PullRequest { /** * The targets of the pull request , including the source branch and destination branch for the pull request . * @ param pullRequestTargets * The targets of the pull request , including the source branch and destination branch for the pull request . */ public void setPullRequestTargets ( java . util . Collection < PullRequestTarget > pullRequestTargets ) { } }
if ( pullRequestTargets == null ) { this . pullRequestTargets = null ; return ; } this . pullRequestTargets = new java . util . ArrayList < PullRequestTarget > ( pullRequestTargets ) ;
public class RelationalDatabaseSpec { /** * You can specify alternative versions of the same entity by creating * sequential entity specs with the same name , different column specs for * one or more properties or references , and mutually exclusive constraint * specs . One example of where this is useful is inpatient versus clinic * hospital encounters , for which dates might be stored differently . * @ param constantParameterSpecs */ private void setConstantSpecs ( EntitySpec [ ] constantParameterSpecs ) { } }
if ( constantParameterSpecs == null ) { this . constantSpecs = EMPTY_ES_ARR ; } else { this . constantSpecs = constantParameterSpecs . clone ( ) ; }
public class RESTClient {
    /**
     * Performs PUT request.
     *
     * @param path Request path.
     * @return the response of the PUT request
     * @throws IOException If error during HTTP connection or entity parsing occurs.
     * @throws RESTException If HTTP response code is non OK.
     */
    public Response put(String path) throws IOException, RESTException {
        // Delegates to the full overload; the two nulls presumably mean
        // "no headers" and "no body" — TODO confirm against the overload.
        return put(path, null, null);
    }
}
public class JsonBuiltin { /** * - - - PRIVATE PARSER METHODS - - - */ protected static final Object parseNext ( Source src ) throws IOException { } }
skipWhitespaces ( src ) ; switch ( src . ch ) { case '"' : return parseString ( src ) ; case 't' : src . idx += 4 ; return Boolean . TRUE ; case 'f' : src . idx += 5 ; return Boolean . FALSE ; case 'n' : src . idx += 4 ; return null ; case '[' : return parseList ( src ) ; case '{' : return parseMap ( src ) ; case '0' : case '1' : case '2' : case '3' : case '4' : case '5' : case '6' : case '7' : case '8' : case '9' : case '-' : return parseNumber ( src ) ; default : throw new IOException ( "Unable to determine the next character, it is not a string, number, array, or object!" ) ; }
public class TwillModule {
    /**
     * Provider method for instantiating {@link org.apache.twill.yarn.YarnTwillRunnerService}.
     * Builds the ZooKeeper connect string from the configured quorum plus the Twill
     * namespace, copies the YARN config with the reserved-memory setting applied,
     * and configures container JVM options.
     */
    @Singleton
    @Provides
    private YarnTwillRunnerService provideYarnTwillRunnerService(CConfiguration configuration, YarnConfiguration yarnConfiguration, LocationFactory locationFactory) {
        // ZK connection string: quorum host list followed by the Twill namespace path.
        String zkConnectStr = configuration.get(Constants.Zookeeper.QUORUM) + configuration.get(Constants.CFG_TWILL_ZK_NAMESPACE);
        // Copy the yarn config and propagate the Twill reserved-memory setting.
        YarnConfiguration yarnConfig = new YarnConfiguration(yarnConfiguration);
        yarnConfig.set(Constants.CFG_TWILL_RESERVED_MEMORY_MB, configuration.get(Constants.CFG_TWILL_RESERVED_MEMORY_MB));
        // All Twill locations are namespaced under "twill" in the location factory.
        YarnTwillRunnerService runner = new YarnTwillRunnerService(yarnConfig, zkConnectStr, LocationFactories.namespace(locationFactory, "twill"));
        // Set JVM options based on configuration
        runner.setJVMOptions(configuration.get(Constants.Container.PROGRAM_JVM_OPTS));
        return runner;
    }
}
public class ControlMessageFactoryImpl {
    /**
     * Create a new, empty ControlAck message
     *
     * @return The new ControlAck
     * @exception MessageCreateFailedException Thrown if such a message can not be created
     */
    public final ControlAck createNewControlAck() throws MessageCreateFailedException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createNewControlAck");
        ControlAck msg = null;
        try {
            // CONSTRUCTOR_NO_OP presumably builds an empty message skeleton — TODO confirm.
            msg = new ControlAckImpl(MfpConstants.CONSTRUCTOR_NO_OP);
        } catch (MessageDecodeFailedException e) {
            /* No need to FFDC this as JsMsgObject will already have done so */
            // No FFDC code needed
            throw new MessageCreateFailedException(e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createNewControlAck");
        return msg;
    }
}
public class ServerImpl { /** * Update the receive connecting state . * @ param client The current client . * @ param buffer The data buffer . * @ param from The id from . * @ param expected The expected client state . * @ throws IOException If error . */ private void receiveConnecting ( ClientSocket client , DataInputStream buffer , byte from , StateConnection expected ) throws IOException { } }
if ( ServerImpl . checkValidity ( client , from , expected ) ) { // Receive the name final byte [ ] name = new byte [ buffer . readByte ( ) ] ; if ( buffer . read ( name ) == - 1 ) { throw new IOException ( "Unable to read client name !" ) ; } client . setName ( new String ( name , NetworkMessage . CHARSET ) ) ; // Send new state client . setState ( StateConnection . CONNECTED ) ; client . getOut ( ) . writeByte ( NetworkMessageSystemId . CONNECTED ) ; client . getOut ( ) . writeByte ( client . getId ( ) ) ; client . getOut ( ) . writeByte ( clientsNumber - 1 ) ; // Send the list of other clients for ( final ClientSocket other : clients . values ( ) ) { if ( other . getId ( ) != from ) { ServerImpl . writeIdAndName ( client , other . getId ( ) , other . getName ( ) ) ; } } // Send message of the day if has if ( messageOfTheDay != null ) { final byte [ ] motd = messageOfTheDay . getBytes ( NetworkMessage . CHARSET ) ; client . getOut ( ) . writeByte ( motd . length ) ; client . getOut ( ) . write ( motd ) ; } // Send client . getOut ( ) . flush ( ) ; }
public class ConfigCheckReport {
    /**
     * Use {@link #getWarnsMap()} instead.
     *
     * @return the warning map, identical to {@link #getWarnsMap()}
     * @deprecated kept only for backward compatibility; delegates to {@link #getWarnsMap()}
     */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, alluxio.grpc.InconsistentProperties> getWarns() {
        return getWarnsMap();
    }
}
public class EJBControlImpl { /** * Find the method which has the same signature in the specified class . * @ param controlBeanMethod Method signature find . * @ param ejbInterface Class to search for method signature . * @ return Method from ejbInterface if found , null if not found . */ protected Method findEjbMethod ( Method controlBeanMethod , Class ejbInterface ) { } }
final String cbMethodName = controlBeanMethod . getName ( ) ; final Class cbMethodReturnType = controlBeanMethod . getReturnType ( ) ; final Class [ ] cbMethodParams = controlBeanMethod . getParameterTypes ( ) ; Method [ ] ejbMethods = ejbInterface . getMethods ( ) ; for ( Method m : ejbMethods ) { if ( ! cbMethodName . equals ( m . getName ( ) ) || ! cbMethodReturnType . equals ( m . getReturnType ( ) ) ) { continue ; } Class [ ] params = m . getParameterTypes ( ) ; if ( cbMethodParams . length == params . length ) { int i ; for ( i = 0 ; i < cbMethodParams . length ; i ++ ) { if ( cbMethodParams [ i ] != params [ i ] ) break ; } if ( i == cbMethodParams . length ) return m ; } } return null ;
public class DatabasesInner {
    /**
     * Returns the list of databases of the given Kusto cluster.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;DatabaseInner&gt; object
     */
    public Observable<ServiceResponse<List<DatabaseInner>>> listByClusterWithServiceResponseAsync(String resourceGroupName, String clusterName) {
        // Validate required call parameters and client-level settings up front.
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        // Issue the REST call, then unwrap the paged body into a plain list response.
        return service.listByCluster(resourceGroupName, clusterName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()).flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<DatabaseInner>>>>() {
            @Override
            public Observable<ServiceResponse<List<DatabaseInner>>> call(Response<ResponseBody> response) {
                try {
                    // Deserialization and HTTP-error mapping live in the generated delegate.
                    ServiceResponse<PageImpl<DatabaseInner>> result = listByClusterDelegate(response);
                    List<DatabaseInner> items = null;
                    if (result.body() != null) {
                        items = result.body().items();
                    }
                    ServiceResponse<List<DatabaseInner>> clientResponse = new ServiceResponse<List<DatabaseInner>>(items, result.response());
                    return Observable.just(clientResponse);
                } catch (Throwable t) {
                    // Surface any parsing failure through the observable instead of throwing.
                    return Observable.error(t);
                }
            }
        });
    }
}
public class InvokeOnPartitions {
    /**
     * Executes all the operations on the partitions.
     * Single-shot: invoking a second time on the same instance trips the assertion.
     *
     * @return future completing with the per-partition results keyed by partition id
     */
    @SuppressWarnings("unchecked")
    <T> ICompletableFuture<Map<Integer, T>> invokeAsync() {
        assert !invoked : "already invoked";
        invoked = true;
        // Invoking from a partition-operation thread is disallowed —
        // presumably a deadlock guard; TODO confirm rationale.
        ensureNotCallingFromPartitionOperationThread();
        invokeOnAllPartitions();
        return future;
    }
}
public class GraphUtils { /** * Find the leave vertices in the graph . I . E . Vertices that have no outgoing edges * @ param graph graph to search * @ return mutable snapshot of all leaf vertices . */ public static < V > Set < V > getLeafVertices ( DirectedGraph < V , DefaultEdge > graph ) { } }
Set < V > vertexSet = graph . vertexSet ( ) ; Set < V > leaves = new HashSet < V > ( vertexSet . size ( ) * 2 ) ; for ( V vertex : vertexSet ) { if ( graph . outgoingEdgesOf ( vertex ) . isEmpty ( ) ) { leaves . add ( vertex ) ; } } return leaves ;
public class GenericUtils {
    /**
     * Take an input byte[] and return the long translation. For example, the
     * byte[] '0053' would return 53. Trailing whitespace is ignored and a
     * leading DASH makes the result negative; any other non-digit throws.
     *
     * @param array source bytes (ASCII digits, optional leading '-')
     * @param offset into the array to start at
     * @param length number of bytes to review
     * @return long parsed value, or -1 when array is null or offset is past the end
     * @throws NumberFormatException (if the data contains invalid digits)
     */
    static public long asLongValue(byte[] array, int offset, int length) {
        if (null == array || array.length <= offset) {
            return -1L;
        }
        long longVal = 0;
        long mark = 1;
        int digit;
        int i = offset + length - 1;
        // ignore trailing whitespace
        for (; offset <= i; i--) {
            char c = (char) array[i];
            if (BNFHeaders.SPACE != c && BNFHeaders.TAB != c) {
                break;
            }
        }
        // Walk right-to-left accumulating digit * 10^position.
        // NOTE(review): no overflow protection — inputs wider than a long wrap
        // silently; confirm callers bound 'length'.
        for (; offset <= i; i--) {
            digit = array[i] - ZERO;
            if (0 > digit || 9 < digit) {
                // stop on any nondigit, if it's not a DASH then throw an exc
                if (DASH != array[i]) {
                    throw new NumberFormatException("Invalid digit: " + array[i]);
                }
                break;
            }
            longVal += digit * mark;
            mark *= 10;
        }
        // check for negative numbers
        if (offset <= i && array[i] == DASH) {
            longVal = -longVal;
        }
        return longVal;
    }
}
public class LBiObjBytePredicateBuilder { /** * One of ways of creating builder . This might be the only way ( considering all _ functional _ builders ) that might be utilize to specify generic params only once . */ @ Nonnull public static < T1 , T2 > LBiObjBytePredicateBuilder < T1 , T2 > biObjBytePredicate ( Consumer < LBiObjBytePredicate < T1 , T2 > > consumer ) { } }
return new LBiObjBytePredicateBuilder ( consumer ) ;
public class CPDefinitionLinkPersistenceImpl {
    /**
     * Returns the last cp definition link in the ordered set where CPDefinitionId = &#63;.
     *
     * @param CPDefinitionId the cp definition ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the last matching cp definition link
     * @throws NoSuchCPDefinitionLinkException if a matching cp definition link could not be found
     */
    @Override
    public CPDefinitionLink findByCPDefinitionId_Last(long CPDefinitionId, OrderByComparator<CPDefinitionLink> orderByComparator) throws NoSuchCPDefinitionLinkException {
        CPDefinitionLink cpDefinitionLink = fetchByCPDefinitionId_Last(CPDefinitionId, orderByComparator);
        if (cpDefinitionLink != null) {
            return cpDefinitionLink;
        }
        // Build the "no such entity" message; the key-prefix constant presumably
        // supplies the opening "{" matching the closing brace below — TODO confirm.
        StringBundler msg = new StringBundler(4);
        msg.append(_NO_SUCH_ENTITY_WITH_KEY);
        msg.append("CPDefinitionId=");
        msg.append(CPDefinitionId);
        msg.append("}");
        throw new NoSuchCPDefinitionLinkException(msg.toString());
    }
}
public class FloatingPointBitsConverterUtil {
    /**
     * Converts a float value to an int whose signed ordering matches the
     * natural ordering of the original float values, so results can be
     * compared and sorted as plain ints.
     *
     * @param value the float to convert
     * @return a sortable int encoding of the float's bits
     * @see #doubleToSortableLong(double)
     */
    public static int floatToSortableInt(float value) {
        final int rawBits = Float.floatToIntBits(value);
        // For negatives the arithmetic shift yields all ones; masking with
        // Integer.MAX_VALUE flips the 31 magnitude bits while keeping the sign bit.
        final int magnitudeFlip = (rawBits >> 31) & Integer.MAX_VALUE;
        return rawBits ^ magnitudeFlip;
    }
}
public class CaffeineCache {
    /**
     * {@inheritDoc}
     * Fails fast when this cache was built without a loader, since a loading
     * lookup would be meaningless in that configuration.
     */
    @Override
    public V get(K key) {
        // 'loading' is presumably set at construction when a loader was supplied — TODO confirm.
        U.must(loading, "No loader was specified for this cache. Please specify one or use getIfExists()!");
        return loadingCache.get(key);
    }
}
public class Context {
    /**
     * Create an array with a specified initial length.
     *
     * @param scope the scope to create the object in
     * @param length the initial length (JavaScript arrays may have
     * additional properties added dynamically).
     * @return the new array object
     */
    public Scriptable newArray(Scriptable scope, int length) {
        NativeArray result = new NativeArray(length);
        // Wire the new array to the scope's built-in Array prototype and parent.
        ScriptRuntime.setBuiltinProtoAndParent(result, scope, TopLevel.Builtins.Array);
        return result;
    }
}
public class HashFunctions { /** * Fowler - Noll - Vo 64 bit hash ( FNV - 1a ) for bytes array . < br / > < p / > < h3 > Algorithm < / h3 > < p / > * < pre > * hash = offset _ basis * for each octet _ of _ data to be hashed * hash = hash xor octet _ of _ data * hash = hash * FNV _ prime * return hash < / pre > * < h3 > Links < / h3 > < a href = " http : / / www . isthe . com / chongo / tech / comp / fnv / " > http : / / www . isthe . com / chongo / tech / comp / fnv / < / a > < br / > * < a href = " http : / / en . wikipedia . org / wiki / Fowler % E2%80%93Noll % E2%80%93Vo _ hash _ function " > http : / / en . wikipedia . org / wiki / Fowler % E2%80%93Noll % E2%80%93Vo _ hash _ function < / a > < br / > * @ param bytes bytes array to hash * @ return hash 64 bit hash */ public static long FVN64hash ( byte [ ] bytes ) { } }
long hash = FNV_BASIS ; for ( int i = 0 ; i < bytes . length ; i ++ ) { hash ^= 0xFF & bytes [ i ] ; hash *= FNV_PRIME_64 ; } return hash ;
public class InternalXtextParser {
    /**
     * InternalXtext.g:3407:1: entryRuleWildcard returns [EObject current=null] : iv_ruleWildcard= ruleWildcard EOF ;
     * Generated ANTLR entry rule: parses a complete Wildcard followed by EOF.
     *
     * @return the parsed Wildcard model object, or null when nothing was produced
     * @throws RecognitionException on unrecoverable parse errors (recoverable ones are reported and resynced)
     */
    public final EObject entryRuleWildcard() throws RecognitionException {
        EObject current = null;
        EObject iv_ruleWildcard = null;
        try {
            // InternalXtext.g:3407:49: (iv_ruleWildcard= ruleWildcard EOF )
            // InternalXtext.g:3408:2: iv_ruleWildcard= ruleWildcard EOF
            {
                newCompositeNode(grammarAccess.getWildcardRule());
                pushFollow(FollowSets000.FOLLOW_1);
                iv_ruleWildcard = ruleWildcard();
                state._fsp--;
                current = iv_ruleWildcard;
                match(input, EOF, FollowSets000.FOLLOW_2);
            }
        } catch (RecognitionException re) {
            // Standard generated recovery: report, resync, and fall through.
            recover(input, re);
            appendSkippedTokens();
        } finally {
        }
        return current;
    }
}
public class SecurityDomain {
    /**
     * Sets the System environment to support the Security Domain's settings. This is used for
     * protocols which only read their crypto settings via environment variables.
     * <br>Also see {@link #restoreSystemEnvironment()}
     */
    public void setDomainEnvironment() {
        // Snapshot the current properties so restoreSystemEnvironment() can undo the changes.
        systemProperties = System.getProperties();
        // Only apply when the opt-in flag property is present.
        if (System.getProperty(SET_DOMAIN_ENVIRONMENT) != null) {
            logger.debug("Setting System environment properties to Security Domain values");
            for (int i = 0; i < ENVNAMES.length; i++) {
                // Each known env name is set from (or cleared against) the domain properties.
                setOrClearSystemProperties(ENVNAMES[i], domainProperties);
            }
        }
    }
}
public class ManagedCustomerPage {
    /**
     * Gets the links value for this ManagedCustomerPage.
     *
     * @return links
     *         Links between manager and client customers.
     */
    public com.google.api.ads.adwords.axis.v201809.mcm.ManagedCustomerLink[] getLinks() {
        // Plain accessor: exposes the internal array without a defensive copy.
        return links;
    }
}
public class VorbisAudioFileReader { /** * Return the AudioInputStream from the given File . * @ return * @ throws javax . sound . sampled . UnsupportedAudioFileException * @ throws java . io . IOException */ @ Override public AudioInputStream getAudioInputStream ( File file ) throws UnsupportedAudioFileException , IOException { } }
LOG . log ( Level . FINE , "getAudioInputStream(File file)" ) ; InputStream inputStream = new FileInputStream ( file ) ; try { return getAudioInputStream ( inputStream ) ; } catch ( UnsupportedAudioFileException | IOException e ) { inputStream . close ( ) ; throw e ; }
public class Handler {
    /**
     * If there is no matching path, the OrchestrationHandler is going to try to start the defaultHandlers.
     * If there are default handlers defined, store the chain id within the exchange.
     * Otherwise return false.
     *
     * @param httpServerExchange
     *        The current request's server exchange.
     * @return true if a default handler chain has been defined; false otherwise.
     */
    public static boolean startDefaultHandlers(HttpServerExchange httpServerExchange) {
        // Guard clause: nothing configured means nothing to start.
        if (defaultHandlers == null || defaultHandlers.isEmpty()) {
            return false;
        }
        // Record which chain is running and reset its position to the first handler.
        httpServerExchange.putAttachment(CHAIN_ID, "defaultHandlers");
        httpServerExchange.putAttachment(CHAIN_SEQ, 0);
        return true;
    }
}
public class QuantilesCallback { /** * Returns the buckets attribute and sample them . */ public static BucketsSample sampleBuckets ( Stopwatch stopwatch ) { } }
final Buckets buckets = getBuckets ( stopwatch ) ; return buckets == null ? null : buckets . sample ( ) ;
public class SVGParser {
    /**
     * Converts an HSL colour to a packed 0xRRGGBB int.
     * Hue (degrees), saturation [0, 100], lightness [0, 100]
     */
    private static int hslToRgb(float hue, float sat, float light) {
        hue = (hue >= 0f) ? hue % 360f : (hue % 360f) + 360f; // positive modulo (ie. -10 => 350)
        hue /= 60f; // [0, 360] -> [0, 6]
        sat /= 100; // [0, 100] -> [0, 1]
        light /= 100; // [0, 100] -> [0, 1]
        // Clamp out-of-range inputs instead of rejecting them.
        sat = (sat < 0f) ? 0f : (sat > 1f) ? 1f : sat;
        light = (light < 0f) ? 0f : (light > 1f) ? 1f : light;
        // Intermediate terms of the CSS3 HSL->RGB conversion.
        float t1, t2;
        if (light <= 0.5f) {
            t2 = light * (sat + 1f);
        } else {
            t2 = light + sat - (light * sat);
        }
        t1 = light * 2f - t2;
        // Channel hue offsets of +/-2 correspond to +/-120 degrees on the hue wheel.
        float r = hueToRgb(t1, t2, hue + 2f);
        float g = hueToRgb(t1, t2, hue);
        float b = hueToRgb(t1, t2, hue - 2f);
        // NOTE(review): channels scale by 256 (not 255), presumably relying on
        // clamp255 to cap the top value — confirm intended rounding behaviour.
        return clamp255(r * 256f) << 16 | clamp255(g * 256f) << 8 | clamp255(b * 256f);
    }
}
public class InternalXbaseWithAnnotationsParser {
    /**
     * InternalXbaseWithAnnotations.g:92:1: ruleXAnnotationElementValuePair : ( ( rule__XAnnotationElementValuePair__Group__0 ) ) ;
     * Generated ANTLR rule: delegates to the group rule, firing before/after
     * events only when the parser is not backtracking.
     */
    public final void ruleXAnnotationElementValuePair() throws RecognitionException {
        int stackSize = keepStackSize();
        try {
            // InternalXbaseWithAnnotations.g:96:2: ( ( ( rule__XAnnotationElementValuePair__Group__0 ) ) )
            // InternalXbaseWithAnnotations.g:97:2: ( ( rule__XAnnotationElementValuePair__Group__0 ) )
            {
                // InternalXbaseWithAnnotations.g:97:2: ( ( rule__XAnnotationElementValuePair__Group__0 ) )
                // InternalXbaseWithAnnotations.g:98:3: ( rule__XAnnotationElementValuePair__Group__0 )
                {
                    if (state.backtracking == 0) {
                        before(grammarAccess.getXAnnotationElementValuePairAccess().getGroup());
                    }
                    // InternalXbaseWithAnnotations.g:99:3: ( rule__XAnnotationElementValuePair__Group__0 )
                    // InternalXbaseWithAnnotations.g:99:4: rule__XAnnotationElementValuePair__Group__0
                    {
                        pushFollow(FOLLOW_2);
                        rule__XAnnotationElementValuePair__Group__0();
                        state._fsp--;
                        if (state.failed) return;
                    }
                    if (state.backtracking == 0) {
                        after(grammarAccess.getXAnnotationElementValuePairAccess().getGroup());
                    }
                }
            }
        } catch (RecognitionException re) {
            // Standard generated recovery: report and resync.
            reportError(re);
            recover(input, re);
        } finally {
            restoreStackSize(stackSize);
        }
        return;
    }
}
public class OWLDataAllValuesFromImpl_CustomFieldSerializer {
    /**
     * Deserializes the content of the object from the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamReader}.
     *
     * @param streamReader the {@link com.google.gwt.user.client.rpc.SerializationStreamReader} to read the
     *        object's content from
     * @param instance the object instance to deserialize
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *         if the deserialization operation is not successful
     */
    @Override
    public void deserializeInstance(SerializationStreamReader streamReader, OWLDataAllValuesFromImpl instance) throws SerializationException {
        // Delegates to the static helper shared by this custom serializer.
        deserialize(streamReader, instance);
    }
}
public class RestTemplateBuilder {
    /**
     * Set a root URL that should be applied to each request that starts with {@code '/'}.
     * See {@link RootUriTemplateHandler} for details.
     * Builders are immutable: a new instance is returned with only rootUri replaced.
     *
     * @param rootUri the root URI or {@code null}
     * @return a new builder instance
     */
    public RestTemplateBuilder rootUri(String rootUri) {
        // Copy every other setting verbatim into the new builder.
        return new RestTemplateBuilder(this.detectRequestFactory, rootUri, this.messageConverters, this.requestFactorySupplier, this.uriTemplateHandler, this.errorHandler, this.basicAuthentication, this.restTemplateCustomizers, this.requestFactoryCustomizer, this.interceptors);
    }
}
public class CommandLineCompiler {
    /**
     * Adds command-line arguments for include directories.
     * If relativeArgs is not null, corresponding relative-path include switches
     * are added to that vector (for use in building a configuration identifier
     * that is consistent between machines).
     *
     * @param baseDirPath
     *        Base directory path.
     * @param includeDirs
     *        Array of include directory paths
     * @param args
     *        Vector of command line arguments used to execute the task
     * @param relativeArgs
     *        Vector of command line arguments used to build the
     *        configuration identifier
     */
    protected void addIncludes(final String baseDirPath, final File[] includeDirs, final Vector<String> args, final Vector<String> relativeArgs, final StringBuffer includePathId, final boolean isSystem) {
        for (final File includeDir : includeDirs) {
            // The absolute form always goes on the actual command line.
            args.addElement(getIncludeDirSwitch(includeDir.getAbsolutePath(), isSystem));
            if (relativeArgs != null) {
                // The relative form keeps the configuration id machine-independent.
                final String relative = CUtil.getRelativePath(baseDirPath, includeDir);
                relativeArgs.addElement(getIncludeDirSwitch(relative, isSystem));
                if (includePathId != null) {
                    // First entry carries no leading space; later ones are space-separated.
                    if (includePathId.length() == 0) {
                        includePathId.append("/I");
                    } else {
                        includePathId.append(" /I");
                    }
                    includePathId.append(relative);
                }
            }
        }
    }
}
public class Log4j2Log { /** * 打印日志 < br > * 此方法用于兼容底层日志实现 , 通过传入当前包装类名 , 以解决打印日志中行号错误问题 * @ param fqcn 完全限定类名 ( Fully Qualified Class Name ) , 用于纠正定位错误行号 * @ param level 日志级别 , 使用org . apache . logging . log4j . Level中的常量 * @ param t 异常 * @ param msgTemplate 消息模板 * @ param arguments 参数 * @ return 是否支持 LocationAwareLogger对象 , 如果不支持需要日志方法调用被包装类的相应方法 */ private boolean logIfEnabled ( String fqcn , Level level , Throwable t , String msgTemplate , Object ... arguments ) { } }
if ( this . logger instanceof AbstractLogger ) { ( ( AbstractLogger ) this . logger ) . logIfEnabled ( fqcn , level , null , StrUtil . format ( msgTemplate , arguments ) , t ) ; return true ; } else { return false ; }
public class FA {
    /**
     * Inserts a new state into the automaton. New states are
     * allocated at the end.
     *
     * @param label label for the state created.
     * @return the index of the newly added state
     */
    public int add(Object label) {
        // NOTE(review): ensureCapacity is called with the index about to be
        // written, not used + 1 — confirm its contract guarantees room for
        // states[used].
        ensureCapacity(used);
        states[used] = new State(label);
        // Return the slot just filled, then advance the size counter.
        return used++;
    }
}
public class InputProcessingConfigurationUpdateMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param inputProcessingConfigurationUpdate the model object to marshall; must not be null
     * @param protocolMarshaller destination protocol marshaller
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(InputProcessingConfigurationUpdate inputProcessingConfigurationUpdate, ProtocolMarshaller protocolMarshaller) {
        if (inputProcessingConfigurationUpdate == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(inputProcessingConfigurationUpdate.getInputLambdaProcessorUpdate(), INPUTLAMBDAPROCESSORUPDATE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class PostLogin {
    /**
     * {@inheritDoc}
     * Parses the text as XML and scans the tree via findContent; the original
     * text is returned unchanged.
     */
    @Override
    public String processAllReturningText(final String s) {
        XmlElement root = XmlConverter.getRootElement(s);
        // Called for its side effects only — presumably it extracts login
        // content from the parsed tree; TODO confirm.
        findContent(root);
        return s;
    }
}
public class DefaultGroovyMethods { /** * Allows a Map to be iterated through using a closure . If the * closure takes one parameter then it will be passed the Map . Entry * otherwise if the closure takes two parameters then it will be * passed the key and the value . * < pre class = " groovyTestCase " > def result = " " * [ a : 1 , b : 3 ] . each { key , value { @ code - > } result + = " $ key $ value " } * assert result = = " a1b3 " < / pre > * < pre class = " groovyTestCase " > def result = " " * [ a : 1 , b : 3 ] . each { entry { @ code - > } result + = entry } * assert result = = " a = 1b = 3 " < / pre > * In general , the order in which the map contents are processed * cannot be guaranteed . In practise , specialized forms of Map , * e . g . a TreeMap will have its contents processed according to * the natural ordering of the map . * @ param self the map over which we iterate * @ param closure the 1 or 2 arg closure applied on each entry of the map * @ return returns the self parameter * @ since 1.5.0 */ public static < K , V > Map < K , V > each ( Map < K , V > self , @ ClosureParams ( MapEntryOrKeyValue . class ) Closure closure ) { } }
for ( Map . Entry entry : self . entrySet ( ) ) { callClosureForMapEntry ( closure , entry ) ; } return self ;
public class SessionListener {
    /**
     * {@inheritDoc}
     * Keeps the session statistics consistent when a session is passivated;
     * passivation is treated like a removal for counting purposes.
     */
    @Override
    public void sessionWillPassivate(HttpSessionEvent event) {
        if (!instanceEnabled) {
            return;
        }
        // for getSessionCount: keep the live-session counter accurate
        SESSION_COUNT.decrementAndGet();
        // for invalidateAllSession: drop the session from the tracked set
        removeSession(event.getSession());
    }
}
public class CommerceCountryPersistenceImpl {
    /**
     * Returns a range of all the commerce countries.
     * Useful when paginating results. Returns a maximum of <code>end - start</code> instances.
     * <code>start</code> and <code>end</code> are not primary keys, they are indexes in the
     * result set; setting both to {@link QueryUtil#ALL_POS} returns the full result set.
     *
     * @param start the lower bound of the range of commerce countries
     * @param end the upper bound of the range of commerce countries (not inclusive)
     * @return the range of commerce countries
     */
    @Override
    public List<CommerceCountry> findAll(int start, int end) {
        // Delegate with a null comparator: the default ORDER BY logic
        // (or primary-key order) from the three-argument overload applies.
        return findAll(start, end, null);
    }
}
public class BackendProviderResolver {
    /**
     * Find the correct backend providers for the current context classloader.
     * If none are found there, fall back to the classloader of this resolver class.
     *
     * @return the TraceeBackendProviders registered and available in the current classloader
     */
    public Set<TraceeBackendProvider> getBackendProviders() {
        // Work on a stable reference; the shared cache field may be swapped on update.
        final Map<ClassLoader, Set<TraceeBackendProvider>> snapshot = providersPerClassloader;
        final Set<TraceeBackendProvider> contextProviders =
                getTraceeProviderFromClassloader(snapshot, GetClassLoader.fromContext());
        if (!contextProviders.isEmpty()) {
            return contextProviders;
        }
        // Fallback: resolve against this class's own classloader.
        return getTraceeProviderFromClassloader(snapshot, GetClassLoader.fromClass(BackendProviderResolver.class));
    }
}
public class AbstractDatabaseEngine {
    /**
     * Executes the given statement.
     * If the statement for some reason fails to execute, the error is logged
     * (at debug level) but no exception is thrown.
     *
     * @param statement The statement.
     */
    protected void executeUpdateSilently(String statement) {
        logger.trace(statement);
        // try-with-resources guarantees the JDBC Statement is closed.
        try (Statement alter = conn.createStatement()) {
            alter.execute(statement);
        } catch (final SQLException e) {
            // Deliberately swallowed: this helper is best-effort by contract.
            logger.debug("Could not execute {}.", statement, e);
        }
    }
}
public class StringGroovyMethods { /** * Support the range subscript operator for CharSequence * @ param text a CharSequence * @ param range a Range * @ return the subsequence CharSequence * @ since 1.0 */ public static CharSequence getAt ( CharSequence text , Range range ) { } }
RangeInfo info = subListBorders ( text . length ( ) , range ) ; CharSequence sequence = text . subSequence ( info . from , info . to ) ; return info . reverse ? reverse ( sequence ) : sequence ;
public class AmazonECSClient { /** * Describes the task sets in the specified cluster and service . This is used when a service uses the * < code > EXTERNAL < / code > deployment controller type . For more information , see < a * href = " http : / / docs . aws . amazon . com / AmazonECS / latest / developerguide / deployment - types . html " > Amazon ECS Deployment * Types < / a > in the < i > Amazon Elastic Container Service Developer Guide < / i > . * @ param describeTaskSetsRequest * @ return Result of the DescribeTaskSets operation returned by the service . * @ throws ServerException * These errors are usually caused by a server issue . * @ throws ClientException * These errors are usually caused by a client action , such as using an action or resource on behalf of a * user that doesn ' t have permissions to use the action or resource , or specifying an identifier that is not * valid . * @ throws InvalidParameterException * The specified parameter is invalid . Review the available parameters for the API request . * @ throws ClusterNotFoundException * The specified cluster could not be found . You can view your available clusters with < a > ListClusters < / a > . * Amazon ECS clusters are Region - specific . * @ throws UnsupportedFeatureException * The specified task is not supported in this Region . * @ throws AccessDeniedException * You do not have authorization to perform the requested action . * @ throws ServiceNotFoundException * The specified service could not be found . You can view your available services with < a > ListServices < / a > . * Amazon ECS services are cluster - specific and Region - specific . * @ throws ServiceNotActiveException * The specified service is not active . You can ' t update a service that is inactive . If you have previously * deleted a service , you can re - create it with < a > CreateService < / a > . * @ sample AmazonECS . DescribeTaskSets * @ see < a href = " http : / / docs . aws . amazon . 
com / goto / WebAPI / ecs - 2014-11-13 / DescribeTaskSets " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DescribeTaskSetsResult describeTaskSets ( DescribeTaskSetsRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeTaskSets ( request ) ;
public class ContextUtils { /** * Get the file points to an external movies directory . * @ param context the context . * @ return the { @ link java . io . File } . */ @ TargetApi ( Build . VERSION_CODES . FROYO ) public static File getExternalFilesDirForMovies ( Context context ) { } }
return context . getExternalFilesDir ( Environment . DIRECTORY_MOVIES ) ;
public class ExcelDataProviderImpl {
    /**
     * Gets data from the Excel sheet named after the entity class, applying the
     * given filter to each row-backed object.
     *
     * @param dataFilter an implementation class of {@link DataProviderFilter}
     * @return an iterator over a collection of Object arrays for use with a TestNG DataProvider
     * @throws DataProviderException if a row object cannot be instantiated via reflection
     */
    @Override
    public Iterator<Object[]> getDataByFilter(DataProviderFilter dataFilter) {
        logger.entering(dataFilter);
        List<Object[]> objs = new ArrayList<>();
        // The entity's declared fields define how many columns of each row are read.
        Field[] fields = resource.getCls().getDeclaredFields();
        // Extracting number of rows of data to read
        // Notice that numRows is returning the actual number of non-blank rows.
        // Thus if there are blank rows in the sheet then we will miss some last rows of data.
        List<Row> rowToBeRead = excelReader.getAllExcelRows(resource.getCls().getSimpleName(), false);
        List<String> excelHeaderRow = getHeaderRowContents(resource.getCls().getSimpleName(), fields.length);
        for (Row row : rowToBeRead) {
            List<String> excelRowData = excelReader.getRowContents(row, fields.length);
            // Header->value map lets prepareObject populate fields by column name.
            Map<String, String> headerRowDataMap = prepareHeaderRowDataMap(excelHeaderRow, excelRowData);
            if (excelRowData.size() != 0) { // skip fully blank rows
                try {
                    Object temp = prepareObject(getObject(), fields, excelRowData, headerRowDataMap);
                    // Only rows accepted by the caller-supplied filter are returned.
                    if (dataFilter.filter(temp)) {
                        objs.add(new Object[] { temp });
                    }
                } catch (IllegalAccessException e) {
                    throw new DataProviderException(
                        "Unable to create instance of type '" + resource.getCls().getName() + "'", e);
                }
            }
        }
        logger.exiting(objs.iterator());
        return objs.iterator();
    }
}
public class ZWaveController { /** * Send Request Node info message to the controller . * @ param nodeId the nodeId of the node to identify * @ throws SerialInterfaceException when timing out or getting an invalid response . */ public void requestNodeInfo ( int nodeId ) { } }
SerialMessage newMessage = new SerialMessage ( nodeId , SerialMessage . SerialMessageClass . RequestNodeInfo , SerialMessage . SerialMessageType . Request , SerialMessage . SerialMessageClass . ApplicationUpdate , SerialMessage . SerialMessagePriority . High ) ; byte [ ] newPayload = { ( byte ) nodeId } ; newMessage . setMessagePayload ( newPayload ) ; this . enqueue ( newMessage ) ;
public class NioOutboundPipeline {
    /**
     * Tries to unschedule this pipeline. It will only be unscheduled if there are no
     * pending frames. If the outputBuffer is dirty it registers for OP_WRITE (we want
     * to know when there is more space in the socket output buffer); otherwise it
     * unregisters from OP_WRITE. This call is only made by the owning IO thread, so
     * there is no contention on the selector key itself — only on {@code scheduled}.
     */
    private void unschedule() throws IOException {
        // since everything is written, we are not interested anymore in write-events, so lets unsubscribe
        unregisterOp(OP_WRITE);
        // So the outputBuffer is empty, so we are going to unschedule the pipeline.
        scheduled.set(false);
        if (writeQueue.isEmpty() && priorityWriteQueue.isEmpty()) {
            // there are no remaining frames, so we are done.
            return;
        }
        // So there are frames, but we just unscheduled ourselves. If we don't try to reschedule, then these
        // Frames are at risk not to be send.
        // NOTE(review): the set(false)/compareAndSet(false,true) pair is the race-closing
        // handshake with concurrent producers — do not reorder these statements.
        if (!scheduled.compareAndSet(false, true)) {
            // someone else managed to schedule this OutboundHandler, so we are done.
            return;
        }
        // We managed to reschedule. So lets add ourselves to the owner so we are processed again.
        // We don't need to call wakeup because the current thread is the IO-thread and the selectionQueue will be processed
        // till it is empty. So it will also pick up tasks that are added while it is processing the selectionQueue.
        // owner can't be null because this method is made by the owning io thread.
        owner().addTask(this);
    }
}
public class Storage {
    /**
     * Storage objects sort such that primaries sort first, mirrors after.
     * Ordering keys, in priority order: consistency (consistent first), master-primary
     * flag, promotion id (note the swapped operands — facade primary sorts first),
     * and finally the uuid as a deterministic tie-breaker compatible with equals().
     */
    @Override
    public int compareTo(Storage o) {
        return ComparisonChain.start()
            .compareTrueFirst(isConsistent(), o.isConsistent()) // Primaries *must* be consistent.
            .compareTrueFirst(_masterPrimary, o._masterPrimary) // Master primary sorts first
            .compare(o.getPromotionId(), getPromotionId(), TimeUUIDs.ordering().nullsLast()) // Facade primary sorts first
            .compare(_uuid, o._uuid) // Break ties in a way that's compatible with equals().
            .result();
    }
}
public class CommerceRegionPersistenceImpl { /** * Returns the last commerce region in the ordered set where commerceCountryId = & # 63 ; . * @ param commerceCountryId the commerce country ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce region , or < code > null < / code > if a matching commerce region could not be found */ @ Override public CommerceRegion fetchByCommerceCountryId_Last ( long commerceCountryId , OrderByComparator < CommerceRegion > orderByComparator ) { } }
int count = countByCommerceCountryId ( commerceCountryId ) ; if ( count == 0 ) { return null ; } List < CommerceRegion > list = findByCommerceCountryId ( commerceCountryId , count - 1 , count , orderByComparator ) ; if ( ! list . isEmpty ( ) ) { return list . get ( 0 ) ; } return null ;
public class Main { /** * Runs the print . * @ param args the cli arguments * @ throws Exception */ @ VisibleForTesting public static void runMain ( final String [ ] args ) throws Exception { } }
final CliHelpDefinition helpCli = new CliHelpDefinition ( ) ; try { Args . parse ( helpCli , args ) ; if ( helpCli . help ) { printUsage ( 0 ) ; return ; } } catch ( IllegalArgumentException invalidOption ) { // Ignore because it is probably one of the non - help options . } final CliDefinition cli = new CliDefinition ( ) ; try { List < String > unusedArguments = Args . parse ( cli , args ) ; if ( ! unusedArguments . isEmpty ( ) ) { System . out . println ( "\n\nThe following arguments are not recognized: " + unusedArguments ) ; printUsage ( 1 ) ; return ; } } catch ( IllegalArgumentException invalidOption ) { System . out . println ( "\n\n" + invalidOption . getMessage ( ) ) ; printUsage ( 1 ) ; return ; } configureLogs ( cli . verbose ) ; AbstractXmlApplicationContext context = new ClassPathXmlApplicationContext ( DEFAULT_SPRING_CONTEXT ) ; if ( cli . springConfig != null ) { context = new ClassPathXmlApplicationContext ( DEFAULT_SPRING_CONTEXT , cli . springConfig ) ; } try { context . getBean ( Main . class ) . run ( cli ) ; } finally { context . close ( ) ; }
public class CreateRouteResult { /** * The request models for the route . * @ param requestModels * The request models for the route . * @ return Returns a reference to this object so that method calls can be chained together . */ public CreateRouteResult withRequestModels ( java . util . Map < String , String > requestModels ) { } }
setRequestModels ( requestModels ) ; return this ;
public class PersonBuilderImpl { /** * { @ inheritDoc } */ @ Override public Attribute createPersonEvent ( final Person person , final String type , final String dateString ) { } }
final Attribute event = gedObjectBuilder . createAttribute ( person , type ) ; event . insert ( new Date ( event , dateString ) ) ; return event ;
public class BigDecimalLiteralDouble { /** * accept multiple unary prefixes . */ private static boolean floatingPointArgument ( ExpressionTree tree ) { } }
if ( tree . getKind ( ) == Kind . UNARY_PLUS || tree . getKind ( ) == Kind . UNARY_MINUS ) { tree = ( ( UnaryTree ) tree ) . getExpression ( ) ; } return tree . getKind ( ) == Kind . DOUBLE_LITERAL || tree . getKind ( ) == Kind . FLOAT_LITERAL ;
public class CdnClient { /** * Post purge request * @ param url The URL to be purged . * @ return Result of the purge operation returned by the service . */ public PurgeResponse purge ( String url ) { } }
return purge ( new PurgeRequest ( ) . addTask ( new PurgeTask ( ) . withUrl ( url ) ) ) ;
public class ToStream { /** * Receive notification of ignorable whitespace in element content . * Not sure how to get this invoked quite yet . * @ param ch The characters from the XML document . * @ param start The start position in the array . * @ param length The number of characters to read from the array . * @ throws org . xml . sax . SAXException Any SAX exception , possibly * wrapping another exception . * @ see # characters * @ throws org . xml . sax . SAXException */ public void ignorableWhitespace ( char ch [ ] , int start , int length ) throws org . xml . sax . SAXException { } }
if ( 0 == length ) return ; characters ( ch , start , length ) ;
public class CmsSimpleDecoratedPanel { /** * Internal helper method for initializing the layout of this widget . < p > */ private void init ( ) { } }
int decorationWidth = getDecorationWidth ( ) ; m_decorationBox . setWidth ( decorationWidth + "px" ) ; m_primary . getElement ( ) . getStyle ( ) . setMarginLeft ( decorationWidth , Style . Unit . PX ) ;
public class ResolveStageBaseImpl { /** * { @ inheritDoc } * @ see org . jboss . shrinkwrap . resolver . api . ResolveStage # addDependencies ( java . util . Collection ) */ @ Override public RESOLVESTAGETYPE addDependencies ( final Collection < MavenDependency > dependencies ) throws IllegalArgumentException { } }
if ( dependencies == null ) { throw new IllegalArgumentException ( "dependencies must be provided" ) ; } for ( final MavenDependency dep : dependencies ) { this . addDependency ( dep ) ; } return this . covarientReturn ( ) ;
public class LofResultPrinter {
    /**
     * {@inheritDoc}
     *
     * Logs the result when its LOF score exceeds the configured threshold.
     */
    @Override
    public void notifyResult(LofResult result) {
        // Log only when the LOF score is strictly above the threshold.
        if (this.threshold < result.getLofScore()) {
            // Fixed typo in the log message: "thredhold" -> "threshold".
            logger.info("LOF Score threshold over. LOF Score=" + result.getLofScore() + ", Data="
                    + Arrays.toString(result.getLofPoint().getDataPoint()));
        }
    }
}
public class LabelOperationMetadata { /** * < code > * . google . cloud . datalabeling . v1beta1 . LabelVideoObjectDetectionOperationMetadata video _ object _ detection _ details = 6; * < / code > */ public com . google . cloud . datalabeling . v1beta1 . LabelVideoObjectDetectionOperationMetadata getVideoObjectDetectionDetails ( ) { } }
if ( detailsCase_ == 6 ) { return ( com . google . cloud . datalabeling . v1beta1 . LabelVideoObjectDetectionOperationMetadata ) details_ ; } return com . google . cloud . datalabeling . v1beta1 . LabelVideoObjectDetectionOperationMetadata . getDefaultInstance ( ) ;
public class UnifiedClassLoader { /** * Spring to register the given ClassFileTransformer on this ClassLoader */ @ Override public boolean addTransformer ( final ClassFileTransformer cft ) { } }
boolean added = false ; for ( ClassLoader loader : followOnClassLoaders ) { if ( loader instanceof SpringLoader ) { added |= ( ( SpringLoader ) loader ) . addTransformer ( cft ) ; } } return added ;
public class JNPM { /** * map of tag = > version */ protected IPromise < JsonObject > getDistributions ( String module ) { } }
Promise res = new Promise ( ) ; // http : / / registry . npmjs . org / - / package / react / dist - tags http . getContent ( config . getRepo ( ) + "/-/package/" + module + "/dist-tags" ) . then ( ( cont , err ) -> { if ( cont != null ) { JsonObject parse = Json . parse ( cont ) . asObject ( ) ; res . resolve ( parse ) ; } else { res . reject ( err ) ; } } ) ; return res ;
public class AbstractItemLink { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . msgstore . deliverydelay . DeliveryDelayable # deliveryDelayableIsInStore ( ) */ @ Override public boolean deliveryDelayableIsInStore ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "deliveryDelayableIsInStore" ) ; boolean isInStore = isInStore ( ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "deliveryDelayableIsInStore" , isInStore ) ; return isInStore ;
public class ServerStoreCompatibility { /** * Ensure compatibility of a client { @ link ServerStoreConfiguration } with an existing * server - side { @ code ServerStoreConfiguration } . * @ param serverConfiguration the existing server - side { @ code ServerStoreConfiguration } * @ param clientConfiguration the desired client - side { @ code ServerStoreConfiguration } * @ throws InvalidServerStoreConfigurationException if { @ code clientConfiguration } is not compatible with * { @ code serverConfiguration } */ public void verify ( ServerStoreConfiguration serverConfiguration , ServerStoreConfiguration clientConfiguration ) throws InvalidServerStoreConfigurationException { } }
StringBuilder sb = new StringBuilder ( "Existing ServerStore configuration is not compatible with the desired configuration: " ) ; if ( ! serverConfiguration . isCompatible ( clientConfiguration , sb ) ) { throw new InvalidServerStoreConfigurationException ( sb . toString ( ) ) ; }
public class Revision { /** * Compress time from long - to the integer - format : reduce the resolution to * " seconds " and zero time to 1th January 2000 * @ param date * date / time in the long format * @ return date / time in the compressed integer format */ public static int compressTime ( long date ) { } }
Long lowResolutionDate = new Long ( ( date - TIME_ZERO ) / MS_IN_SEC ) ; return lowResolutionDate . intValue ( ) ;
public class PeepholeReplaceKnownMethods {
    /**
     * Try to fold .split() calls on string literals into an array literal.
     * Returns the original node untouched when folding is not safe (late pass,
     * non-literal separator, or an invalid/non-numeric limit argument).
     *
     * @param n          the CALL node being considered
     * @param stringNode the string literal receiver of .split()
     * @param arg1       the first argument (separator), possibly null
     * @return the replacement array literal, or {@code n} when no folding occurred
     */
    private Node tryFoldStringSplit(Node n, Node stringNode, Node arg1) {
        if (late) {
            // In the late pass this folding would fight other optimizations; bail out.
            return n;
        }
        checkArgument(n.isCall());
        checkArgument(stringNode.isString());
        String separator = null;
        String stringValue = stringNode.getString();
        // Maximum number of possible splits
        int limit = stringValue.length() + 1;
        if (arg1 != null) {
            if (arg1.isString()) {
                separator = arg1.getString();
            } else if (!arg1.isNull()) {
                // Non-literal separator: cannot evaluate at compile time.
                return n;
            }
            Node arg2 = arg1.getNext();
            if (arg2 != null) {
                if (arg2.isNumber()) {
                    limit = Math.min((int) arg2.getDouble(), limit);
                    if (limit < 0) {
                        return n;
                    }
                } else {
                    return n;
                }
            }
        }
        // Split the string and convert the returned array into JS nodes
        String[] stringArray = jsSplit(stringValue, separator, limit);
        Node arrayOfStrings = IR.arraylit();
        for (String element : stringArray) {
            // srcref preserves source-map position from the original literal.
            arrayOfStrings.addChildToBack(IR.string(element).srcref(stringNode));
        }
        Node parent = n.getParent();
        parent.replaceChild(n, arrayOfStrings);
        reportChangeToEnclosingScope(parent);
        return arrayOfStrings;
    }
}
public class PermissionHelper { /** * Similar to { @ link # checkPermission ( ) } , but it allows the AppInfoDialog will be enabled or disabled for this call . * @ param showAppInfoDialogEnabled true if the AppInfoDialog is enabled , false to disabled it . */ public boolean checkPermission ( boolean showAppInfoDialogEnabled ) { } }
boolean hasPermission ; if ( permissionRationaleMessageResId != 0 ) { hasPermission = checkPermission ( permissionDelegate , getPermissionRationaleTitleResId ( ) , permissionRationaleMessageResId , permission , permissionRequestCode ) ; } else { hasPermission = checkPermission ( permissionDelegate , permission , permissionRequestCode ) ; } if ( ! hasPermission && showAppInfoDialogEnabled ) { previouslyShouldShowRequestPermissionRationale = permissionDelegate . shouldShowRequestPermissionRationale ( permission ) ; } else { previouslyShouldShowRequestPermissionRationale = null ; } return hasPermission ;
public class XsdEmitter {
    /**
     * Create an XML schema complex type. We use named complex types so they are
     * added to the XSD directly; complex types are added before their children
     * because it is nicer for the XSD layout to list roots before leafs.
     * Redefined and redefining elements are grouped into an XML Schema choice:
     * a choice is created when an element marked isRedefined is encountered and
     * it groups all subsequent elements marked as redefines until a
     * non-redefining element is found.
     *
     * @param xsdDataItem COBOL data item decorated with XSD attributes
     * @return a new complex type
     */
    public XmlSchemaComplexType createXmlSchemaComplexType(final XsdDataItem xsdDataItem) {
        // All complex types are root complex types
        XmlSchemaComplexType xmlSchemaComplexType = new XmlSchemaComplexType(getXsd(), true);
        // Tracks the currently-open choice; null means we are not inside a REDEFINES group.
        XmlSchemaChoice xmlSchemaChoice = null;
        XmlSchemaSequence xmlSchemaSequence = new XmlSchemaSequence();
        for (XsdDataItem child : xsdDataItem.getChildren()) {
            XmlSchemaElement xmlSchemaElement = createXmlSchemaElement(child);
            if (xmlSchemaElement != null) {
                if (child.isRedefined()) {
                    // Start a new choice group holding the redefined element and
                    // every redefining sibling that follows it.
                    xmlSchemaChoice = new XmlSchemaChoice();
                    xmlSchemaSequence.getItems().add(xmlSchemaChoice);
                    xmlSchemaChoice.getItems().add(xmlSchemaElement);
                } else if (child.getRedefines() != null) {
                    if (xmlSchemaChoice == null) {
                        // Malformed input: a REDEFINES with no preceding redefined
                        // sibling. Warn and fall back to the plain sequence.
                        _log.warn("Item " + child.getCobolName() + " REDEFINES " + child.getRedefines()
                            + " but redefined item is not a sibling");
                        xmlSchemaSequence.getItems().add(xmlSchemaElement);
                    } else {
                        xmlSchemaChoice.getItems().add(xmlSchemaElement);
                    }
                } else {
                    xmlSchemaSequence.getItems().add(xmlSchemaElement);
                }
            }
        }
        xmlSchemaComplexType.setParticle(xmlSchemaSequence);
        xmlSchemaComplexType.setName(xsdDataItem.getXsdTypeName());
        return xmlSchemaComplexType;
    }
}
public class SocketClientSink {
    /**
     * Closes the connection with the Socket server. Marks the sink as stopped,
     * wakes any thread waiting on {@code lock}, then closes the stream and —
     * even if that fails — the client socket.
     */
    @Override
    public void close() throws Exception {
        // flag this as not running any more
        isRunning = false;
        // clean up in locked scope, so there is no concurrent change to the stream and client
        synchronized (lock) {
            // we notify first (this statement cannot fail). The notified thread will not continue
            // anyways before it can re-acquire the lock
            lock.notifyAll();
            try {
                if (outputStream != null) {
                    outputStream.close();
                }
            } finally {
                // finally-block guarantees the socket closes even when the stream close throws
                if (client != null) {
                    client.close();
                }
            }
        }
    }
}
public class IfThenElse { /** * { @ inheritDoc } */ public int getDepth ( ) { } }
return 1 + Math . max ( condition . getDepth ( ) , Math . max ( then . getDepth ( ) , otherwise . getDepth ( ) ) ) ;
public class FleetAttributes { /** * Names of metric groups that this fleet is included in . In Amazon CloudWatch , you can view metrics for an * individual fleet or aggregated metrics for fleets that are in a fleet metric group . A fleet can be included in * only one metric group at a time . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setMetricGroups ( java . util . Collection ) } or { @ link # withMetricGroups ( java . util . Collection ) } if you want to * override the existing values . * @ param metricGroups * Names of metric groups that this fleet is included in . In Amazon CloudWatch , you can view metrics for an * individual fleet or aggregated metrics for fleets that are in a fleet metric group . A fleet can be * included in only one metric group at a time . * @ return Returns a reference to this object so that method calls can be chained together . */ public FleetAttributes withMetricGroups ( String ... metricGroups ) { } }
if ( this . metricGroups == null ) { setMetricGroups ( new java . util . ArrayList < String > ( metricGroups . length ) ) ; } for ( String ele : metricGroups ) { this . metricGroups . add ( ele ) ; } return this ;
public class AbstractViewQuery { /** * Change the custom IME action associated with the text view . click the lable will trigger the associateView ' s onClick method * @ param listener */ public T imeAction ( TextView . OnEditorActionListener listener ) { } }
if ( view instanceof EditText ) { ( ( EditText ) view ) . setOnEditorActionListener ( listener ) ; } return self ( ) ;
public class BootstrapContextImpl { /** * Declarative Services method for setting a JCAContextProvider service reference * @ param ref reference to the service */ protected void setContextProvider ( ServiceReference < JCAContextProvider > ref ) { } }
contextProviders . putReference ( ( String ) ref . getProperty ( JCAContextProvider . TYPE ) , ref ) ;
public class ImagePanel {
    /**
     * Draws the checkerboard background to the specified graphics context.
     * The pattern is produced by drawing dashed lines with the checker stroke:
     * alternate rows are offset by one check size so the dashes interleave.
     *
     * @param g2d graphics context to draw on
     * @since 1.4
     */
    protected void drawCheckerBoard(Graphics2D g2d) {
        // Fill the whole panel with the background colour first.
        g2d.setColor(getBackground());
        g2d.fillRect(0, 0, getWidth(), getHeight());
        g2d.setColor(getForeground());
        final int checkSize = this.checkerSize;
        final int halfCheckSize = checkSize / 2;
        // The checker stroke is a dashed stroke whose dash pattern makes squares.
        final Stroke checker = this.checkerStroke;
        final Stroke backup = g2d.getStroke();
        g2d.setStroke(checker);
        // Overshoot by one check so partial checks at the edges are drawn too.
        final int width = this.getWidth() + checkSize;
        final int height = this.getHeight() + checkSize;
        for (int i = halfCheckSize; i < height; i += checkSize * 2) {
            g2d.drawLine(halfCheckSize, i, width, i);
            // Second line offset by checkSize in both axes to alternate the pattern.
            g2d.drawLine(checkSize + halfCheckSize, i + checkSize, width, i + checkSize);
        }
        // Restore the caller's stroke.
        g2d.setStroke(backup);
    }
}
public class CouchDBClient { /** * ( non - Javadoc ) * @ see com . impetus . kundera . client . Client # findByRelation ( java . lang . String , * java . lang . Object , java . lang . Class ) */ @ Override public List < Object > findByRelation ( String colName , Object colValue , Class entityClazz ) { } }
EntityMetadata m = KunderaMetadataManager . getEntityMetadata ( kunderaMetadata , entityClazz ) ; Object [ ] ids = findIdsByColumn ( m . getSchema ( ) , m . getTableName ( ) , ( ( AbstractAttribute ) m . getIdAttribute ( ) ) . getJPAColumnName ( ) , colName , colValue , m . getEntityClazz ( ) ) ; List < Object > resultSet = new ArrayList < Object > ( ) ; if ( ids != null ) { for ( Object id : new HashSet ( Arrays . asList ( ids ) ) ) { Object object = find ( entityClazz , id ) ; if ( object != null ) { resultSet . add ( object ) ; } } } return resultSet ;
public class Database {
    /**
     * Change the key of an encrypted database. The SQLite3 database must have been
     * open()ed. Not available in public releases of SQLite.
     * An empty or null key clears the key (passes null to the native layer).
     *
     * @param skey the key as String
     * @throws jsqlite.Exception on native-layer failure
     */
    public void rekey(String skey) throws jsqlite.Exception {
        synchronized (this) {
            byte ekey[] = null;
            if (skey != null && skey.length() > 0) {
                ekey = new byte[skey.length()];
                for (int i = 0; i < skey.length(); i++) {
                    char c = skey.charAt(i);
                    // Fold each 16-bit char into one byte: low byte XOR high byte.
                    // This exact encoding must match the open()/key path — do not change.
                    ekey[i] = (byte) ((c & 0xff) ^ (c >> 8));
                }
            }
            _rekey(ekey);
        }
    }
}
public class JmfDevice { /** * Get video format for size . * @ param device device to get format from * @ param size specific size to search * @ return VideoFormat */ private VideoFormat getSizedVideoFormat ( Dimension size ) { } }
Format [ ] formats = device . getFormats ( ) ; VideoFormat format = null ; for ( Format f : formats ) { if ( ! "RGB" . equalsIgnoreCase ( f . getEncoding ( ) ) || ! ( f instanceof VideoFormat ) ) { continue ; } Dimension d = ( ( VideoFormat ) f ) . getSize ( ) ; if ( d . width == size . width && d . height == size . height ) { format = ( VideoFormat ) f ; break ; } } return format ;
public class FactoryBlurFilter { /** * Creates a mean filter for the specified image type . * @ param type Image type . * @ param radius Size of the filter . * @ return mean image filter . */ public static < T extends ImageBase < T > > BlurStorageFilter < T > mean ( ImageType < T > type , int radius ) { } }
return new BlurStorageFilter < > ( "mean" , type , radius ) ;
public class ParameterImpl {
    /**
     * <!-- begin-user-doc -->
     * Resets the given feature to its default value. EMF-generated — do not edit by hand.
     * <!-- end-user-doc -->
     *
     * @generated
     */
    @Override
    public void eUnset(int featureID) {
        switch (featureID) {
            case SimpleAntlrPackage.PARAMETER__TYPE:
                setType(TYPE_EDEFAULT);
                return;
            case SimpleAntlrPackage.PARAMETER__NAME:
                setName(NAME_EDEFAULT);
                return;
        }
        // Unknown features are delegated to the superclass.
        super.eUnset(featureID);
    }
}
public class DefaultResolverRegistry { /** * register card with type and card class * @ param type * @ param cardClz */ public void registerCard ( String type , Class < ? extends Card > cardClz ) { } }
mDefaultCardResolver . register ( type , cardClz ) ;
public class MediaStreaming {
    /**
     * {@inheritDoc}
     *
     * Streams the requested byte range(s) of the entity to the output. The Range
     * header is parsed into one or more {@code Range}s; a single range is written
     * with Content-Range/Content-Length headers, while multiple ranges are written
     * as a multipart/byteranges body with a random boundary.
     */
    @Override
    public void write(OutputStream output) throws IOException, WebApplicationException {
        List<Range> ranges = Lists.newArrayList();
        // Range header shape: "<unit>=<from1>-<to1>,<from2>-<to2>,..."
        String[] acceptRanges = range.split("=");
        if (acceptRanges.length != 2) {
            throw new BadRequestException(RANGE + " header error");
        }
        String accept = acceptRanges[0];
        for (String range : acceptRanges[1].split(",")) {
            String[] bounds = range.split("-");
            // An open-ended range ("from-") yields a null upper bound.
            ranges.add(new Range(Long.valueOf(bounds[0]),
                bounds.length == 2 ? Long.valueOf(bounds[1]) : null));
        }
        boolean multipart = ranges.size() > 1;
        // Total entity length: prefer an explicit Content-Length header, falling
        // back to asking the streaming process.
        Object len = headers.getFirst(HttpHeaders.CONTENT_LENGTH);
        long contentLength;
        if (len != null) {
            if (len instanceof String) {
                contentLength = Long.parseLong((String) len);
            } else if (len instanceof Long) {
                contentLength = (Long) len;
            } else {
                contentLength = Long.parseLong(String.valueOf(len));
            }
        } else {
            contentLength = streamingProcess.length(entity);
        }
        if (multipart) {
            // Random boundary of 20-30 alphanumeric characters.
            int count = RANDOM.nextInt(11) + 20;
            String boundary = RandomStringUtils.randomAlphanumeric(count);
            // Length is unknown for a multipart body; replace it with the multipart content type.
            headers.remove(HttpHeaders.CONTENT_LENGTH);
            headers.putSingle(HttpHeaders.CONTENT_TYPE, String.format(MULTIPART_BYTERANGES, boundary));
            for (Range range : ranges) {
                // Each part: boundary line, part headers, blank line, payload, blank line.
                output.write(String.format(BOUNDARY_LINE_FORMAT, boundary).getBytes());
                output.write(String.format(CONTENT_TYPE_LINE_FORMAT, contentType).getBytes());
                long to = range.getTo(contentLength - 1);
                output.write(String.format(CONTENT_RANGE_LINE_FORMAT, accept, range.getFrom(), to, contentLength).getBytes());
                output.write(EMPTY_LINE.getBytes());
                long currentLength = to - range.getFrom() + 1;
                streamingProcess.write(entity, output, range.getFrom(), range.to == null ? null : currentLength);
                output.write(EMPTY_LINE.getBytes());
            }
            // Closing boundary ("--boundary--").
            output.write(String.format(BOUNDARY_LINE_FORMAT, boundary + "--").getBytes());
        } else {
            Range range = ranges.get(0);
            long to = range.getTo(contentLength - 1);
            headers.putSingle(CONTENT_RANGE, String.format(CONTENT_RANGE_FORMAT, accept, range.getFrom(), to, contentLength));
            long currentLength = to - range.getFrom() + 1;
            headers.putSingle(HttpHeaders.CONTENT_LENGTH, currentLength);
            streamingProcess.write(entity, output, range.getFrom(), range.to == null ? null : currentLength);
        }
    }
}
public class Filter { /** * Applies the given filter to the input and returns only those elements * that match it . * @ param filter * a ( possible compound ) filter ; elements in the input array matching the * filter will be returned . * @ param elements * the collection of objects to filter . * @ return * a filtered set of elements . */ @ SafeVarargs public static < T > Collection < T > apply ( Filter < T > filter , T ... elements ) { } }
List < T > list = new ArrayList < T > ( ) ; if ( elements != null ) { for ( T element : elements ) { list . add ( element ) ; } } return apply ( filter , list ) ;