signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class CPDefinitionOptionRelLocalServiceBaseImpl { /** * Returns a range of cp definition option rels matching the UUID and company . * @ param uuid the UUID of the cp definition option rels * @ param companyId the primary key of the company * @ param start the lower bound of the range of cp definition option rels * @ param end the upper bound of the range of cp definition option rels ( not inclusive ) * @ param orderByComparator the comparator to order the results by ( optionally < code > null < / code > ) * @ return the range of matching cp definition option rels , or an empty list if no matches were found */ @ Override public List < CPDefinitionOptionRel > getCPDefinitionOptionRelsByUuidAndCompanyId ( String uuid , long companyId , int start , int end , OrderByComparator < CPDefinitionOptionRel > orderByComparator ) { } }
return cpDefinitionOptionRelPersistence . findByUuid_C ( uuid , companyId , start , end , orderByComparator ) ;
public class Record {

    /**
     * Sets the number of fields in the record. If the new number of fields is larger than the
     * current number of fields, then null fields are appended. If the new number of fields is
     * smaller than the current number of fields, then the last fields are truncated.
     *
     * @param numFields The new number of fields.
     */
    public void setNumFields(final int numFields) {
        final int oldNumFields = this.numFields;
        // check whether we increase or decrease the fields
        if (numFields > oldNumFields) {
            // growing: reserve space, then mark every appended field as null
            makeSpace(numFields);
            for (int i = oldNumFields; i < numFields; i++) {
                this.offsets[i] = NULL_INDICATOR_OFFSET;
            }
            // only the range starting at the old end is considered modified
            markModified(oldNumFields);
        } else {
            // decrease the number of fields
            // we do not remove the values from the cache, as the objects (if they are there)
            // will most likely be reused when the record is re-filled
            markModified(numFields);
        }
        this.numFields = numFields;
    }
}
public class CmsSecurityManager {

    /**
     * Imports a rewrite alias.<p>
     *
     * @param requestContext the current request context
     * @param siteRoot the site root
     * @param source the rewrite alias source
     * @param target the rewrite alias target
     * @param mode the alias mode
     * @return the import result
     * @throws CmsException if something goes wrong
     */
    public CmsAliasImportResult importRewriteAlias(
        CmsRequestContext requestContext,
        String siteRoot,
        String source,
        String target,
        CmsAliasMode mode) throws CmsException {

        CmsDbContext dbc = m_dbContextFactory.getDbContext(requestContext);
        try {
            return m_driverManager.importRewriteAlias(dbc, siteRoot, source, target, mode);
        } catch (Exception e) {
            // NOTE(review): dbc.report presumably wraps and rethrows as CmsException,
            // making the following return unreachable in practice — confirm.
            dbc.report(null, Messages.get().container(Messages.ERR_DB_OPERATION_0), e);
            return null;
        } finally {
            // always release the database context
            dbc.clear();
        }
    }
}
public class ExecuteArgAnalyzer {

    /**
     * Analyzes the parameters of an action execute method and fills the given box with
     * the derived path-parameter types, optional generic types, and form information.
     * Non-form parameters are treated as path parameters; at most one action-form
     * parameter is allowed and it must be the last parameter.
     *
     * @param executeMethod the execute method being analyzed
     * @param box the result holder populated by this analysis
     */
    public void analyzeExecuteArg(Method executeMethod, ExecuteArgBox box) {
        List<Class<?>> pathParamTypeList = null; // lazy loaded
        Parameter formParam = null;
        final Parameter[] parameters = executeMethod.getParameters();
        if (parameters.length > 0) {
            boolean formEnd = false;
            for (Parameter parameter : parameters) {
                // a form parameter was already seen, so any further parameter is an error
                if (formEnd) {
                    throwActionFormNotLastParameterException(executeMethod);
                }
                checkNonGenericParameter(executeMethod, parameter);
                if (isActionFormParameter(parameter)) {
                    formParam = parameter;
                    formEnd = true;
                } else {
                    // everything that is not the form is collected as a path parameter type
                    if (pathParamTypeList == null) {
                        pathParamTypeList = new ArrayList<Class<?>>(4);
                    }
                    pathParamTypeList.add(parameter.getType());
                }
            }
        }
        box.setPathParamTypeList(preparePathParamTypeList(pathParamTypeList));
        box.setOptionalGenericTypeMap(prepareOptionalGenericTypeMap(executeMethod));
        box.setRootFormType(prepareRootFormType(formParam));
        box.setListFormParameter(prepareListFormParameter(formParam));
    }
}
public class ReframingResponseObserver {

    /**
     * Checks if the awaited upstream response is available. If it is, then feed it to the
     * {@link Reframer} and update the {@link #awaitingInner} flag. Upon exit, if awaitingInner
     * is not set, then done is guaranteed to reflect the current status of the upstream.
     */
    private void pollUpstream() {
        // nothing awaited: there is no pending item to move
        if (!awaitingInner) {
            return;
        }
        // snapshot done BEFORE draining newItem, so the ordering guarantee in the
        // javadoc holds even if upstream closes concurrently
        boolean localDone = this.done;
        // Try to move the new item into the reframer (atomically claim it)
        InnerT newUpstreamItem = newItem.getAndSet(null);
        if (newUpstreamItem != null) {
            reframer.push(newUpstreamItem);
            // and reset the awaiting flag, if the item arrived or upstream closed
            awaitingInner = false;
        } else if (localDone) {
            awaitingInner = false;
        }
    }
}
public class NetworkMessage { /** * First 4 bytes of sha512 ( payload ) */ private byte [ ] getChecksum ( byte [ ] bytes ) throws NoSuchProviderException , NoSuchAlgorithmException { } }
byte [ ] d = cryptography ( ) . sha512 ( bytes ) ; return new byte [ ] { d [ 0 ] , d [ 1 ] , d [ 2 ] , d [ 3 ] } ;
public class FSImage {

    /**
     * Load the image namespace from the given image file, verifying it against
     * the MD5 sum stored in its associated .md5 file.
     *
     * @param iis the image input stream to load from
     * @param imageFile the image file whose sidecar .md5 file holds the expected digest
     * @throws IOException if no stored MD5 exists for the file or loading fails
     */
    protected void loadFSImage(ImageInputStream iis, File imageFile) throws IOException {
        MD5Hash expectedMD5 = MD5FileUtils.readStoredMd5ForFile(imageFile);
        // refuse to load unverifiable images: a missing .md5 file is an error
        if (expectedMD5 == null) {
            throw new IOException("No MD5 file found corresponding to image file " + imageFile);
        }
        // the stream verifies the digest as it is consumed
        iis.setImageDigest(expectedMD5);
        loadFSImage(iis);
    }
}
public class JmxMBeans { /** * region jmx operations fetching */ private static Map < OperationKey , Method > fetchOpkeyToMethod ( Class < ? > mbeanClass ) { } }
Map < OperationKey , Method > opkeyToMethod = new HashMap < > ( ) ; Method [ ] methods = mbeanClass . getMethods ( ) ; for ( Method method : methods ) { if ( method . isAnnotationPresent ( JmxOperation . class ) ) { JmxOperation annotation = method . getAnnotation ( JmxOperation . class ) ; String opName = annotation . name ( ) ; if ( opName . equals ( "" ) ) { opName = method . getName ( ) ; } Class < ? > [ ] paramTypes = method . getParameterTypes ( ) ; Annotation [ ] [ ] paramAnnotations = method . getParameterAnnotations ( ) ; assert paramAnnotations . length == paramTypes . length ; String [ ] paramTypesNames = new String [ paramTypes . length ] ; for ( int i = 0 ; i < paramTypes . length ; i ++ ) { paramTypesNames [ i ] = paramTypes [ i ] . getName ( ) ; } opkeyToMethod . put ( new OperationKey ( opName , paramTypesNames ) , method ) ; } } return opkeyToMethod ;
public class ResourceGroovyMethods {

    /**
     * Create a new PrintWriter for this file, using specified charset. If the given charset is
     * "UTF-16BE" or "UTF-16LE" (or an equivalent alias), the requisite byte order mark is
     * written to the stream before the writer is returned.
     *
     * @param file a File
     * @param charset the charset
     * @return a PrintWriter
     * @throws IOException if an IOException occurs.
     * @since 1.0
     */
    public static PrintWriter newPrintWriter(File file, String charset) throws IOException {
        // newWriter handles charset validation and the BOM for UTF-16 variants
        return new GroovyPrintWriter(newWriter(file, charset));
    }
}
public class PrimaveraReader {

    /**
     * Code common to both XER and database readers to extract currency format data.
     *
     * @param row row containing currency data
     */
    public void processDefaultCurrency(Row row) {
        ProjectProperties properties = m_project.getProjectProperties();
        properties.setCurrencySymbol(row.getString("curr_symbol"));
        // map Primavera's position code onto MPXJ's symbol-position enum
        properties.setSymbolPosition(CURRENCY_SYMBOL_POSITION_MAP.get(row.getString("pos_curr_fmt_type")));
        properties.setCurrencyDigits(row.getInteger("decimal_digit_cnt"));
        // NOTE(review): charAt(0) assumes both separator columns are non-null and
        // non-empty in every source row — confirm against the XER/database schema.
        properties.setThousandsSeparator(row.getString("digit_group_symbol").charAt(0));
        properties.setDecimalSeparator(row.getString("decimal_symbol").charAt(0));
    }
}
public class RepositoryUtils {

    /**
     * Matches a set of elements in a case insensitive way.
     * A {@code null} pattern is treated as "accept everything".
     *
     * @param patternMatcher the pattern to match, or {@code null} to match anything
     * @param elements the elements to match
     * @return true if one of the elements is matched
     */
    public static boolean matches(Pattern patternMatcher, Object... elements) {
        if (patternMatcher != null) {
            for (final Object candidate : elements) {
                if (matches(patternMatcher, candidate)) {
                    return true;
                }
            }
            return false;
        }
        // no pattern supplied: everything matches
        return true;
    }
}
public class GrassRasterReader {

    /**
     * Reads one row of data from a compressed floating point map into the supplied buffer.
     *
     * @param rowdata the destination buffer; its backing array receives the row bytes
     * @param rn the row number
     * @param adrows the row address table (byte offsets of each row record)
     * @param thefile the open map file
     * @param typeBytes the size in bytes of the cell type
     * @throws IOException if reading the file fails or it ends prematurely
     * @throws DataFormatException if the deflate stream is corrupt
     */
    private void readCompressedFPRowByNumber( ByteBuffer rowdata, int rn, long[] adrows, RandomAccessFile thefile, int typeBytes ) throws DataFormatException, IOException {
        // size of this row's on-disk record, including the 1-byte compression flag
        final int offset = (int) (adrows[rn + 1] - adrows[rn]);
        /*
         * The fact that the file is compressed does not mean that the row is compressed.
         * If the first byte is 49 ('1') the row is deflate-compressed; if it is 48 ('0')
         * the row is stored in plain XDR uncompressed format.
         */
        thefile.seek(adrows[rn]);
        final int firstbyte = (thefile.read() & 0xff);
        if (firstbyte == 49) {
            /* The row is compressed: inflate the remaining bytes into the row buffer. */
            final byte[] compressed = new byte[offset - 1];
            // readFully guards against short reads, which plain read(byte[],int,int) permits
            thefile.readFully(compressed, 0, offset - 1);
            final Inflater decompresser = new Inflater();
            try {
                decompresser.setInput(compressed, 0, compressed.length);
                decompresser.inflate(rowdata.array());
            } finally {
                // always release the native inflater resources
                decompresser.end();
            }
        } else if (firstbyte == 48) {
            /* The row is NOT compressed: copy the raw XDR bytes directly. */
            thefile.readFully(rowdata.array(), 0, offset - 1);
        }
    }
}
public class VdmThreadManager {

    /**
     * IDbgpThreadAcceptor: accepts a new DBGP debug session, wraps it in a {@code VdmThread},
     * wires up breakpoints for the first thread, and either resumes, step-intos, or leaves the
     * thread suspended depending on the debug options and current breakpoints. Any failure
     * terminates the whole debug target.
     *
     * @param session the newly connected DBGP session
     * @param monitor progress monitor (consumed in four 25% slices)
     */
    public void acceptDbgpThread(IDbgpSession session, IProgressMonitor monitor) {
        SubMonitor sub = SubMonitor.convert(monitor, 100);
        try {
            // fail fast if the session handshake already reported an error
            DbgpException error = session.getInfo().getError();
            if (error != null) {
                throw error;
            }
            session.configure(target.getOptions());
            session.getStreamManager().addListener(this);
            // "break on first line" is driven by whether any thread is mid step-into
            final boolean breakOnFirstLine = // target.breakOnFirstLineEnabled()
                isAnyThreadInStepInto();
            VdmThread thread = new VdmThread(target, session, this);
            thread.initialize(sub.newChild(25));
            addThread(thread);
            final boolean isFirstThread = waitingForThreads;
            if (isFirstThread) {
                waitingForThreads = false;
            }
            // breakpoints are initialized once per session (first thread), or per thread
            // when the engine does not support multiple threads
            if (isFirstThread || !isSupportsThreads(thread)) {
                SubMonitor child = sub.newChild(25);
                target.breakpointManager.initializeSession(thread.getDbgpSession(), child);
                child = sub.newChild(25);
                if (configurator != null) {
                    configurator.initializeBreakpoints(thread, child);
                }
            }
            DebugEventHelper.fireCreateEvent(thread);
            final boolean stopBeforeCode = thread.getDbgpSession().getDebugOptions().get(DebugOption.ENGINE_STOP_BEFORE_CODE);
            boolean executed = false;
            if (!breakOnFirstLine) {
                // resume unless a breakpoint sits exactly at the current position
                if (stopBeforeCode || !hasBreakpointAtCurrentPosition(thread)) {
                    thread.resumeAfterAccept();
                    executed = true;
                }
            } else {
                // step into the first executable line unless already on a valid stack
                if (stopBeforeCode || !isValidStack(thread)) {
                    thread.initialStepInto();
                    executed = true;
                }
            }
            if (!executed) {
                // thread stays suspended: make sure the stack and UI are up to date
                if (!thread.isStackInitialized()) {
                    thread.updateStack();
                }
                DebugEventHelper.fireChangeEvent(thread);
                DebugEventHelper.fireSuspendEvent(thread, DebugEvent.CLIENT_REQUEST);
            }
            sub.worked(25);
            fireThreadAccepted(thread, isFirstThread);
        } catch (Exception e) {
            try {
                target.terminate();
            } catch (DebugException e1) {
                // best-effort terminate during error handling; original error is logged below
            }
            VdmDebugPlugin.log(e);
        } finally {
            sub.done();
        }
    }
}
public class Pinyins { /** * 获取字符串首字母大写 , 指定分隔符 * @ param text * @ param separator * @ return * @ throws Exception */ public static String getFirstUpperLetter ( String text , String separator ) throws Exception { } }
PinyinFormat format = new PinyinFormat ( ) ; format . setUpperFormat ( PinyinUpperFormat . UPPER_FIRST_LETTER ) ; format . setToneFormat ( PinyinTONEFormat . TONE_NONE ) ; if ( $ . notNull ( separator ) ) { format . setSeparator ( separator ) ; } return getPinyin ( text , format ) ;
public class Rule {

    /**
     * Covers URLs such as:
     * {{http://blog.bjhargrave.com/2007/09/classforname-caches-defined-class-in.html}} direct link
     * {{{http://en.wikipedia.org/wiki/Double-checked_locking}}} direct link
     * {{{http://jira.codehaus.org/browse/JETTY-352}JETTY-352}} renamed link
     *
     * Rewrites each wiki-style URL reference in the description into an HTML anchor.
     *
     * @param description the raw description text
     * @return the description with URL markup replaced by anchor tags
     */
    private String handleUrls(String description) {
        String result = description;
        String[] urls = extractUrls(description);
        if (urls.length > 0) {
            for (String url : urls) {
                String copy = url;
                boolean trailingAcc = false;
                if (!copy.startsWith("{")) {
                    // bare URL: wrap as a plain anchor
                    copy = "<a>" + copy + "</a>";
                } else if (copy.startsWith("{http")) {
                    // triple-brace direct link: the char two past the token is a '}'
                    if ('}' == result.charAt(result.indexOf(copy) + copy.length() + 2)) {
                        trailingAcc = true;
                        copy = "<a>" + copy.substring(1) + "</a>";
                    } else {
                        // renamed link: "{url}label" becomes href + label
                        copy = "<a href=\"" + copy.replace("{", "").replace("}", "\">") + "</a>";
                    }
                } else if (copy.startsWith("{./")) {
                    // site-relative link: resolve against the CodeNarc site root
                    copy = "<a href=\"http://codenarc.sourceforge.net/" + copy.replace("{./", "").replace("}", "\">") + "</a>";
                }
                result = result.replace("{{" + url + "}}" + (trailingAcc ? "}" : ""), copy);
            }
        }
        return result;
    }
}
public class UpdateIdentityProviderConfigurationRequestMarshaller {

    /**
     * Marshall the given parameter object: writes the fleet ARN, identity provider type,
     * and SAML metadata fields through the protocol marshaller.
     *
     * @param updateIdentityProviderConfigurationRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller the fields are written to
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(UpdateIdentityProviderConfigurationRequest updateIdentityProviderConfigurationRequest, ProtocolMarshaller protocolMarshaller) {
        if (updateIdentityProviderConfigurationRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(updateIdentityProviderConfigurationRequest.getFleetArn(), FLEETARN_BINDING);
            protocolMarshaller.marshall(updateIdentityProviderConfigurationRequest.getIdentityProviderType(), IDENTITYPROVIDERTYPE_BINDING);
            protocolMarshaller.marshall(updateIdentityProviderConfigurationRequest.getIdentityProviderSamlMetadata(), IDENTITYPROVIDERSAMLMETADATA_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class FnInteger {

    /**
     * Determines whether the target object is null or not.
     *
     * @return a function returning false if the target Integer is null, true if not.
     */
    public static final Function<Integer, Boolean> isNotNull() {
        // The intermediate raw cast re-types the generic FnObject function for Integer;
        // safe because the underlying function only performs a null check.
        return (Function<Integer, Boolean>) ((Function) FnObject.isNotNull());
    }
}
public class Converters { /** * Registers the { @ link Period } converter . * @ param builder The GSON builder to register the converter with . * @ return A reference to { @ code builder } . */ public static GsonBuilder registerPeriod ( GsonBuilder builder ) { } }
if ( builder == null ) { throw new NullPointerException ( "builder cannot be null" ) ; } builder . registerTypeAdapter ( PERIOD_TYPE , new PeriodConverter ( ) ) ; return builder ;
public class PollStrategy { /** * Update the delay in milliseconds from the provided HTTP poll response . * @ param httpPollResponse The HTTP poll response to update the delay in milliseconds from . */ final void updateDelayInMillisecondsFrom ( HttpResponse httpPollResponse ) { } }
final Long parsedDelayInMilliseconds = delayInMillisecondsFrom ( httpPollResponse ) ; if ( parsedDelayInMilliseconds != null ) { delayInMilliseconds = parsedDelayInMilliseconds ; }
public class PrintConfigCommand {

    /**
     * Generate the Yaml representation of the given map.
     *
     * @param map the map to print out.
     * @return the Yaml representation.
     * @throws JsonProcessingException when the Json cannot be processed.
     */
    @SuppressWarnings("static-method")
    protected String generateYaml(Map<String, Object> map) throws JsonProcessingException {
        final YAMLFactory yamlFactory = new YAMLFactory();
        // suppress the leading "---" document start marker in the output
        yamlFactory.configure(Feature.WRITE_DOC_START_MARKER, false);
        final ObjectMapper mapper = new ObjectMapper(yamlFactory);
        return mapper.writerWithDefaultPrettyPrinter().writeValueAsString(map);
    }
}
public class DirectorySelectionPanel {

    /**
     * Set the "Include subdirectories" checkbox visible. If called outside the EDT this method
     * will switch to the UI thread using <code>SwingUtilities.invokeAndWait(Runnable)</code>.
     *
     * @param b If visible <code>true</code> else <code>false</code>.
     */
    public final void setIncludeSubdirsVisible(final boolean b) {
        if (SwingUtilities.isEventDispatchThread()) {
            setIncludeSubdirsVisibleIntern(b);
        } else {
            try {
                // block until the EDT has applied the visibility change
                SwingUtilities.invokeAndWait(new Runnable() {
                    public void run() {
                        setIncludeSubdirsVisibleIntern(b);
                    }
                });
            } catch (final Exception ex) {
                // deliberately best-effort: visibility change failures are ignored
                ignore();
            }
        }
    }
}
public class DefaultOverlayService { /** * { @ inheritDoc } */ @ Override public Boolean uninstallOverlay ( JComponent targetComponent , JComponent overlay , Insets insets ) { } }
overlay . setVisible ( Boolean . FALSE ) ; return Boolean . TRUE ;
public class StringSubject {

    /**
     * Fails if the string does not have the given length.
     *
     * @param expectedLength the expected length; must be non-negative
     */
    public void hasLength(int expectedLength) {
        // a negative expectation is a test-author error, not a subject failure
        checkArgument(expectedLength >= 0, "expectedLength(%s) must be >= 0", expectedLength);
        check("length()").that(actual().length()).isEqualTo(expectedLength);
    }
}
public class svm_train {

    /**
     * Read in a problem (in svmlight format): each line is
     * "label index:value index:value ...". Populates {@code prob} with the parsed
     * labels and sparse feature vectors, and defaults {@code param.gamma} to
     * 1/max_index when unset.
     *
     * @throws IOException if the input file cannot be read
     */
    private void read_problem() throws IOException {
        BufferedReader fp = new BufferedReader(new FileReader(input_file_name));
        java.util.Vector vy = new java.util.Vector();
        java.util.Vector vx = new java.util.Vector();
        int max_index = 0;
        while (true) {
            String line = fp.readLine();
            if (line == null) break;
            // split on whitespace and ':' so "index:value" yields two tokens
            StringTokenizer st = new StringTokenizer(line, " \t\n\r\f:");
            // first token on the line is the label
            vy.addElement(st.nextToken());
            // remaining tokens come in index/value pairs
            int m = st.countTokens() / 2;
            svm_node[] x = new svm_node[m];
            for (int j = 0; j < m; j++) {
                x[j] = new svm_node();
                x[j].index = atoi(st.nextToken());
                x[j].value = atof(st.nextToken());
            }
            // indices are ascending, so the last node carries the line's maximum
            if (m > 0) max_index = Math.max(max_index, x[m - 1].index);
            vx.addElement(x);
        }
        prob = new svm_problem();
        prob.l = vy.size();
        prob.x = new svm_node[prob.l][];
        for (int i = 0; i < prob.l; i++) prob.x[i] = (svm_node[]) vx.elementAt(i);
        prob.y = new double[prob.l];
        for (int i = 0; i < prob.l; i++) prob.y[i] = atof((String) vy.elementAt(i));
        // default gamma to 1/(number of features) when not set by the user
        if (param.gamma == 0) param.gamma = 1.0 / max_index;
        fp.close();
    }
}
public class AfplibFactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertGCBIMGRESToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class PrivateKeyExtensions {

    /**
     * Gets the key length in bits of the given {@link PrivateKey}.
     *
     * @param privateKey the private key, may be null
     * @return the key length in bits, or -1 when the key is null or of an unknown type
     */
    public static int getKeyLength(final PrivateKey privateKey) {
        int bitLength = -1;
        if (privateKey == null) {
            return bitLength;
        }
        // Each supported algorithm exposes its size differently; the checks are kept
        // independent, mirroring the distinct key interfaces.
        if (privateKey instanceof RSAPrivateKey) {
            bitLength = ((RSAPrivateKey) privateKey).getModulus().bitLength();
        }
        if (privateKey instanceof DSAPrivateKey) {
            bitLength = ((DSAPrivateKey) privateKey).getParams().getQ().bitLength();
        }
        if (privateKey instanceof ECPrivateKey) {
            bitLength = ((ECPrivateKey) privateKey).getParams().getCurve().getField().getFieldSize();
        }
        return bitLength;
    }
}
public class appfwglobal_binding { /** * Use this API to fetch a appfwglobal _ binding resource . */ public static appfwglobal_binding get ( nitro_service service ) throws Exception { } }
appfwglobal_binding obj = new appfwglobal_binding ( ) ; appfwglobal_binding response = ( appfwglobal_binding ) obj . get_resource ( service ) ; return response ;
public class Object2IntHashMap {

    /**
     * Overloaded version of {@link Map#get(Object)} that takes a primitive int key.
     * Due to type erasure have to rename the method.
     *
     * Open-addressing lookup: probes linearly from the key's hash slot until either
     * the key is found or an empty slot (holding missingValue) is hit.
     *
     * @param key for indexing the {@link Map}
     * @return the value if found otherwise missingValue
     */
    public int getValue(final K key) {
        // table length is a power of two, so the mask maps hashes into range
        @DoNotSub final int mask = values.length - 1;
        @DoNotSub int index = Hashing.hash(key, mask);
        int value;
        while (missingValue != (value = values[index])) {
            if (key.equals(keys[index])) {
                break;
            }
            // linear probe with wrap-around
            index = ++index & mask;
        }
        return value;
    }
}
public class D6CrudDeleteHelper { /** * Generate INSERT preparedSQL statement * @ param policy * RAW _ SQL or PREPARED _ STATEMENT * @ return */ String createDeletePreparedSQLStatement ( ) { } }
final Set < String > primaryKeyColumnNameSet = getPrimaryColumnNames ( ) ; final StringGrabber sgSQL = new StringGrabber ( ) ; // Get the table name final DBTable table = mModelClazz . getAnnotation ( DBTable . class ) ; final String tableName = table . tableName ( ) ; sgSQL . append ( "DELETE FROM " + tableName + " WHERE " ) ; // Scan all column names in the model class for ( String columnName : primaryKeyColumnNameSet ) { sgSQL . append ( columnName ) ; sgSQL . append ( " = ?, " ) ; } // end of for ( String columnName : primaryKeyColumnNameSet ) { if ( sgSQL . length ( ) > 2 ) { sgSQL . removeTail ( 2 ) ; } final String sql = sgSQL . toString ( ) ; log ( "#createUpdatePreparedSQLStatement sql=" + sql ) ; return sql ;
public class AbstractValidateableDialogPreference {

    /**
     * Obtains the color, which is used to indicate validation errors, from a specific typed array.
     *
     * @param typedArray
     *            The typed array, the error color should be obtained from, as an instance of the
     *            class {@link TypedArray}. The typed array may not be null
     */
    private void obtainErrorColor(@NonNull final TypedArray typedArray) {
        // fall back to the library's default error color when the attribute is absent
        setErrorColor(typedArray.getColor(R.styleable.AbstractValidateableView_errorColor, ContextCompat.getColor(getContext(), R.color.default_error_color)));
    }
}
public class XMLUtil {

    /**
     * Replies the URL that corresponds to the specified attribute's path.
     * <p>The path is an ordered list of tag names and ended by the name of the attribute.
     *
     * @param document is the XML document to explore.
     * @param caseSensitive indicates if the {@code path}'s components are case sensitive.
     * @param path is the list of tag names ended by the attribute's name.
     * @return the URL in the specified attribute or <code>null</code> if it was not found
     *     in the document
     */
    @Pure
    public static URL getAttributeURL(Node document, boolean caseSensitive, String... path) {
        assert document != null : AssertMessages.notNullParameter(0);
        // delegate with a null default: absence is reported as null
        return getAttributeURLWithDefault(document, caseSensitive, null, path);
    }
}
public class RunbookDraftsInner { /** * Publish runbook draft . * @ param resourceGroupName Name of an Azure Resource group . * @ param automationAccountName The name of the automation account . * @ param runbookName The parameters supplied to the publish runbook operation . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < Void > beginPublishAsync ( String resourceGroupName , String automationAccountName , String runbookName , final ServiceCallback < Void > serviceCallback ) { } }
return ServiceFuture . fromHeaderResponse ( beginPublishWithServiceResponseAsync ( resourceGroupName , automationAccountName , runbookName ) , serviceCallback ) ;
public class JQMRadioset {

    /**
     * Removes the given {@link JQMRadio} from this radioset: detaches its input and
     * label widgets from the fieldset and drops it from the internal radio list.
     *
     * @param radio - the radio to remove; a null argument is silently ignored
     */
    public void removeRadio(JQMRadio radio) {
        if (radio == null) return;
        TextBox inp = radio.getInput();
        // NOTE(review): when the radio has no input widget it is NOT removed from
        // `radios`, only its label is detached — confirm this asymmetry is intended.
        if (inp != null) {
            radios.remove(radio);
            fieldset.remove(inp);
        }
        if (radio.getLabel() != null) fieldset.remove(radio.getLabel());
    }
}
public class TypedAttributeValueMarshaller {

    /**
     * Marshall the given parameter object: writes each of the typed value variants
     * (string, binary, boolean, number, datetime) through the protocol marshaller.
     *
     * @param typedAttributeValue the value to marshall; must not be null
     * @param protocolMarshaller the marshaller the fields are written to
     * @throws SdkClientException if the value is null or marshalling fails
     */
    public void marshall(TypedAttributeValue typedAttributeValue, ProtocolMarshaller protocolMarshaller) {
        if (typedAttributeValue == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(typedAttributeValue.getStringValue(), STRINGVALUE_BINDING);
            protocolMarshaller.marshall(typedAttributeValue.getBinaryValue(), BINARYVALUE_BINDING);
            protocolMarshaller.marshall(typedAttributeValue.getBooleanValue(), BOOLEANVALUE_BINDING);
            protocolMarshaller.marshall(typedAttributeValue.getNumberValue(), NUMBERVALUE_BINDING);
            protocolMarshaller.marshall(typedAttributeValue.getDatetimeValue(), DATETIMEVALUE_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class RtfHeaderFooterGroup {

    /**
     * Set a RtfHeaderFooter to be displayed at a certain position. Switches the group
     * into multiple mode and stores the header/footer in the slot matching the
     * requested display location.
     *
     * @param headerFooter The RtfHeaderFooter to display
     * @param displayAt The display location to use (one of the RtfHeaderFooter.DISPLAY_* constants)
     */
    public void setHeaderFooter(RtfHeaderFooter headerFooter, int displayAt) {
        this.mode = MODE_MULTIPLE;
        // propagate this group's document and type before slotting it in
        headerFooter.setRtfDocument(this.document);
        headerFooter.setType(this.type);
        headerFooter.setDisplayAt(displayAt);
        switch (displayAt) {
            case RtfHeaderFooter.DISPLAY_ALL_PAGES:
                headerAll = headerFooter;
                break;
            case RtfHeaderFooter.DISPLAY_FIRST_PAGE:
                headerFirst = headerFooter;
                break;
            case RtfHeaderFooter.DISPLAY_LEFT_PAGES:
                headerLeft = headerFooter;
                break;
            case RtfHeaderFooter.DISPLAY_RIGHT_PAGES:
                headerRight = headerFooter;
                break;
        }
    }
}
public class MIMETypedStream { /** * Closes the underlying stream if it ' s not already closed . * In the event of an error , a warning will be logged . */ public void close ( ) { } }
if ( this . m_stream != null ) { try { this . m_stream . close ( ) ; this . m_stream = null ; } catch ( IOException e ) { logger . warn ( "Error closing stream" , e ) ; } }
public class Indicator {

    /**
     * Paints the indicator: layered background, on/off symbol, foreground, and an
     * optional disabled overlay, with high-quality rendering hints enabled.
     *
     * @param g the graphics context supplied by Swing
     */
    @Override
    protected void paintComponent(Graphics g) {
        // work on a copy so the caller's graphics state is untouched
        final Graphics2D G2 = (Graphics2D) g.create();
        G2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        G2.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        G2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
        G2.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        // Translate the coordinate system related to insets
        G2.translate(getFramelessOffset().getX(), getFramelessOffset().getY());
        // Draw combined background image
        G2.drawImage(bImage, 0, 0, null);
        // Draw the symbol image matching the current on/off state
        if (on) {
            G2.drawImage(symbolOnImage, 0, 0, null);
        } else {
            G2.drawImage(symbolOffImage, 0, 0, null);
        }
        // Draw combined foreground image
        G2.drawImage(fImage, 0, 0, null);
        if (!isEnabled()) {
            G2.drawImage(disabledImage, 0, 0, null);
        }
        // Translate the coordinate system back to original
        G2.translate(-getInnerBounds().x, -getInnerBounds().y);
        G2.dispose();
    }
}
public class CompilerOptions {

    /**
     * Resolve a list of object template names and template Files to a set of files based on
     * this instance's include directories.
     *
     * @param objectNames
     *            object template names to lookup
     * @param tplFiles
     *            template Files to process
     * @return unmodifiable set of the resolved file names
     */
    public Set<File> resolveFileList(List<String> objectNames, Collection<File> tplFiles) {
        // First just copy the named templates.
        Set<File> filesToProcess = new TreeSet<File>();
        if (tplFiles != null) {
            filesToProcess.addAll(tplFiles);
        }
        // Now loop over all of the object template names, lookup the files, and
        // add them to the set of files to process.
        if (objectNames != null) {
            for (String oname : objectNames) {
                SourceFile source = sourceRepository.retrievePanSource(oname);
                if (!source.isAbsent()) {
                    filesToProcess.add(source.getPath());
                } else {
                    // an unresolvable object template name is a fatal evaluation error
                    throw EvaluationException.create((SourceRange) null, (Context) null, MSG_CANNOT_LOCATE_OBJECT_TEMPLATE, oname);
                }
            }
        }
        return Collections.unmodifiableSet(filesToProcess);
    }
}
public class TrainingsImpl { /** * Associate a set of images with a set of tags . * @ param projectId The project id * @ param createImageTagsOptionalParameter the object representing the optional parameters to be set before calling this API * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the ImageTagCreateSummary object if successful . */ public ImageTagCreateSummary createImageTags ( UUID projectId , CreateImageTagsOptionalParameter createImageTagsOptionalParameter ) { } }
return createImageTagsWithServiceResponseAsync ( projectId , createImageTagsOptionalParameter ) . toBlocking ( ) . single ( ) . body ( ) ;
public class LongObjectHashMap {

    /**
     * Store the given value in the map, associating it with the given key. If the map already
     * contains an entry associated with the given key that entry will be replaced.
     *
     * @param key The key to associate with the entry
     * @param value The entry to be associated with the key; must not be null
     * @return The entry replaced by this put operation, or null if no entry was replaced.
     *         Null can also be returned if the given key was previously associated with a
     *         null value.
     * @throws IllegalArgumentException if {@code value} is null
     */
    public Object put(long key, Object value) {
        if (tc.isEntryEnabled()) Tr.entry(tc, "put", new Object[] { new Long(key), value, this });
        // null values are disallowed because null marks an empty bucket internally
        if (value == null) {
            if (tc.isEntryEnabled()) Tr.exit(tc, "put", "IllegalArgumentException");
            throw new IllegalArgumentException("Null is not a permitted value.");
        }
        // Find an empty bucket for the new value or
        // a bucket that contains an entry with the same key.
        int hash = getHashForNewEntry(key);
        // Store the value previously held in the bucket,
        // mapping DELETED to null so that it can be returned to the caller.
        Object previous = _values[hash] == DELETED ? null : _values[hash];
        // Add the new value and key to the map.
        _values[hash] = value;
        _keys[hash] = key;
        if (previous == null) {
            // This put resulted in a new entry being added to the map rather than an
            // existing entry being updated. Increment the count of objects in the map
            // and rehash if necessary.
            _currentLoad++;
            if (_currentLoad == _resizeThreshold) {
                // NOTE(review): this second Tr.entry looks like a copy/paste of the
                // method-entry trace; a debug trace was probably intended — confirm.
                if (tc.isEntryEnabled()) Tr.entry(tc, "put", new Object[] { new Long(key), value, this });
                // The current load of the map means that it has reached the desired
                // loadFactor. Increase the size of the map and rehash all the entries.
                // Take a copy of the map's contents so that we can add them back in
                // once it's been resized.
                long[] oldKeys = new long[_mapSize];
                Object[] oldValues = new Object[_mapSize];
                System.arraycopy(_keys, 0, oldKeys, 0, oldKeys.length);
                System.arraycopy(_values, 0, oldValues, 0, oldValues.length);
                // We know that the initial size of the map is a power of 2 so, by
                // doubling it, we can increase the map's capacity and still be sure
                // that its size is a power of two.
                _mapSize = (_mapSize * 2);
                if (tc.isDebugEnabled()) Tr.debug(tc, "mapSize = " + _mapSize);
                _resizeThreshold = (int) (((float) _mapSize) * ((float) _loadFactor) / 100f);
                if (tc.isDebugEnabled()) Tr.debug(tc, "resizeThreshold = " + _resizeThreshold);
                _values = new Object[_mapSize];
                _keys = new long[_mapSize];
                // Zero the count of objects in the map. This will be incremented to the
                // correct value as the entries are added back into the resized map below.
                _currentLoad = 0;
                // Loop through the old values adding live entries back into the map.
                for (int i = 0; i < oldValues.length; i++) {
                    Object oldValue = oldValues[i];
                    if (oldValue != null && oldValue != DELETED) {
                        put(oldKeys[i], oldValue);
                    }
                }
            }
        }
        if (tc.isEntryEnabled()) Tr.exit(tc, "put", previous);
        return previous;
    }
}
public class ExploitAssigns {
    /**
     * Checks whether the name referenced in {@code node} might have changed,
     * which would make {@code replacement} unsafe to substitute.
     *
     * @return whether the replacement can be made
     */
    private boolean isSafeReplacement(Node node, Node replacement) {
        // Simple names never need a check.
        if (node.isName()) {
            return true;
        }
        checkArgument(node.isGetProp());
        // Walk down to the root of the property access chain.
        Node root = node;
        while (root.isGetProp()) {
            root = root.getFirstChild();
        }
        if (!root.isName()) {
            return true;
        }
        // Unsafe only when the root name is assigned to within the replacement.
        return !isNameAssignedTo(root.getString(), replacement);
    }
}
public class RaXmlMetadataDeployer { /** * { @ inheritDoc } */ public Deployment deploy ( URL url , Context context , ClassLoader parent ) throws DeployException { } }
File archive = ( File ) context . get ( Constants . ATTACHMENT_ARCHIVE ) ; if ( archive == null ) throw new DeployException ( "Deployment " + url . toExternalForm ( ) + " not found" ) ; FileInputStream fis = null ; try { File raXml = new File ( archive , "META-INF/ra.xml" ) ; if ( raXml . exists ( ) ) { RaParser parser = new RaParser ( ) ; fis = new FileInputStream ( raXml ) ; XMLStreamReader xsr = XMLInputFactory . newInstance ( ) . createXMLStreamReader ( fis ) ; Connector c = parser . parse ( xsr ) ; context . put ( Constants . ATTACHMENT_RA_XML_METADATA , c ) ; } return null ; } catch ( Throwable t ) { throw new DeployException ( "Deployment " + url . toExternalForm ( ) + " failed" , t ) ; } finally { if ( fis != null ) { try { fis . close ( ) ; } catch ( IOException ignore ) { // Ignore } } }
public class Predict { /** * 返回插入的位置 * @ param score 得分 * @ param label 标签 * @ return 插入位置 */ public int add ( int label , float score ) { } }
int i = 0 ; int max ; if ( n == - 1 ) max = scores . size ( ) ; else max = n > scores . size ( ) ? scores . size ( ) : n ; for ( i = 0 ; i < max ; i ++ ) { if ( score > scores . get ( i ) ) break ; } // TODO : 没有删除多余的信息 if ( n != - 1 && i >= n ) return - 1 ; if ( i < scores . size ( ) ) { scores . insert ( i , score ) ; labels . insert ( i , label ) ; } else { scores . add ( score ) ; labels . add ( label ) ; } return i ;
public class HtmlTool { /** * Retrieves text content of the selected elements in HTML . Renders the element ' s text as it * would be displayed on the web page ( including its children ) . * @ param content * HTML content with the elements * @ param selector * CSS selector for elements to extract contents * @ return A list of element texts as rendered to display . Empty list if no elements are found . * @ since 1.0 */ public List < String > text ( String content , String selector ) { } }
Element body = parseContent ( content ) ; List < Element > elements = body . select ( selector ) ; List < String > texts = new ArrayList < String > ( ) ; for ( Element element : elements ) { texts . add ( element . text ( ) ) ; } return texts ;
public class DevicesStatusApi { /** * Update Device Status * Update Device Status * @ param deviceId Device ID . ( required ) * @ param body Body ( optional ) * @ return ApiResponse & lt ; DeviceStatus & gt ; * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiResponse < DeviceStatus > putDeviceStatusWithHttpInfo ( String deviceId , DeviceStatusPut body ) throws ApiException { } }
com . squareup . okhttp . Call call = putDeviceStatusValidateBeforeCall ( deviceId , body , null , null ) ; Type localVarReturnType = new TypeToken < DeviceStatus > ( ) { } . getType ( ) ; return apiClient . execute ( call , localVarReturnType ) ;
public class MiniDrawer { /** * returns always the original drawerItems and not the switched content * @ return */ private List < IDrawerItem > getDrawerItems ( ) { } }
return mDrawer . getOriginalDrawerItems ( ) != null ? mDrawer . getOriginalDrawerItems ( ) : mDrawer . getDrawerItems ( ) ;
public class JsiiClient { /** * Gets a value of a static property . * @ param fqn The FQN of the class * @ param property The name of the static property * @ return The value of the static property */ public JsonNode getStaticPropertyValue ( final String fqn , final String property ) { } }
ObjectNode req = makeRequest ( "sget" ) ; req . put ( "fqn" , fqn ) ; req . put ( "property" , property ) ; return this . runtime . requestResponse ( req ) . get ( "value" ) ;
public class HashCodeBuilder { /** * Uses reflection to build a valid hash code from the fields of { @ code object } . * This constructor uses two hard coded choices for the constants needed to build a hash code . * It uses < code > AccessibleObject . setAccessible < / code > to gain access to private fields . This means that it will * throw a security exception if run under a security manager , if the permissions are not set up correctly . It is * also not as efficient as testing explicitly . * Transient members will be not be used , as they are likely derived fields , and not part of the value of the * < code > Object < / code > . * Static fields will not be tested . Superclass fields will be included . If no fields are found to include * in the hash code , the result of this method will be constant . * @ param object * the Object to create a < code > hashCode < / code > for * @ param excludeFields * Collection of String field names to exclude from use in calculation of hash code * @ return int hash code * @ throws IllegalArgumentException * if the object is < code > null < / code > */ public static int reflectionHashCode ( final Object object , final Collection < String > excludeFields ) { } }
return reflectionHashCode ( object , ArrayUtil . toArray ( excludeFields , String . class ) ) ;
public class LogManager { /** * Adds a log writer , either global or to a particular log service . * Note that the writer is added to the log service ( s ) regardless if it was already * added before . < br > * If the writer is added global , it will receive logging information from all log * services that are already registered or will be registered in the future . * @ param logService name of a log service ; to add the writer global , use an empty * string or < code > null < / code > * @ param writer log writer to add * @ return true if the writer was added successfully , < br > * false a specified log service name was not found * @ see LogService # addWriter ( LogWriter ) */ public boolean addWriter ( String logService , LogWriter writer ) { } }
if ( logService != null && logService . length ( ) > 0 ) { final LogService l = ( LogService ) loggers . get ( logService ) ; if ( l != null ) l . addWriter ( writer ) ; return l != null ; } synchronized ( loggers ) { writers . add ( writer ) ; for ( final Iterator i = loggers . values ( ) . iterator ( ) ; i . hasNext ( ) ; ) ( ( LogService ) i . next ( ) ) . addWriter ( writer ) ; return true ; }
public class GrailsClassUtils { /** * < p > Tests whether or not the left hand type is compatible with the right hand type in Groovy * terms , i . e . can the left type be assigned a value of the right hand type in Groovy . < / p > * < p > This handles Java primitive type equivalence and uses isAssignableFrom for all other types , * with a bit of magic for native types and polymorphism i . e . Number assigned an int . * If either parameter is null an exception is thrown < / p > * @ param leftType The type of the left hand part of a notional assignment * @ param rightType The type of the right hand part of a notional assignment * @ return true if values of the right hand type can be assigned in Groovy to variables of the left hand type . */ public static boolean isGroovyAssignableFrom ( Class < ? > leftType , Class < ? > rightType ) { } }
if ( leftType == null ) { throw new NullPointerException ( "Left type is null!" ) ; } if ( rightType == null ) { throw new NullPointerException ( "Right type is null!" ) ; } if ( leftType == Object . class ) { return true ; } if ( leftType == rightType ) { return true ; } // check for primitive type equivalence Class < ? > r = PRIMITIVE_TYPE_COMPATIBLE_CLASSES . get ( leftType ) ; boolean result = r == rightType ; if ( ! result ) { // If no primitive < - > wrapper match , it may still be assignable // from polymorphic primitives i . e . Number - > int ( AKA Integer ) if ( rightType . isPrimitive ( ) ) { // see if incompatible r = PRIMITIVE_TYPE_COMPATIBLE_CLASSES . get ( rightType ) ; if ( r != null ) { result = leftType . isAssignableFrom ( r ) ; } } else { // Otherwise it may just be assignable using normal Java polymorphism result = leftType . isAssignableFrom ( rightType ) ; } } return result ;
public class SwingUtil { /** * Centers component < code > b < / code > within component < code > a < / code > . */ public static void centerComponent ( Component a , Component b ) { } }
Dimension asize = a . getSize ( ) , bsize = b . getSize ( ) ; b . setLocation ( ( asize . width - bsize . width ) / 2 , ( asize . height - bsize . height ) / 2 ) ;
public class InputBootstrapper { /** * Other private methods : */ private final void reportPseudoAttrProblem ( String attrName , String got , String expVal1 , String expVal2 ) throws WstxException { } }
String expStr = ( expVal1 == null ) ? "" : ( "; expected \"" + expVal1 + "\" or \"" + expVal2 + "\"" ) ; if ( got == null || got . length ( ) == 0 ) { throw new WstxParsingException ( "Missing XML pseudo-attribute '" + attrName + "' value" + expStr , getLocation ( ) ) ; } throw new WstxParsingException ( "Invalid XML pseudo-attribute '" + attrName + "' value " + got + expStr , getLocation ( ) ) ;
public class Gen { /** * Including super classes ' fields . */ List < VariableElement > getAllFields ( TypeElement subclazz ) { } }
List < VariableElement > fields = new ArrayList < > ( ) ; TypeElement cd = null ; Stack < TypeElement > s = new Stack < > ( ) ; cd = subclazz ; while ( true ) { s . push ( cd ) ; TypeElement c = ( TypeElement ) ( types . asElement ( cd . getSuperclass ( ) ) ) ; if ( c == null ) break ; cd = c ; } while ( ! s . empty ( ) ) { cd = s . pop ( ) ; fields . addAll ( ElementFilter . fieldsIn ( cd . getEnclosedElements ( ) ) ) ; } return fields ;
public class R { /** * Renders the Childrens of a Component * @ param fc * @ param component * @ throws IOException */ public static void renderChildren ( FacesContext fc , UIComponent component ) throws IOException { } }
for ( Iterator < UIComponent > iterator = component . getChildren ( ) . iterator ( ) ; iterator . hasNext ( ) ; ) { UIComponent child = ( UIComponent ) iterator . next ( ) ; renderChild ( fc , child ) ; }
public class xen_brvpx_image { /** * < pre > * Converts API response of bulk operation into object and returns the object array in case of get request . * < / pre > */ protected base_resource [ ] get_nitro_bulk_response ( nitro_service service , String response ) throws Exception { } }
xen_brvpx_image_responses result = ( xen_brvpx_image_responses ) service . get_payload_formatter ( ) . string_to_resource ( xen_brvpx_image_responses . class , response ) ; if ( result . errorcode != 0 ) { if ( result . errorcode == SESSION_NOT_EXISTS ) service . clear_session ( ) ; throw new nitro_exception ( result . message , result . errorcode , ( base_response [ ] ) result . xen_brvpx_image_response_array ) ; } xen_brvpx_image [ ] result_xen_brvpx_image = new xen_brvpx_image [ result . xen_brvpx_image_response_array . length ] ; for ( int i = 0 ; i < result . xen_brvpx_image_response_array . length ; i ++ ) { result_xen_brvpx_image [ i ] = result . xen_brvpx_image_response_array [ i ] . xen_brvpx_image [ 0 ] ; } return result_xen_brvpx_image ;
public class DefaultFaceletContext { /** * @ Override * public TemplateManager popCompositeComponentClient ( boolean cleanClientStack ) * if ( ! this . _ compositeComponentClients . isEmpty ( ) ) * if ( cleanClientStack ) * _ clientsStack . get ( _ currentClientStack ) . clear ( ) ; * _ currentClientStack - - ; * return this . _ compositeComponentClients . remove ( 0 ) ; * if ( _ currentTemplateContext > 0) * TemplateManager tm = _ isolatedTemplateContext . get ( _ currentTemplateContext ) . getCompositeComponentClient ( ) ; * if ( cleanClientStack ) * _ isolatedTemplateContext . get ( _ currentTemplateContext ) . clear ( ) ; * _ currentTemplateContext - - ; * return tm ; * return null ; * @ Override * public void pushCompositeComponentClient ( final TemplateClient client ) * this . _ compositeComponentClients . add ( 0 , new CompositeComponentTemplateManager ( this . _ facelet , client ) ) ; * if ( _ currentClientStack + 1 < = _ clientsStack . size ( ) ) * _ clientsStack . add ( new LinkedList < TemplateManager > ( ) ) ; * _ currentClientStack + + ; * if ( _ currentTemplateContext + 1 < = _ isolatedTemplateContext . size ( ) ) * _ isolatedTemplateContext . add ( new IsolatedTemplateContextImpl ( ) ) ; * _ currentTemplateContext + + ; * _ isolatedTemplateContext . get ( _ currentTemplateContext ) . setCompositeComponentClient ( * new CompositeComponentTemplateManager ( this . _ facelet , client ) ) ; * @ Override * public void pushCompositeComponentClient ( final TemplateManager client ) * this . _ compositeComponentClients . add ( 0 , client ) ; * if ( _ currentClientStack + 1 < _ clientsStack . size ( ) ) * _ clientsStack . add ( new LinkedList < TemplateManager > ( ) ) ; * _ currentClientStack + + ; * if ( _ currentTemplateContext + 1 < _ isolatedTemplateContext . size ( ) ) * _ isolatedTemplateContext . add ( new IsolatedTemplateContextImpl ( ) ) ; * _ currentTemplateContext + + ; * _ isolatedTemplateContext . get ( _ currentTemplateContext ) . 
setCompositeComponentClient ( client ) ; */ @ Override public void pushCompositeComponentClient ( final TemplateClient client ) { } }
TemplateContext itc = new TemplateContextImpl ( ) ; itc . setCompositeComponentClient ( new CompositeComponentTemplateManager ( this . _facelet , client , getPageContext ( ) ) ) ; _isolatedTemplateContext . add ( itc ) ; _currentTemplateContext ++ ; _defaultVarMapper . setTemplateContext ( itc ) ;
public class FileReceiver { /** * Handles a group input file . */ private void handleFile ( final String filePath , final AsyncFile file , final InputGroup group ) { } }
final AtomicLong position = new AtomicLong ( ) ; final AtomicBoolean complete = new AtomicBoolean ( ) ; final AtomicInteger handlerCount = new AtomicInteger ( ) ; group . messageHandler ( new Handler < Buffer > ( ) { @ Override public synchronized void handle ( Buffer buffer ) { file . write ( buffer , position . get ( ) , new Handler < AsyncResult < Void > > ( ) { @ Override public void handle ( AsyncResult < Void > result ) { if ( result . failed ( ) ) { file . close ( ) ; group . messageHandler ( null ) ; group . endHandler ( null ) ; try { input . vertx ( ) . fileSystem ( ) . deleteSync ( filePath ) ; } catch ( Exception e ) { } if ( exceptionHandler != null ) { exceptionHandler . handle ( result . cause ( ) ) ; } } // Decrement the handler count . handlerCount . decrementAndGet ( ) ; // We need to handle for cases where the end handler was called // before all the data was written to the file . if ( complete . get ( ) && handlerCount . get ( ) == 0 ) { closeFile ( filePath , file ) ; } } } ) ; // Increment the handler count and current buffer position . handlerCount . incrementAndGet ( ) ; position . addAndGet ( buffer . length ( ) ) ; } } ) ; group . endHandler ( new Handler < Void > ( ) { @ Override public void handle ( Void event ) { // If there are still handlers processing the data then wait for // those handlers to complete by simply setting the complete flag . complete . set ( true ) ; if ( handlerCount . get ( ) == 0 ) { closeFile ( filePath , file ) ; } } } ) ;
public class SharedUtils { /** * Determine whether String is a value binding expression or not . */ static boolean isExpression ( String expression ) { } }
if ( null == expression ) { return false ; } // check to see if attribute has an expression int start = expression . indexOf ( "#{" ) ; return start != - 1 && expression . indexOf ( '}' , start + 2 ) != - 1 ;
public class SailthruClient { /** * Get template information * @ param template template name * @ throws IOException */ public JsonResponse getTemplate ( String template ) throws IOException { } }
Map < String , Object > data = new HashMap < String , Object > ( ) ; data . put ( Template . PARAM_TEMPLATE , template ) ; return apiGet ( ApiAction . template , data ) ;
public class ModelPackageStatusItemMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ModelPackageStatusItem modelPackageStatusItem , ProtocolMarshaller protocolMarshaller ) { } }
if ( modelPackageStatusItem == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( modelPackageStatusItem . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( modelPackageStatusItem . getStatus ( ) , STATUS_BINDING ) ; protocolMarshaller . marshall ( modelPackageStatusItem . getFailureReason ( ) , FAILUREREASON_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CmsCheckableDatePanel { /** * Returns all dates with the specified check state , if the check state is < code > null < / code > , all dates are returned . * @ param checkState the check state , the returned dates should have . * @ return all dates with the specified check state , if the check state is < code > null < / code > , all dates are returned . */ public SortedSet < Date > getDates ( Boolean checkState ) { } }
TreeSet < Date > result = new TreeSet < Date > ( ) ; for ( CmsCheckBox cb : m_checkBoxes ) { if ( ( checkState == null ) || ( cb . isChecked ( ) == checkState . booleanValue ( ) ) ) { Date date = ( Date ) cb . getElement ( ) . getPropertyObject ( "date" ) ; result . add ( date ) ; } } return result ;
public class PiwikRequest { /** * We recommend to set the * search count to the number of search results displayed on the results page . * When keywords are tracked with { @ code Search Results Count = 0 } they will appear in * the " No Result Search Keyword " report . SearchQuery must first be set . * @ param searchResultsCount the search results count to set . A null value will remove this parameter */ public void setSearchResultsCount ( Long searchResultsCount ) { } }
if ( searchResultsCount != null && getSearchQuery ( ) == null ) { throw new IllegalStateException ( "SearchQuery must be set before SearchResultsCount can be set." ) ; } setParameter ( SEARCH_RESULTS_COUNT , searchResultsCount ) ;
public class RelationalOperations { /** * Returns true if pt _ a equals pt _ b . */ private static boolean pointEqualsPoint_ ( Point2D pt_a , Point2D pt_b , double tolerance , ProgressTracker progress_tracker ) { } }
if ( Point2D . sqrDistance ( pt_a , pt_b ) <= tolerance * tolerance ) return true ; return false ;
public class WebSocketHandler { /** * Sends a text message using this object ' s websocket session . * @ param message json binary message */ public void sendMessage ( final String message ) { } }
try { session . sendMessage ( new TextMessage ( message ) ) ; } catch ( IOException e ) { logger . error ( "[sendTextMessage]" , e ) ; }
public class DescribeBuildRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DescribeBuildRequest describeBuildRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( describeBuildRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( describeBuildRequest . getBuildId ( ) , BUILDID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class MetadataUtil { /** * Logs out metadata information . * @ param metadata */ public void log ( final Metadata metadata ) { } }
final StringBuilder sb = new StringBuilder ( ) ; for ( final MetadataItem item : metadata . getGroupList ( ) ) { sb . append ( LINE ) ; sb . append ( NEWLINE ) ; sb . append ( LINE ) ; sb . append ( "Group: " + item . getName ( ) ) ; sb . append ( NEWLINE ) ; for ( MetadataElement element : item . getElements ( ) ) { sb . append ( " Element : " + element . getName ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " Type : " + element . getType ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " MinOccurs: " + element . getMinOccurs ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " MaxOccurs: " + element . getMaxOccurs ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " IsAttr : " + element . getIsAttribute ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( NEWLINE ) ; } for ( MetadataElement element : item . getReferences ( ) ) { sb . append ( " Ref : " + element . getRef ( ) ) ; sb . append ( NEWLINE ) ; } sb . append ( NEWLINE ) ; } for ( MetadataEnum enumItem : metadata . getEnumList ( ) ) { sb . append ( LINE ) ; sb . append ( NEWLINE ) ; sb . append ( "Enum: " + enumItem . getName ( ) ) ; sb . append ( NEWLINE ) ; for ( String enumValue : enumItem . getValueList ( ) ) { sb . append ( " Value : " + enumValue ) ; sb . append ( NEWLINE ) ; } sb . append ( NEWLINE ) ; } for ( MetadataItem item : metadata . getClassList ( ) ) { sb . append ( LINE ) ; sb . append ( NEWLINE ) ; sb . append ( "Class: " + item . getName ( ) ) ; sb . append ( NEWLINE ) ; for ( MetadataElement element : item . getElements ( ) ) { sb . append ( " Element : " + element . getName ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " Type : " + element . getType ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " MinOccurs: " + element . getMinOccurs ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " MaxOccurs: " + element . getMaxOccurs ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( " IsAttr : " + element . getIsAttribute ( ) ) ; sb . append ( NEWLINE ) ; sb . 
append ( NEWLINE ) ; } for ( MetadataElement element : item . getReferences ( ) ) { sb . append ( " Ref : " + element . getRef ( ) ) ; sb . append ( NEWLINE ) ; } sb . append ( NEWLINE ) ; } for ( MetadataItem dataType : metadata . getDataTypeList ( ) ) { sb . append ( LINE ) ; sb . append ( NEWLINE ) ; sb . append ( "Name : " + dataType . getName ( ) ) ; sb . append ( NEWLINE ) ; sb . append ( "MappedTo: " + dataType . getMappedTo ( ) ) ; sb . append ( NEWLINE ) ; } // Log log . info ( sb . toString ( ) ) ;
public class CrystClustWriter { /** * Private procedures */ private void writeChemSequence ( IChemSequence cs ) throws UnsupportedChemObjectException { } }
int count = cs . getChemModelCount ( ) ; for ( int i = 0 ; i < count ; i ++ ) { writeln ( "frame: " + ( i + 1 ) ) ; writeCrystal ( cs . getChemModel ( i ) . getCrystal ( ) ) ; }
public class NameUtil { /** * d179573 Begins */ private static boolean allHexDigits ( String str , int start , int end ) { } }
boolean rtn = true ; for ( int i = start ; i < end ; ++ i ) { if ( "0123456789abcdefABCDEF" . indexOf ( str . charAt ( i ) ) == - 1 ) { rtn = false ; break ; } } return rtn ;
public class IfcCurveStyleFontImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ SuppressWarnings ( "unchecked" ) public EList < IfcCurveStyleFontPattern > getPatternList ( ) { } }
return ( EList < IfcCurveStyleFontPattern > ) eGet ( Ifc2x3tc1Package . Literals . IFC_CURVE_STYLE_FONT__PATTERN_LIST , true ) ;
public class FileManagerImpl { /** * Debugging and testing interface */ public static void main ( String args [ ] ) throws FileManagerException , IOException { } }
HTODDynacache hdc = new HTODDynacache ( ) ; FileManagerImpl mem_manager = new FileManagerImpl ( "foo1" , // filename false , // coalsece "rw" , // mode MULTIVOLUME , // type hdc ) ; mem_manager . run_tests ( ) ; OutputStreamWriter out = new OutputStreamWriter ( System . out ) ; mem_manager . dump_disk_memory ( out ) ; mem_manager . dump_memory ( out ) ; mem_manager . dump_stats ( out , false ) ; mem_manager . cache_free_storage_info ( ) ; mem_manager . dump_disk_memory ( out ) ; out . flush ( ) ; out . close ( ) ;
public class AbstractFunction { /** * Creates a trivial factory that always return the provided function . */ public static Function . Factory factory ( final Function fun ) { } }
return new Function . Factory ( ) { public Function create ( String ksName , String cfName ) { return fun ; } } ;
public class MonitorService { /** * Adds the given label to the monitor with the given id . * @ param monitorId The id of the monitor to update * @ param label The label to add * @ return The label that was added */ public Optional < Label > createLabel ( String monitorId , Label label ) { } }
HTTP . POST ( String . format ( "/v1/monitors/%s/labels" , monitorId ) , label . getKey ( ) ) ; return Optional . of ( label ) ;
public class CommerceAddressRestrictionLocalServiceUtil { /** * Returns a range of all the commerce address restrictions . * Useful when paginating results . Returns a maximum of < code > end - start < / code > instances . < code > start < / code > and < code > end < / code > are not primary keys , they are indexes in the result set . Thus , < code > 0 < / code > refers to the first result in the set . Setting both < code > start < / code > and < code > end < / code > to { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } will return the full result set . If < code > orderByComparator < / code > is specified , then the query will include the given ORDER BY logic . If < code > orderByComparator < / code > is absent and pagination is required ( < code > start < / code > and < code > end < / code > are not { @ link com . liferay . portal . kernel . dao . orm . QueryUtil # ALL _ POS } ) , then the query will include the default ORDER BY logic from { @ link com . liferay . commerce . model . impl . CommerceAddressRestrictionModelImpl } . If both < code > orderByComparator < / code > and pagination are absent , for performance reasons , the query will not have an ORDER BY clause and the returned result set will be sorted on by the primary key in an ascending order . * @ param start the lower bound of the range of commerce address restrictions * @ param end the upper bound of the range of commerce address restrictions ( not inclusive ) * @ return the range of commerce address restrictions */ public static java . util . List < com . liferay . commerce . model . CommerceAddressRestriction > getCommerceAddressRestrictions ( int start , int end ) { } }
return getService ( ) . getCommerceAddressRestrictions ( start , end ) ;
public class ByteIterator { /** * Returns an infinite { @ code ByteIterator } . * @ param supplier * @ return */ public static ByteIterator generate ( final ByteSupplier supplier ) { } }
N . checkArgNotNull ( supplier ) ; return new ByteIterator ( ) { @ Override public boolean hasNext ( ) { return true ; } @ Override public byte nextByte ( ) { return supplier . getAsByte ( ) ; } } ;
public class Curve { /** * Get the point at a particular location on the curve * @ param t A value between 0 and 1 defining the location of the curve the point is at * @ return The point on the curve */ public Vector2f pointAt ( float t ) { } }
float a = 1 - t ; float b = t ; float f1 = a * a * a ; float f2 = 3 * a * a * b ; float f3 = 3 * a * b * b ; float f4 = b * b * b ; float nx = ( p1 . x * f1 ) + ( c1 . x * f2 ) + ( c2 . x * f3 ) + ( p2 . x * f4 ) ; float ny = ( p1 . y * f1 ) + ( c1 . y * f2 ) + ( c2 . y * f3 ) + ( p2 . y * f4 ) ; return new Vector2f ( nx , ny ) ;
public class AmqpMessageHandlerService { /** * Method to update the action status of an action through the event . * @ param actionUpdateStatus * the object form the ampq message */ private void updateActionStatus ( final Message message ) { } }
final DmfActionUpdateStatus actionUpdateStatus = convertMessage ( message , DmfActionUpdateStatus . class ) ; final Action action = checkActionExist ( message , actionUpdateStatus ) ; final List < String > messages = actionUpdateStatus . getMessage ( ) ; if ( isCorrelationIdNotEmpty ( message ) ) { messages . add ( RepositoryConstants . SERVER_MESSAGE_PREFIX + "DMF message correlation-id " + message . getMessageProperties ( ) . getCorrelationId ( ) ) ; } final Status status = mapStatus ( message , actionUpdateStatus , action ) ; final ActionStatusCreate actionStatus = entityFactory . actionStatus ( ) . create ( action . getId ( ) ) . status ( status ) . messages ( messages ) ; final Action addUpdateActionStatus = getUpdateActionStatus ( status , actionStatus ) ; if ( ! addUpdateActionStatus . isActive ( ) || ( addUpdateActionStatus . hasMaintenanceSchedule ( ) && addUpdateActionStatus . isMaintenanceWindowAvailable ( ) ) ) { lookIfUpdateAvailable ( action . getTarget ( ) ) ; }
public class DatabasesInner { /** * Gets a list of databases in an elastic pool . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; DatabaseInner & gt ; object */ public Observable < ServiceResponse < Page < DatabaseInner > > > listByElasticPoolNextWithServiceResponseAsync ( final String nextPageLink ) { } }
return listByElasticPoolNextSinglePageAsync ( nextPageLink ) . concatMap ( new Func1 < ServiceResponse < Page < DatabaseInner > > , Observable < ServiceResponse < Page < DatabaseInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < DatabaseInner > > > call ( ServiceResponse < Page < DatabaseInner > > page ) { String nextPageLink = page . body ( ) . nextPageLink ( ) ; if ( nextPageLink == null ) { return Observable . just ( page ) ; } return Observable . just ( page ) . concatWith ( listByElasticPoolNextWithServiceResponseAsync ( nextPageLink ) ) ; } } ) ;
public class ApiOvhPackxdsl { /** * Alter this object properties * REST : PUT / pack / xdsl / { packName } * @ param body [ required ] New object properties * @ param packName [ required ] The internal name of your pack */ public void packName_PUT ( String packName , OvhPackAdsl body ) throws IOException { } }
String qPath = "/pack/xdsl/{packName}" ; StringBuilder sb = path ( qPath , packName ) ; exec ( qPath , "PUT" , sb . toString ( ) , body ) ;
public class NWiseExtension { /** * Finds all nWise combinations of a set of variables , each with a given domain of values * @ param nWise the number of variables in each combination * @ param coVariables the varisbles * @ param variableDomains the domains * @ return all nWise combinations of the set of variables */ public List < Map < String , String > > produceNWise ( int nWise , String [ ] coVariables , Map < String , String [ ] > variableDomains ) { } }
List < Set < String > > tuples = makeNWiseTuples ( coVariables , nWise ) ; List < Map < String , String > > testCases = new ArrayList < > ( ) ; for ( Set < String > tuple : tuples ) { testCases . addAll ( expandTupleIntoTestCases ( tuple , variableDomains ) ) ; } return testCases ;
public class UploadOfflineData {
    /**
     * Returns a new user identifier with the specified type and value,
     * hashing the value first when the identifier type requires it.
     */
    private static UserIdentifier createUserIdentifier(OfflineDataUploadUserIdentifierType identifierType, String value) throws UnsupportedEncodingException {
        // Hashed identifier types must carry a SHA-256 digest, not the raw value.
        final String identifierValue =
                HASHED_IDENTIFIER_TYPES.contains(identifierType) ? toSHA256String(value) : value;

        final UserIdentifier userIdentifier = new UserIdentifier();
        userIdentifier.setUserIdentifierType(identifierType);
        userIdentifier.setValue(identifierValue);
        return userIdentifier;
    }
}
public class ServletUtil { /** * Handles a request for static resources . * @ param request the http request . * @ param response the http response . */ public static void handleStaticResourceRequest ( final HttpServletRequest request , final HttpServletResponse response ) { } }
String staticRequest = request . getParameter ( WServlet . STATIC_RESOURCE_PARAM_NAME ) ; try { InternalResource staticResource = InternalResourceMap . getResource ( staticRequest ) ; boolean headersOnly = "HEAD" . equals ( request . getMethod ( ) ) ; if ( staticResource == null ) { LOG . warn ( "Static resource [" + staticRequest + "] not found." ) ; response . setStatus ( HttpServletResponse . SC_NOT_FOUND ) ; return ; } InputStream resourceStream = staticResource . getStream ( ) ; if ( resourceStream == null ) { LOG . warn ( "Static resource [" + staticRequest + "] not found. Stream for content is null." ) ; response . setStatus ( HttpServletResponse . SC_NOT_FOUND ) ; return ; } int size = resourceStream . available ( ) ; String fileName = WebUtilities . encodeForContentDispositionHeader ( staticRequest . substring ( staticRequest . lastIndexOf ( '/' ) + 1 ) ) ; if ( size > 0 ) { response . setContentLength ( size ) ; } response . setContentType ( WebUtilities . getContentType ( staticRequest ) ) ; response . setHeader ( "Cache-Control" , CacheType . CONTENT_CACHE . getSettings ( ) ) ; String param = request . getParameter ( WContent . URL_CONTENT_MODE_PARAMETER_KEY ) ; if ( "inline" . equals ( param ) ) { response . setHeader ( "Content-Disposition" , "inline; filename=" + fileName ) ; } else if ( "attach" . equals ( param ) ) { response . setHeader ( "Content-Disposition" , "attachment; filename=" + fileName ) ; } else { // added " filename = " to comply with https : / / tools . ietf . org / html / rfc6266 response . setHeader ( "Content-Disposition" , "filename=" + fileName ) ; } if ( ! headersOnly ) { StreamUtil . copy ( resourceStream , response . getOutputStream ( ) ) ; } } catch ( IOException e ) { LOG . warn ( "Could not process static resource [" + staticRequest + "]. " , e ) ; response . reset ( ) ; response . setStatus ( HttpServletResponse . SC_NOT_FOUND ) ; }
public class UtcProperty { /** * { @ inheritDoc } */ public void validate ( ) throws ValidationException { } }
super . validate ( ) ; if ( getDate ( ) != null && ! ( getDate ( ) instanceof DateTime ) ) { throw new ValidationException ( "Property must have a DATE-TIME value" ) ; } final DateTime dateTime = ( DateTime ) getDate ( ) ; if ( dateTime != null && ! dateTime . isUtc ( ) ) { throw new ValidationException ( getName ( ) + ": DATE-TIME value must be specified in UTC time" ) ; }
public class ActivitysInner { /** * Retrieve a list of activities in the module identified by module name . * ServiceResponse < PageImpl < ActivityInner > > * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the PagedList & lt ; ActivityInner & gt ; object wrapped in { @ link ServiceResponse } if successful . */ public Observable < ServiceResponse < Page < ActivityInner > > > listByModuleNextSinglePageAsync ( final String nextPageLink ) { } }
if ( nextPageLink == null ) { throw new IllegalArgumentException ( "Parameter nextPageLink is required and cannot be null." ) ; } String nextUrl = String . format ( "%s" , nextPageLink ) ; return service . listByModuleNext ( nextUrl , this . client . acceptLanguage ( ) , this . client . userAgent ( ) ) . flatMap ( new Func1 < Response < ResponseBody > , Observable < ServiceResponse < Page < ActivityInner > > > > ( ) { @ Override public Observable < ServiceResponse < Page < ActivityInner > > > call ( Response < ResponseBody > response ) { try { ServiceResponse < PageImpl < ActivityInner > > result = listByModuleNextDelegate ( response ) ; return Observable . just ( new ServiceResponse < Page < ActivityInner > > ( result . body ( ) , result . response ( ) ) ) ; } catch ( Throwable t ) { return Observable . error ( t ) ; } } } ) ;
public class FileUtil { /** * Deletes a directory recursively . * @ param pFile the file to delete * @ return { @ code true } if the file was deleted sucessfully * @ throws IOException if an i / o error occurs during delete . */ private static boolean deleteDir ( final File pFile ) throws IOException { } }
// Recusively delete all files / subfolders // Deletes the files using visitor pattern , to avoid allocating // a file array , which may throw OutOfMemoryExceptions for // large directories / in low memory situations class DeleteFilesVisitor implements Visitor < File > { private int failedCount = 0 ; private IOException exception = null ; public void visit ( final File pFile ) { try { if ( ! delete ( pFile , true ) ) { failedCount ++ ; } } catch ( IOException e ) { failedCount ++ ; if ( exception == null ) { exception = e ; } } } boolean succeeded ( ) throws IOException { if ( exception != null ) { throw exception ; } return failedCount == 0 ; } } DeleteFilesVisitor fileDeleter = new DeleteFilesVisitor ( ) ; visitFiles ( pFile , null , fileDeleter ) ; // If any of the deletes above failed , this will fail ( or return false ) return fileDeleter . succeeded ( ) && pFile . delete ( ) ;
public class SlideStackModel {
    /**
     * Go to the next slide (or the next step within the current slide).
     *
     * @param skipSlideStep when {@code true}, skip any remaining steps of the
     *            current slide and advance straight to the next slide
     */
    public void next(final boolean skipSlideStep) {
        synchronized (this) {
            // Try to display the next slide step first
            // If no slide step is remaining, display the next slide
            // NOTE(review): '&&' binds tighter than '||', so this condition groups as
            // skipSlideStep || (nextStep() && slidePosition < lastIndex). Confirm that
            // grouping matches the intended "step first, then slide" behaviour.
            if (skipSlideStep || this.selectedSlideModel.nextStep() && this.slidePosition < getPresentationService().getPresentation().getSlides().getSlide().size() - 1) {
                // Advance the position, clamped to the last slide index, and show that slide.
                this.slidePosition = Math.min(this.slidePosition + 1, getPresentationService().getPresentation().getSlides().getSlide().size() - 1);
                displaySlide(getPresentationService().getPresentation().getSlides().getSlide().get(this.slidePosition), false);
            }
        }
    }
}
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcFillAreaStyleTiles ( ) { } }
if ( ifcFillAreaStyleTilesEClass == null ) { ifcFillAreaStyleTilesEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 270 ) ; } return ifcFillAreaStyleTilesEClass ;
public class DoubleBond2DParity { /** * Calculate the configuration of the double bond as a parity . * @ return opposite ( + 1 ) , together ( - 1 ) or unspecified ( 0) */ @ Override public int parity ( ) { } }
return parity ( l1 , l2 , r ) * parity ( r1 , r2 , l ) ;
public class DnsBatch { /** * Adds a request representing the " create zone " operation to this batch . The { @ code options } can * be used to restrict the fields returned in the same way as for { @ link Dns # create ( ZoneInfo , * Dns . ZoneOption . . . ) } . Calling { @ link DnsBatchResult # get ( ) } on the return value yields the * created { @ link Zone } if successful and throws a { @ link DnsException } otherwise . */ public DnsBatchResult < Zone > createZone ( ZoneInfo zone , Dns . ZoneOption ... options ) { } }
DnsBatchResult < Zone > result = new DnsBatchResult < > ( ) ; // todo this can cause misleading report of a failure , intended to be fixed within # 924 RpcBatch . Callback < ManagedZone > callback = createZoneCallback ( this . options , result , false , true ) ; Map < DnsRpc . Option , ? > optionMap = DnsImpl . optionMap ( options ) ; batch . addCreateZone ( zone . toPb ( ) , callback , optionMap ) ; return result ;
public class IO { /** * Write the given elements to the data output . * @ param elements the elements to write * @ param writer the element writer * @ param out the data output * @ param < T > the element type * @ throws NullPointerException if one of the given arguments is { @ code null } * @ throws IOException if an I / O error occurs */ static < T > void writes ( final Collection < ? extends T > elements , final Writer < ? super T > writer , final DataOutput out ) throws IOException { } }
writeInt ( elements . size ( ) , out ) ; for ( T element : elements ) { writer . write ( element , out ) ; }
public class TcpConnecter { /** * Close the connecting socket . */ protected void close ( ) { } }
assert ( fd != null ) ; try { fd . close ( ) ; socket . eventClosed ( addr . toString ( ) , fd ) ; } catch ( IOException e ) { socket . eventCloseFailed ( addr . toString ( ) , ZError . exccode ( e ) ) ; } fd = null ;
public class RemoteConsumerDispatcher {
    /*
     * Called by our overridden unlock method in SIMPItem.
     * When the unlock would push the message past the destination's max failed
     * deliveries, forces an expiry-style removal of the message so the
     * redelivery count flows back to the DME in a reject, instead of rerouting
     * to the exception destination (which only happens local to the queue point).
     */
    protected void eventPreUnlocked(SIMPMessage msg, TransactionCommon tran) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "eventPreUnlocked", new Object[] { msg, tran });

        // +1 because this unlock is about to add another redelivery.
        if ((msg.guessRedeliveredCount() + 1) >= _baseDestHandler.getMaxFailedDeliveries()) {
            // Override the check of the threshold from the local case because we only
            // reroute to the exception destination if we are local to the queuepoint.
            // If the threshold is reached we remove the AIMessageItem and drive the expiry
            // code - which should flow the relevant redeliveryCount back to the DME
            // in a reject.
            boolean tranCreated = false;
            if (tran == null) {
                /* Create a new transaction under which to perform the reroute */
                tran = _tranManager.createLocalTransaction(false);
                tranCreated = true;
            }

            // Perform a forced expiry: tag the message with the transaction id used for the reject.
            ((AIMessageItem) msg).setRejectTransactionID(tran.getPersistentTranId());

            // Increment the unlockCount
            _aih.incrementUnlockCount(msg.getMessage().getGuaranteedRemoteGetValueTick());

            // Remove msg from the message store under the (possibly just created) transaction.
            try {
                if (msg.isInStore()) {
                    Transaction msTran = _messageProcessor.resolveAndEnlistMsgStoreTransaction(tran);
                    msg.remove(msTran, msg.getLockID());
                }

                // Only commit if we created the transaction ourselves; a caller-supplied
                // transaction is committed by the caller.
                if (tranCreated)
                    ((LocalTransaction) tran).commit();

                // If successful, make sure we dont try to unlock the msg
                msg.setRedeliveryCountReached();
            } catch (MessageStoreException e) {
                // FFDC
                FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.RemoteConsumerDispatcher.eventPreUnlocked", "1:1090:1.97.2.21", this);
                SibTr.exception(tc, e);
                // Any exception will mean we wait until expiry for a reject
            } catch (SIException e) {
                // FFDC
                FFDCFilter.processException(e, "com.ibm.ws.sib.processor.impl.RemoteConsumerDispatcher.eventPreUnlocked", "1:1102:1.97.2.21", this);
                SibTr.exception(tc, e);
                // Any exception will mean we wait until expiry for a reject
            }
        }

        // NOTE(review): this trailing trace call uses SibTr.entry rather than
        // SibTr.exit - it looks like it should be the method-exit trace; confirm.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "eventPreUnlocked");
    }
}
public class HttpSubjectSecurityFilter {
    /**
     * Security code for subject-security LEGACY.
     * <p>
     * Ensures the incoming HTTP request carries the subject from the underlying
     * transport session, then either bypasses security (no realms configured,
     * propagating or synthesizing a login context) or hands the request to the
     * realm-based security processing.
     */
    @Override
    public void doMessageReceived(NextFilter nextFilter, IoSession session, Object message) throws Exception {
        // Delegate the initial request bookkeeping; stop if it says not to continue.
        if (!httpRequestMessageReceived(nextFilter, session, message))
            return;
        HttpRequestMessage httpRequest = (HttpRequestMessage) message;
        // Make sure we start with the subject from the underlying transport session in case it already has an authenticated subject
        // (e.g. we are httpxe and our transport is http or transport is SSL with a client certificate)
        if (httpRequest.getSubject() == null) {
            httpRequest.setSubject(((IoSessionEx) session).getSubject());
        }
        ResourceAddress httpAddress = httpRequest.getLocalAddress();
        HttpRealmInfo[] realms = httpAddress.getOption(HttpResourceAddress.REALMS);
        if (realms.length == 0) {
            // No realm configured: skip authentication entirely for this request.
            ResultAwareLoginContext loginContext = null;
            // Make sure we propagate the login context from the layer below in httpxe case
            if (session instanceof DefaultHttpSession) {
                loginContext = ((DefaultHttpSession) session).getLoginContext();
            }
            if (loginContext != null) {
                httpRequest.setLoginContext(loginContext);
            } else {
                // No lower-layer context to reuse; mark the request as unprotected.
                setUnprotectedLoginContext(httpRequest);
            }
            final boolean loggerIsEnabled = logger != null && logger.isTraceEnabled();
            if (loggerIsEnabled) {
                logger.trace("HttpSubjectSecurityFilter skipped because no realm is configured.");
            }
            super.doMessageReceived(nextFilter, session, message);
            return;
        }
        // Realms are configured: run the full security processing for this request.
        securityMessageReceived(nextFilter, session, httpRequest);
    }
}
public class ScoreUtil { /** * Get a { @ link DataSetIterator } * from the given object whether it ' s a { @ link DataSetIterator } * or { @ link DataSetIteratorFactory } , any other type will throw * an { @ link IllegalArgumentException } * @ param o the object to get the iterator from * @ return the datasetiterator from the given objects */ public static DataSetIterator getIterator ( Object o ) { } }
if ( o instanceof DataSetIterator ) return ( DataSetIterator ) o ; else if ( o instanceof DataSetIteratorFactory ) { DataSetIteratorFactory factory = ( DataSetIteratorFactory ) o ; return factory . create ( ) ; } throw new IllegalArgumentException ( "Type must either be DataSetIterator or DataSetIteratorFactory" ) ;
public class DubiousListCollection { /** * builds a field annotation by finding the field in the classes ' field list * @ param fieldName the field for which to built the field annotation * @ return the field annotation of the specified field */ @ Nullable private FieldAnnotation getFieldAnnotation ( final String fieldName ) { } }
JavaClass cls = getClassContext ( ) . getJavaClass ( ) ; Field [ ] fields = cls . getFields ( ) ; for ( Field f : fields ) { if ( f . getName ( ) . equals ( fieldName ) ) { return new FieldAnnotation ( cls . getClassName ( ) , fieldName , f . getSignature ( ) , f . isStatic ( ) ) ; } } return null ; // shouldn ' t happen
public class KNXnetIPTunnel { /** * Sends a cEMI frame to the remote server communicating with this endpoint . * Sending in busmonitor mode is not permitted . < br > * @ param frame cEMI message to send , the expected cEMI type is according to the used * tunneling layer */ public void send ( CEMI frame , BlockingMode mode ) throws KNXTimeoutException , KNXConnectionClosedException { } }
if ( layer == BUSMONITOR_LAYER ) throw new KNXIllegalStateException ( "send not permitted in busmonitor mode" ) ; if ( ! ( frame instanceof CEMILData ) ) throw new KNXIllegalArgumentException ( "unsupported cEMI type" ) ; super . send ( frame , mode ) ;
public class CustomRequestHeaderPlugin { /** * Update header value * @ param headerValue new value of the header */ public void setHeaderValue ( @ NotNull String headerValue ) { } }
if ( StringUtils . isBlank ( headerValue ) ) { throw new IllegalArgumentException ( "Parameter 'headerValue' cannot be blank." + " If you want to disable this header invoke disable() method." ) ; } this . headerValue = headerValue ;
public class AmazonElasticFileSystemClient { /** * Creates a mount target for a file system . You can then mount the file system on EC2 instances by using the mount * target . * You can create one mount target in each Availability Zone in your VPC . All EC2 instances in a VPC within a given * Availability Zone share a single mount target for a given file system . If you have multiple subnets in an * Availability Zone , you create a mount target in one of the subnets . EC2 instances do not need to be in the same * subnet as the mount target in order to access their file system . For more information , see < a * href = " https : / / docs . aws . amazon . com / efs / latest / ug / how - it - works . html " > Amazon EFS : How it Works < / a > . * In the request , you also specify a file system ID for which you are creating the mount target and the file * system ' s lifecycle state must be < code > available < / code > . For more information , see < a > DescribeFileSystems < / a > . * In the request , you also provide a subnet ID , which determines the following : * < ul > * < li > * VPC in which Amazon EFS creates the mount target * < / li > * < li > * Availability Zone in which Amazon EFS creates the mount target * < / li > * < li > * IP address range from which Amazon EFS selects the IP address of the mount target ( if you don ' t specify an IP * address in the request ) * < / li > * < / ul > * After creating the mount target , Amazon EFS returns a response that includes , a < code > MountTargetId < / code > and an * < code > IpAddress < / code > . You use this IP address when mounting the file system in an EC2 instance . You can also * use the mount target ' s DNS name when mounting the file system . The EC2 instance on which you mount the file * system by using the mount target can resolve the mount target ' s DNS name to its IP address . For more information , * see < a href = " https : / / docs . aws . amazon . com / efs / latest / ug / how - it - works . 
html # how - it - works - implementation " > How it * Works : Implementation Overview < / a > . * Note that you can create mount targets for a file system in only one VPC , and there can be only one mount target * per Availability Zone . That is , if the file system already has one or more mount targets created for it , the * subnet specified in the request to add another mount target must meet the following requirements : * < ul > * < li > * Must belong to the same VPC as the subnets of the existing mount targets * < / li > * < li > * Must not be in the same Availability Zone as any of the subnets of the existing mount targets * < / li > * < / ul > * If the request satisfies the requirements , Amazon EFS does the following : * < ul > * < li > * Creates a new mount target in the specified subnet . * < / li > * < li > * Also creates a new network interface in the subnet as follows : * < ul > * < li > * If the request provides an < code > IpAddress < / code > , Amazon EFS assigns that IP address to the network interface . * Otherwise , Amazon EFS assigns a free address in the subnet ( in the same way that the Amazon EC2 * < code > CreateNetworkInterface < / code > call does when a request does not specify a primary private IP address ) . * < / li > * < li > * If the request provides < code > SecurityGroups < / code > , this network interface is associated with those security * groups . Otherwise , it belongs to the default security group for the subnet ' s VPC . * < / li > * < li > * Assigns the description < code > Mount target < i > fsmt - id < / i > for file system < i > fs - id < / i > < / code > where * < code > < i > fsmt - id < / i > < / code > is the mount target ID , and < code > < i > fs - id < / i > < / code > is the * < code > FileSystemId < / code > . 
* < / li > * < li > * Sets the < code > requesterManaged < / code > property of the network interface to < code > true < / code > , and the * < code > requesterId < / code > value to < code > EFS < / code > . * < / li > * < / ul > * Each Amazon EFS mount target has one corresponding requester - managed EC2 network interface . After the network * interface is created , Amazon EFS sets the < code > NetworkInterfaceId < / code > field in the mount target ' s description * to the network interface ID , and the < code > IpAddress < / code > field to its address . If network interface creation * fails , the entire < code > CreateMountTarget < / code > operation fails . * < / li > * < / ul > * < note > * The < code > CreateMountTarget < / code > call returns only after creating the network interface , but while the mount * target state is still < code > creating < / code > , you can check the mount target creation status by calling the * < a > DescribeMountTargets < / a > operation , which among other things returns the mount target state . * < / note > * We recommend that you create a mount target in each of the Availability Zones . There are cost considerations for * using a file system in an Availability Zone through a mount target created in another Availability Zone . For more * information , see < a href = " http : / / aws . amazon . com / efs / " > Amazon EFS < / a > . In addition , by always using a mount target * local to the instance ' s Availability Zone , you eliminate a partial failure scenario . If the Availability Zone in * which your mount target is created goes down , then you can ' t access your file system through that mount target . 
* This operation requires permissions for the following action on the file system : * < ul > * < li > * < code > elasticfilesystem : CreateMountTarget < / code > * < / li > * < / ul > * This operation also requires permissions for the following Amazon EC2 actions : * < ul > * < li > * < code > ec2 : DescribeSubnets < / code > * < / li > * < li > * < code > ec2 : DescribeNetworkInterfaces < / code > * < / li > * < li > * < code > ec2 : CreateNetworkInterface < / code > * < / li > * < / ul > * @ param createMountTargetRequest * @ return Result of the CreateMountTarget operation returned by the service . * @ throws BadRequestException * Returned if the request is malformed or contains an error such as an invalid parameter value or a missing * required parameter . * @ throws InternalServerErrorException * Returned if an error occurred on the server side . * @ throws FileSystemNotFoundException * Returned if the specified < code > FileSystemId < / code > value doesn ' t exist in the requester ' s AWS account . * @ throws IncorrectFileSystemLifeCycleStateException * Returned if the file system ' s lifecycle state is not " available " . * @ throws MountTargetConflictException * Returned if the mount target would violate one of the specified restrictions based on the file system ' s * existing mount targets . * @ throws SubnetNotFoundException * Returned if there is no subnet with ID < code > SubnetId < / code > provided in the request . * @ throws NoFreeAddressesInSubnetException * Returned if < code > IpAddress < / code > was not specified in the request and there are no free IP addresses in * the subnet . * @ throws IpAddressInUseException * Returned if the request specified an < code > IpAddress < / code > that is already in use in the subnet . * @ throws NetworkInterfaceLimitExceededException * The calling account has reached the limit for elastic network interfaces for the specific AWS Region . 
The * client should try to delete some elastic network interfaces or get the account limit raised . For more * information , see < a * href = " https : / / docs . aws . amazon . com / AmazonVPC / latest / UserGuide / VPC _ Appendix _ Limits . html " > Amazon VPC * Limits < / a > in the < i > Amazon VPC User Guide < / i > ( see the Network interfaces per VPC entry in the table ) . * @ throws SecurityGroupLimitExceededException * Returned if the size of < code > SecurityGroups < / code > specified in the request is greater than five . * @ throws SecurityGroupNotFoundException * Returned if one of the specified security groups doesn ' t exist in the subnet ' s VPC . * @ throws UnsupportedAvailabilityZoneException * @ sample AmazonElasticFileSystem . CreateMountTarget * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / elasticfilesystem - 2015-02-01 / CreateMountTarget " * target = " _ top " > AWS API Documentation < / a > */ @ Override public CreateMountTargetResult createMountTarget ( CreateMountTargetRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeCreateMountTarget ( request ) ;
public class BigQueryUtils { /** * Parses the given JSON string and returns the extracted schema . * @ param fields a string to read the TableSchema from . * @ return the List of TableFieldSchema described by the string fields . */ public static List < TableFieldSchema > getSchemaFromString ( String fields ) { } }
logger . atFine ( ) . log ( "getSchemaFromString('%s')" , fields ) ; // Parse the output schema for Json from fields . JsonParser jsonParser = new JsonParser ( ) ; JsonArray json = jsonParser . parse ( fields ) . getAsJsonArray ( ) ; List < TableFieldSchema > fieldsList = new ArrayList < > ( ) ; // For each item in the list of fields . for ( JsonElement jsonElement : json ) { checkArgument ( jsonElement . isJsonObject ( ) , "Expected JsonObject for element, got '%s'." , jsonElement ) ; JsonObject jsonObject = jsonElement . getAsJsonObject ( ) ; // Set the name and type . checkArgument ( jsonObject . get ( "name" ) != null , "Expected non-null entry for key 'name' in JsonObject '%s'" , jsonObject ) ; checkArgument ( jsonObject . get ( "type" ) != null , "Expected non-null entry for key 'type' in JsonObject '%s'" , jsonObject ) ; TableFieldSchema fieldDef = new TableFieldSchema ( ) ; fieldDef . setName ( jsonObject . get ( "name" ) . getAsString ( ) ) ; fieldDef . setType ( jsonObject . get ( "type" ) . getAsString ( ) ) ; // If mode is not null , set mode . if ( jsonObject . get ( "mode" ) != null ) { fieldDef . setMode ( jsonObject . get ( "mode" ) . getAsString ( ) ) ; } // If the type is RECORD set the fields . if ( jsonObject . get ( "type" ) . getAsString ( ) . equals ( "RECORD" ) ) { checkArgument ( jsonObject . get ( "fields" ) != null , "Expected non-null entry for key 'fields' in JsonObject of type RECORD: '%s'" , jsonObject ) ; fieldDef . setFields ( getSchemaFromString ( jsonObject . get ( "fields" ) . toString ( ) ) ) ; } fieldsList . add ( fieldDef ) ; } // Return list of TableFieldSchema . return fieldsList ;
public class NewJFrame { /** * GEN - LAST : event _ datePopupChanged */ private void timePopupChanged ( java . beans . PropertyChangeEvent evt ) { } }
// GEN - FIRST : event _ timePopupChanged if ( evt . getNewValue ( ) instanceof Date ) setDateTime ( ( Date ) evt . getNewValue ( ) ) ;
public class SessionDataManager { /** * Return item by absolute path in this transient storage then in workspace container . * @ param path * - absolute path to the searched item * @ param pool * - indicates does the item fall in pool * @ return existed item or null if not found * @ throws RepositoryException */ public ItemImpl getItem ( QPath path , boolean pool ) throws RepositoryException { } }
long start = 0 ; if ( LOG . isDebugEnabled ( ) ) { start = System . currentTimeMillis ( ) ; LOG . debug ( "getItem(" + path . getAsString ( ) + " ) >>>>>" ) ; } ItemImpl item = null ; try { return item = readItem ( getItemData ( path ) , pool ) ; } finally { if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "getItem(" + path . getAsString ( ) + ") --> " + ( item != null ? item . getPath ( ) : "null" ) + " <<<<< " + ( ( System . currentTimeMillis ( ) - start ) / 1000d ) + "sec" ) ; } }