signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class AssociationValue { /** * Set the attributes for the associated object .
* @ param attributes attributes for associated objects
* @ deprecated replaced by { @ link # setAllAttributes ( Map ) } after introduction of nested associations */
@ Deprecated @ SuppressWarnings ( { } } | "rawtypes" , "unchecked" } ) public void setAttributes ( Map < String , PrimitiveAttribute < ? > > attributes ) { if ( ! isPrimitiveOnly ( ) ) { throw new UnsupportedOperationException ( "Primitive API not supported for nested association values" ) ; } this . attributes = ( Map ) attributes ; |
public class FodselsnummerValidator { /** * Returns an object that represents a Fodselsnummer .
* @ param fodselsnummer
* A String containing a Fodselsnummer
* @ return A Fodselsnummer instance
* @ throws IllegalArgumentException
* thrown if String contains an invalid Fodselsnummer */
public static no . bekk . bekkopen . person . Fodselsnummer getFodselsnummer ( String fodselsnummer ) throws IllegalArgumentException { } } | validateSyntax ( fodselsnummer ) ; validateIndividnummer ( fodselsnummer ) ; validateDate ( fodselsnummer ) ; validateChecksums ( fodselsnummer ) ; return new no . bekk . bekkopen . person . Fodselsnummer ( fodselsnummer ) ; |
public class PriceListUrl { /** * Get Resource Url for GetPriceList
* @ param priceListCode The unique code of the price list for which you want to retrieve the details .
* @ param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object . This parameter should only be used to retrieve data . Attempting to update data using this parameter may cause data loss .
* @ return String Resource Url */
public static MozuUrl getPriceListUrl ( String priceListCode , String responseFields ) { } } | UrlFormatter formatter = new UrlFormatter ( "/api/commerce/catalog/storefront/pricelists/{priceListCode}?responseFields={responseFields}" ) ; formatter . formatUrl ( "priceListCode" , priceListCode ) ; formatter . formatUrl ( "responseFields" , responseFields ) ; return new MozuUrl ( formatter . getResourceUrl ( ) , MozuUrl . UrlLocation . TENANT_POD ) ; |
public class RemoteMetaDataImpl { /** * Returns the type of the specified column . The method first finds the name
* of the field in that column , and then looks up its type in the schema .
* @ see org . vanilladb . core . remote . jdbc . RemoteMetaData # getColumnType ( int ) */
@ Override public int getColumnType ( int column ) throws RemoteException { } } | String fldname = getColumnName ( column ) ; return schema . type ( fldname ) . getSqlType ( ) ; |
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public EClass getMCA ( ) { } } | if ( mcaEClass == null ) { mcaEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 288 ) ; } return mcaEClass ; |
public class Counters { /** * Finds and returns the key in the Counter with the largest count . Returning
* null if count is empty .
* @ param c
* The Counter
* @ return The key in the Counter with the largest count . */
public static < E > E argmax ( Counter < E > c ) { } } | double max = Double . NEGATIVE_INFINITY ; E argmax = null ; for ( E key : c . keySet ( ) ) { double count = c . getCount ( key ) ; if ( argmax == null || count > max ) { // | | ( count = = max & &
// tieBreaker . compare ( key , argmax ) <
max = count ; argmax = key ; } } return argmax ; |
public class URLUtils { /** * Set the element ID from the path
* @ param relativePath path
* @ param id element ID
* @ return element ID , may be { @ code null } */
public static URI setElementID ( final URI relativePath , final String id ) { } } | String topic = getTopicID ( relativePath ) ; if ( topic != null ) { return setFragment ( relativePath , topic + ( id != null ? SLASH + id : "" ) ) ; } else if ( id == null ) { return stripFragment ( relativePath ) ; } else { throw new IllegalArgumentException ( relativePath . toString ( ) ) ; } |
public class PerformanceTracker { /** * Updates the saved jsRoot and resets the size tracking fields accordingly .
* @ param jsRoot */
void updateAfterDeserialize ( Node jsRoot ) { } } | // TODO ( bradfordcsmith ) : Restore line counts for inputs and externs .
this . jsRoot = jsRoot ; if ( ! tracksAstSize ( ) ) { return ; } this . initAstSize = this . astSize = NodeUtil . countAstSize ( this . jsRoot ) ; if ( ! tracksSize ( ) ) { return ; } PerformanceTrackerCodeSizeEstimator estimator = PerformanceTrackerCodeSizeEstimator . estimate ( this . jsRoot , tracksGzSize ( ) ) ; this . initCodeSize = this . codeSize = estimator . getCodeSize ( ) ; if ( tracksGzSize ( ) ) { this . initGzCodeSize = this . gzCodeSize = estimator . getZippedCodeSize ( ) ; } |
public class ModbusResponse { /** * Factory method creating the required specialized < tt > ModbusResponse < / tt >
* instance .
* @ param functionCode the function code of the response as < tt > int < / tt > .
* @ return a ModbusResponse instance specific for the given function code . */
public static ModbusResponse createModbusResponse ( int functionCode ) { } } | ModbusResponse response ; switch ( functionCode ) { case Modbus . READ_COILS : response = new ReadCoilsResponse ( ) ; break ; case Modbus . READ_INPUT_DISCRETES : response = new ReadInputDiscretesResponse ( ) ; break ; case Modbus . READ_MULTIPLE_REGISTERS : response = new ReadMultipleRegistersResponse ( ) ; break ; case Modbus . READ_INPUT_REGISTERS : response = new ReadInputRegistersResponse ( ) ; break ; case Modbus . WRITE_COIL : response = new WriteCoilResponse ( ) ; break ; case Modbus . WRITE_SINGLE_REGISTER : response = new WriteSingleRegisterResponse ( ) ; break ; case Modbus . WRITE_MULTIPLE_COILS : response = new WriteMultipleCoilsResponse ( ) ; break ; case Modbus . WRITE_MULTIPLE_REGISTERS : response = new WriteMultipleRegistersResponse ( ) ; break ; case Modbus . READ_EXCEPTION_STATUS : response = new ReadExceptionStatusResponse ( ) ; break ; case Modbus . READ_SERIAL_DIAGNOSTICS : response = new ReadSerialDiagnosticsResponse ( ) ; break ; case Modbus . READ_COMM_EVENT_COUNTER : response = new ReadCommEventCounterResponse ( ) ; break ; case Modbus . READ_COMM_EVENT_LOG : response = new ReadCommEventLogResponse ( ) ; break ; case Modbus . REPORT_SLAVE_ID : response = new ReportSlaveIDResponse ( ) ; break ; case Modbus . READ_FILE_RECORD : response = new ReadFileRecordResponse ( ) ; break ; case Modbus . WRITE_FILE_RECORD : response = new WriteFileRecordResponse ( ) ; break ; case Modbus . MASK_WRITE_REGISTER : response = new MaskWriteRegisterResponse ( ) ; break ; case Modbus . READ_WRITE_MULTIPLE : response = new ReadWriteMultipleResponse ( ) ; break ; case Modbus . READ_FIFO_QUEUE : response = new ReadFIFOQueueResponse ( ) ; break ; case Modbus . 
READ_MEI : response = new ReadMEIResponse ( ) ; break ; default : if ( ( functionCode & 0x80 ) != 0 ) { response = new ExceptionResponse ( functionCode ) ; } else { response = new ExceptionResponse ( ) ; } break ; } return response ; |
public class Range { /** * Create a two dimensional range < code > 0 . . _ globalWidth * 0 . . _ globalHeight * 0 . . / _ globalDepth < / code >
* in groups defined by < code > localWidth < / code > * < code > localHeight < / code > * < code > localDepth < / code > .
* Note that for this range to be valid < code > _ globalWidth > 0 & & _ globalHeight > 0 _ globalDepth > 0 & & _ localWidth > 0 & & _ localHeight > 0 & & _ localDepth > 0 & & _ localWidth * _ localHeight * _ localDepth < MAX _ GROUP _ SIZE & & _ globalWidth % _ localWidth = = 0 & & _ globalHeight % _ localHeight = = 0 & & _ globalDepth % _ localDepth = = 0 < / code > .
* @ param _ globalWidth the width of the 3D grid we wish to process
* @ param _ globalHeight the height of the 3D grid we wish to process
* @ param _ globalDepth the depth of the 3D grid we wish to process
* @ param _ localWidth the width of the 3D group we wish to process
* @ param _ localHeight the height of the 3D group we wish to process
* @ param _ localDepth the depth of the 3D group we wish to process
* @ return */
public static Range create3D ( Device _device , int _globalWidth , int _globalHeight , int _globalDepth , int _localWidth , int _localHeight , int _localDepth ) { } } | final Range range = new Range ( _device , 3 ) ; range . setGlobalSize_0 ( _globalWidth ) ; range . setLocalSize_0 ( _localWidth ) ; range . setGlobalSize_1 ( _globalHeight ) ; range . setLocalSize_1 ( _localHeight ) ; range . setGlobalSize_2 ( _globalDepth ) ; range . setLocalSize_2 ( _localDepth ) ; range . setValid ( ( range . getLocalSize_0 ( ) > 0 ) && ( range . getLocalSize_1 ( ) > 0 ) && ( range . getLocalSize_2 ( ) > 0 ) && ( ( range . getLocalSize_0 ( ) * range . getLocalSize_1 ( ) * range . getLocalSize_2 ( ) ) <= range . getMaxWorkGroupSize ( ) ) && ( range . getLocalSize_0 ( ) <= range . getMaxWorkItemSize ( ) [ 0 ] ) && ( range . getLocalSize_1 ( ) <= range . getMaxWorkItemSize ( ) [ 1 ] ) && ( range . getLocalSize_2 ( ) <= range . getMaxWorkItemSize ( ) [ 2 ] ) && ( ( range . getGlobalSize_0 ( ) % range . getLocalSize_0 ( ) ) == 0 ) && ( ( range . getGlobalSize_1 ( ) % range . getLocalSize_1 ( ) ) == 0 ) && ( ( range . getGlobalSize_2 ( ) % range . getLocalSize_2 ( ) ) == 0 ) ) ; return ( range ) ; |
public class UserLayoutNodeDescription { /** * Add all of common node attributes to the < code > Element < / code > .
* @ param node an < code > Element < / code > value */
@ Override public void addNodeAttributes ( Element node ) { } } | node . setAttribute ( "ID" , this . getId ( ) ) ; node . setAttribute ( "name" , this . getName ( ) ) ; node . setAttribute ( "unremovable" , String . valueOf ( this . isUnremovable ( ) ) ) ; node . setAttribute ( "immutable" , String . valueOf ( this . isImmutable ( ) ) ) ; node . setAttribute ( "hidden" , String . valueOf ( this . isHidden ( ) ) ) ; if ( ! this . isDeleteAllowed ( ) ) node . setAttributeNS ( Constants . NS_URI , Constants . ATT_DELETE_ALLOWED , "false" ) ; if ( ! this . isMoveAllowed ( ) ) node . setAttributeNS ( Constants . NS_URI , Constants . ATT_MOVE_ALLOWED , "false" ) ; if ( ! this . isEditAllowed ( ) ) node . setAttributeNS ( Constants . NS_URI , Constants . ATT_EDIT_ALLOWED , "false" ) ; if ( ! this . isAddChildAllowed ( ) ) node . setAttributeNS ( Constants . NS_URI , Constants . ATT_ADD_CHILD_ALLOWED , "false" ) ; if ( this . getPrecedence ( ) != 0.0 ) node . setAttributeNS ( Constants . NS_URI , Constants . ATT_PRECEDENCE , Double . toString ( this . getPrecedence ( ) ) ) ; |
public class Util { /** * Compute the maximum of the integer logarithms ( ceil ( log ( x + 1 ) ) of a the
* successive differences ( deltas ) of a range of value
* @ param initoffset
* initial vallue for the computation of the deltas
* @ param i
* source array
* @ param pos
* starting position
* @ param length
* number of integers to consider
* @ return integer logarithm */
public static int maxdiffbits ( int initoffset , int [ ] i , int pos , int length ) { } } | int mask = 0 ; mask |= ( i [ pos ] - initoffset ) ; for ( int k = pos + 1 ; k < pos + length ; ++ k ) { mask |= i [ k ] - i [ k - 1 ] ; } return bits ( mask ) ; |
public class Windows { /** * Constructs a list of window of size windowSize .
* Note that padding for each window is created as well .
* @ param words the words to tokenize and construct windows from
* @ param tokenizerFactory tokenizer factory to use
* @ return the list of windows for the tokenized string */
public static List < Window > windows ( String words , TokenizerFactory tokenizerFactory ) { } } | Tokenizer tokenizer = tokenizerFactory . create ( words ) ; List < String > list = new ArrayList < > ( ) ; while ( tokenizer . hasMoreTokens ( ) ) list . add ( tokenizer . nextToken ( ) ) ; return windows ( list , 5 ) ; |
public class NotificationHubsInner { /** * test send a push notification .
* @ param resourceGroupName The name of the resource group .
* @ param namespaceName The namespace name .
* @ param notificationHubName The notification hub name .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the DebugSendResponseInner object */
public Observable < DebugSendResponseInner > debugSendAsync ( String resourceGroupName , String namespaceName , String notificationHubName ) { } } | return debugSendWithServiceResponseAsync ( resourceGroupName , namespaceName , notificationHubName ) . map ( new Func1 < ServiceResponse < DebugSendResponseInner > , DebugSendResponseInner > ( ) { @ Override public DebugSendResponseInner call ( ServiceResponse < DebugSendResponseInner > response ) { return response . body ( ) ; } } ) ; |
public class Copiers { /** * 基于 Orika 实现的简单拷贝 , 满足基本需求
* @ param sourceClass 源对象类型
* @ param targetClass 目标对象类型
* @ return copier */
public static < F , T > Copier < F , T > create ( Class < F > sourceClass , Class < T > targetClass ) { } } | return CopierFactory . getOrCreateOrikaCopier ( sourceClass , targetClass ) ; |
public class RtfFont { /** * Compares this < code > RtfFont < / code > to either a { @ link com . lowagie . text . Font } or
* an < code > RtfFont < / code > .
* @ since 2.1.0 */
public int compareTo ( Object object ) { } } | if ( object == null ) { return - 1 ; } if ( object instanceof RtfFont ) { if ( this . getFontName ( ) . compareTo ( ( ( RtfFont ) object ) . getFontName ( ) ) != 0 ) { return 1 ; } else { return super . compareTo ( object ) ; } } else if ( object instanceof Font ) { return super . compareTo ( object ) ; } else { return - 3 ; } |
public class TmdbGenres { /** * Get the list of genres for movies or TV
* @ param language
* @ param sub
* @ return
* @ throws MovieDbException */
private ResultList < Genre > getGenreList ( String language , MethodSub sub ) throws MovieDbException { } } | TmdbParameters parameters = new TmdbParameters ( ) ; parameters . add ( Param . LANGUAGE , language ) ; URL url = new ApiUrl ( apiKey , MethodBase . GENRE ) . subMethod ( sub ) . buildUrl ( parameters ) ; String webpage = httpTools . getRequest ( url ) ; try { WrapperGenres wrapper = MAPPER . readValue ( webpage , WrapperGenres . class ) ; ResultList < Genre > results = new ResultList < > ( wrapper . getGenres ( ) ) ; wrapper . setResultProperties ( results ) ; return results ; } catch ( IOException ex ) { throw new MovieDbException ( ApiExceptionType . MAPPING_FAILED , "Failed to get genre " + sub . toString ( ) , url , ex ) ; } |
public class IOUtils {

    /**
     * Helper method for getPDBCharacter and getPDBConservation: classifies the
     * character pair as match / similar / deletion-mismatch / default, and either
     * returns the raw text or wraps it in a CSS-classed span for web output.
     */
    private static String getPDBString(boolean web, char c1, char c2, boolean similar,
            String m, String sm, String dm, String qg) {
        final String cssClass;
        final String text;
        if (c1 == c2) {
            cssClass = "m";
            text = m;
        } else if (similar) {
            cssClass = "sm";
            text = sm;
        } else if (c1 == '-' || c2 == '-') {
            cssClass = "dm";
            text = dm;
        } else {
            cssClass = "qg";
            text = qg;
        }
        return web ? "<span class=\"" + cssClass + "\">" + text + "</span>" : text;
    }
}
public class MtasSpanSequenceSpans { /** * Glue .
* @ param subMatchesQueue the sub matches queue
* @ param subMatchesOptional the sub matches optional
* @ param item the item
* @ return the list
* @ throws IOException Signals that an I / O exception has occurred . */
private List < Match > _glue ( List < Match > subMatchesQueue , Boolean subMatchesOptional , QueueItem item ) throws IOException { } } | List < Match > newSubMatchesQueue = new ArrayList < > ( ) ; // no previous queue , only use current item
if ( subMatchesQueue . isEmpty ( ) ) { if ( item . filledPosition ) { for ( Integer endPosition : item . queue . get ( item . lowestPosition ) ) { Match m = new Match ( item . lowestPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( m ) ) { newSubMatchesQueue . add ( m ) ; } } } return newSubMatchesQueue ; // previous queue
} else { // startposition from queue
int startPosition = subMatchesQueue . get ( 0 ) . startPosition ; // previous queue optional , current item optional
if ( subMatchesOptional && item . sequenceSpans . optional ) { // forget previous , because current has lower startposition
if ( item . filledPosition && item . lowestPosition < startPosition ) { for ( Integer endPosition : item . queue . get ( item . lowestPosition ) ) { Match m = new Match ( item . lowestPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( m ) ) { newSubMatchesQueue . add ( m ) ; } } // merge with previous
} else if ( item . filledPosition ) { if ( item . lowestPosition . equals ( startPosition ) ) { for ( Integer endPosition : item . queue . get ( item . lowestPosition ) ) { Match m = new Match ( item . lowestPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( m ) ) { newSubMatchesQueue . add ( m ) ; } } } newSubMatchesQueue . addAll ( subMatchesQueue ) ; for ( Match m : subMatchesQueue ) { if ( item . queue . containsKey ( m . endPosition ) ) { for ( Integer endPosition : item . queue . get ( m . endPosition ) ) { Match o = new Match ( m . startPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( o ) ) { newSubMatchesQueue . add ( o ) ; } } } } // no filled position
} else { newSubMatchesQueue . addAll ( subMatchesQueue ) ; } // previous queue optional , current item not optional
} else if ( subMatchesOptional && ! item . sequenceSpans . optional ) { assert item . filledPosition : "span not optional, should contain items" ; // forget previous
if ( item . lowestPosition < startPosition ) { for ( Integer endPosition : item . queue . get ( item . lowestPosition ) ) { Match m = new Match ( item . lowestPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( m ) ) { newSubMatchesQueue . add ( m ) ; } } // merge with previous
} else { if ( item . lowestPosition . equals ( startPosition ) ) { for ( Integer endPosition : item . queue . get ( item . lowestPosition ) ) { Match m = new Match ( item . lowestPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( m ) ) { newSubMatchesQueue . add ( m ) ; } } } for ( Match m : subMatchesQueue ) { if ( item . queue . containsKey ( m . endPosition ) ) { for ( Integer endPosition : item . queue . get ( m . endPosition ) ) { Match o = new Match ( m . startPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( o ) ) { newSubMatchesQueue . add ( o ) ; } } } } } // previous queue not optional , current item optional
} else if ( ! subMatchesOptional && item . sequenceSpans . optional ) { newSubMatchesQueue . addAll ( subMatchesQueue ) ; // merge with previous
if ( item . filledPosition ) { for ( Match m : subMatchesQueue ) { if ( item . queue . containsKey ( m . endPosition ) ) { for ( Integer endPosition : item . queue . get ( m . endPosition ) ) { Match o = new Match ( m . startPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( o ) ) { newSubMatchesQueue . add ( o ) ; } } } } } // previous queue not optional , current item not optional
} else if ( ! subMatchesOptional && ! item . sequenceSpans . optional && item . filledPosition ) { for ( Match m : subMatchesQueue ) { Set < Integer > ignoreList = ignoreItem . getFullEndPositionList ( docId , m . endPosition ) ; Integer [ ] checkList ; if ( ignoreList == null ) { checkList = new Integer [ ] { m . endPosition } ; } else { checkList = new Integer [ 1 + ignoreList . size ( ) ] ; checkList = ignoreList . toArray ( checkList ) ; checkList [ ignoreList . size ( ) ] = m . endPosition ; } for ( Integer checkEndPosition : checkList ) { if ( item . queue . containsKey ( checkEndPosition ) ) { for ( Integer endPosition : item . queue . get ( checkEndPosition ) ) { Match o = new Match ( m . startPosition , endPosition ) ; if ( ! newSubMatchesQueue . contains ( o ) ) { newSubMatchesQueue . add ( o ) ; } } } } } } } return newSubMatchesQueue ; |
public class PlatformBitmapFactory { /** * Creates a bitmap from subset of the source bitmap ,
* transformed by the optional matrix . It is initialized with the same
* density as the original bitmap .
* @ param source The bitmap we are subsetting
* @ param x The x coordinate of the first pixel in source
* @ param y The y coordinate of the first pixel in source
* @ param width The number of pixels in each row
* @ param height The number of rows
* @ param matrix Optional matrix to be applied to the pixels
* @ param filter true if the source should be filtered .
* Only applies if the matrix contains more than just
* translation .
* @ return a reference to the bitmap
* @ throws IllegalArgumentException if the x , y , width , height values are
* outside of the dimensions of the source bitmap , or width is < = 0,
* or height is < = 0
* @ throws TooManyBitmapsException if the pool is full
* @ throws java . lang . OutOfMemoryError if the Bitmap cannot be allocated */
public CloseableReference < Bitmap > createBitmap ( Bitmap source , int x , int y , int width , int height , @ Nullable Matrix matrix , boolean filter ) { } } | return createBitmap ( source , x , y , width , height , matrix , filter , null ) ; |
public class WindowsProcessOutputHandler { /** * Updates the fax job based on the data from the process output .
* @ param faxClientSpi
* The fax client SPI
* @ param faxJob
* The fax job object
* @ param processOutput
* The process output
* @ param faxActionType
* The fax action type */
public void updateFaxJob ( FaxClientSpi faxClientSpi , FaxJob faxJob , ProcessOutput processOutput , FaxActionType faxActionType ) { } } | // get output
String output = WindowsFaxClientSpiHelper . getOutputPart ( processOutput , Fax4jExeConstants . FAX_JOB_ID_OUTPUT_PREFIX . toString ( ) ) ; if ( output != null ) { // validate fax job ID
WindowsFaxClientSpiHelper . validateFaxJobID ( output ) ; // set fax job ID
faxJob . setID ( output ) ; } |
public class JvmInnerTypeReferenceImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
@ Override public void eUnset ( int featureID ) { } } | switch ( featureID ) { case TypesPackage . JVM_INNER_TYPE_REFERENCE__OUTER : setOuter ( ( JvmParameterizedTypeReference ) null ) ; return ; } super . eUnset ( featureID ) ; |
public class AbstractAppender { /** * Gets the previous entry . */
@ SuppressWarnings ( "unused" ) protected Entry getPrevEntry ( MemberState member ) { } } | long prevIndex = Math . min ( member . getNextIndex ( ) - 1 , context . getLog ( ) . lastIndex ( ) ) ; while ( prevIndex > 0 ) { Entry entry = context . getLog ( ) . get ( prevIndex ) ; if ( entry != null ) { return entry ; } prevIndex -- ; } return null ; |
public class PathHelper { /** * Check if the passed file can read and write . If the file already exists ,
* the file itself is checked . If the file does not exist , the parent
* directory
* @ param aFile
* The file to be checked . May be < code > null < / code > .
* @ return < code > true < / code > if the file can be read and written */
public static boolean canReadAndWriteFile ( @ Nullable final Path aFile ) { } } | if ( aFile == null ) return false ; // The Files API seem to be slow
return FileHelper . canReadAndWriteFile ( aFile . toFile ( ) ) ; // if ( Files . isRegularFile ( aFile ) )
// / / Path exists
// if ( ! Files . isReadable ( aFile ) | | ! Files . isWritable ( aFile ) )
// return false ;
// else
// / / Path does not exist ( yet )
// / / Check parent directory
// final Path aParentDir = aFile . getParent ( ) ;
// if ( aParentDir = = null | |
// ! Files . isDirectory ( aParentDir ) | |
// ! Files . isReadable ( aParentDir ) | |
// ! Files . isWritable ( aParentDir ) )
// return false ;
// return true ; |
public class OidcLoginConfigImpl { /** * { @ inheritDoc } */
@ Override @ FFDCIgnore ( SocialLoginException . class ) public String getTrustStoreRef ( ) { } } | if ( this . sslRefInfo == null ) { SocialLoginService service = socialLoginServiceRef . getService ( ) ; if ( service == null ) { if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "Social login service is not available" ) ; } return null ; } sslRefInfo = createSslRefInfoImpl ( service ) ; } try { return sslRefInfo . getTrustStoreName ( ) ; } catch ( SocialLoginException e ) { // TODO - NLS message ?
e . logErrorMessage ( ) ; } return null ; |
public class ExternalEntryPointHelper { /** * Based on the input for scanning annotations , look for @ ExternalEntryPoint and get the decorated name from it , if any .
* @ param method method to check
* @ param scanEntryPointAnnotation annotation
* @ return String */
public static String getEntryPointDecoratedName ( Method method , boolean scanEntryPointAnnotation ) { } } | String decoratedName = method . getName ( ) ; if ( scanEntryPointAnnotation ) { // we look at the method level
if ( method . isAnnotationPresent ( ExternalEntryPoint . class ) ) { final ExternalEntryPoint externalEntryPoint = method . getAnnotation ( ExternalEntryPoint . class ) ; if ( StringUtils . isNotBlank ( externalEntryPoint . name ( ) ) ) { decoratedName = externalEntryPoint . name ( ) ; } } } return decoratedName ; |
public class MethodCompiler { /** * Set field in class
* < p > Stack : . . . , value = & gt ; . . .
* @ param cls
* @ param name
* @ throws IOException */
public void putField ( Class < ? > cls , String name ) throws IOException { } } | putField ( El . getField ( cls , name ) ) ; |
public class MethodsStringConverter { /** * Converts the { @ code String } to an object .
* @ param cls the class to convert to , not null
* @ param str the string to convert , not null
* @ return the converted object , may be null but generally not */
@ Override public T convertFromString ( Class < ? extends T > cls , String str ) { } } | try { return cls . cast ( fromString . invoke ( null , str ) ) ; } catch ( IllegalAccessException ex ) { throw new IllegalStateException ( "Method is not accessible: " + fromString ) ; } catch ( InvocationTargetException ex ) { if ( ex . getCause ( ) instanceof RuntimeException ) { throw ( RuntimeException ) ex . getCause ( ) ; } throw new RuntimeException ( ex . getMessage ( ) , ex . getCause ( ) ) ; } |
public class XReportBreakScreen { /** * Display the start form in input format .
* @ param out The out stream .
* @ param iPrintOptions The view specific attributes . */
public void printControlEndForm ( PrintWriter out , int iPrintOptions ) { } } | out . println ( Utility . endTag ( XMLTags . CONTROLS ) ) ; if ( ( iPrintOptions & HtmlConstants . FOOTING_SCREEN ) == HtmlConstants . FOOTING_SCREEN ) out . println ( Utility . endTag ( XMLTags . FOOTING ) ) ; else out . println ( Utility . endTag ( XMLTags . HEADING ) ) ; |
public class AnnotationTypeFieldWriterImpl { /** * { @ inheritDoc } */
protected Content getNavSummaryLink ( TypeElement typeElement , boolean link ) { } } | if ( link ) { return writer . getHyperLink ( SectionName . ANNOTATION_TYPE_FIELD_SUMMARY , contents . navField ) ; } else { return contents . navField ; } |
public class PublicanPODocBookBuilder { /** * TODO Look at how to do editor links for translation builds */
protected void processPOTopicEditorLink ( final BuildData buildData , final SpecTopic specTopic , final Map < String , TranslationDetails > translations ) { } } | // EDITOR LINK
if ( buildData . getBuildOptions ( ) . getInsertEditorLinks ( ) ) { final DocBookXMLPreProcessor preProcessor = buildData . getXMLPreProcessor ( ) ; final BaseTopicWrapper < ? > topic = specTopic . getTopic ( ) ; final String editorUrl = topic . getEditorURL ( buildData . getZanataDetails ( ) ) ; if ( editorUrl != null ) { final String editorLink = preProcessor . createExternalLinkElement ( buildData . getDocBookVersion ( ) , "Edit this topic" , editorUrl ) ; translations . put ( editorLink , new TranslationDetails ( null , false , "para" ) ) ; } else { /* * Since the getEditorURL method only returns null for translations we don ' t need to check the topic
* type . */
translations . put ( "No editor available for this topic, as it hasn't been pushed for translation." , new TranslationDetails ( null , false , "para" ) ) ; } // Add a link for additional translated content for Revision Histories and author groups
if ( topic instanceof TranslatedTopicWrapper && ( specTopic . getTopicType ( ) == TopicType . REVISION_HISTORY || specTopic . getTopicType ( ) == TopicType . AUTHOR_GROUP ) ) { final String additionalXMLEditorUrl = topic . getPressGangURL ( ) ; if ( additionalXMLEditorUrl != null ) { final String editorLink = preProcessor . createExternalLinkElement ( buildData . getDocBookVersion ( ) , "Edit the Additional Translated XML" , editorUrl ) ; translations . put ( editorLink , new TranslationDetails ( null , false , "para" ) ) ; } } } |
public class MetricRegistry { /** * Concatenates elements to form a dotted name , eliding any null values or empty strings .
* @ param name the first element of the name
* @ param names the remaining elements of the name
* @ return { @ code name } and { @ code names } concatenated by periods */
public static String name ( String name , String ... names ) { } } | final StringBuilder builder = new StringBuilder ( ) ; append ( builder , name ) ; if ( names != null ) { for ( String s : names ) { append ( builder , s ) ; } } return builder . toString ( ) ; |
public class SpecializedOps_DDRM { /** * Copies just the upper or lower triangular portion of a matrix .
* @ param src Matrix being copied . Not modified .
* @ param dst Where just a triangle from src is copied . If null a new one will be created . Modified .
* @ param upper If the upper or lower triangle should be copied .
* @ return The copied matrix . */
public static DMatrixRMaj copyTriangle ( DMatrixRMaj src , DMatrixRMaj dst , boolean upper ) { } } | if ( dst == null ) { dst = new DMatrixRMaj ( src . numRows , src . numCols ) ; } else if ( src . numRows != dst . numRows || src . numCols != dst . numCols ) { throw new IllegalArgumentException ( "src and dst must have the same dimensions." ) ; } if ( upper ) { int N = Math . min ( src . numRows , src . numCols ) ; for ( int i = 0 ; i < N ; i ++ ) { int index = i * src . numCols + i ; System . arraycopy ( src . data , index , dst . data , index , src . numCols - i ) ; } } else { for ( int i = 0 ; i < src . numRows ; i ++ ) { int length = Math . min ( i + 1 , src . numCols ) ; int index = i * src . numCols ; System . arraycopy ( src . data , index , dst . data , index , length ) ; } } return dst ; |
public class ArchiveRecord { /** * Skip over this records content .
* @ throws IOException */
protected void skip ( ) throws IOException { } } | if ( this . eor ) { return ; } // Read to the end of the body of the record . Exhaust the stream .
// Can ' t skip direct to end because underlying stream may be compressed
// and we ' re calculating the digest for the record .
int r = available ( ) ; while ( r > 0 && ! this . eor ) { skip ( r ) ; r = available ( ) ; } |
public class TaskTracker { /** * Start a new task .
* All exceptions are handled locally , so that we don ' t mess up the
* task tracker . */
private void startNewTask ( TaskInProgress tip ) { } } | try { boolean launched = localizeAndLaunchTask ( tip ) ; if ( ! launched ) { // Free the slot .
tip . kill ( true ) ; tip . cleanup ( true ) ; } } catch ( Throwable e ) { String msg = ( "Error initializing " + tip . getTask ( ) . getTaskID ( ) + ":\n" + StringUtils . stringifyException ( e ) ) ; LOG . error ( msg , e ) ; tip . reportDiagnosticInfo ( msg ) ; try { tip . kill ( true ) ; tip . cleanup ( true ) ; } catch ( IOException ie2 ) { LOG . info ( "Error cleaning up " + tip . getTask ( ) . getTaskID ( ) + ":\n" + StringUtils . stringifyException ( ie2 ) ) ; } // Careful !
// This might not be an ' Exception ' - don ' t handle ' Error ' here !
if ( e instanceof Error ) { throw ( ( Error ) e ) ; } } |
public class WeightVectors { /** * Read a set of weight vector from a file in the resources folder in jMetal
* @ param filePath The name of file in the resources folder of jMetal
* @ return A set of weight vectors */
public static double [ ] [ ] readFromResourcesInJMetal ( String filePath ) { } } | double [ ] [ ] weights ; Vector < double [ ] > listOfWeights = new Vector < > ( ) ; try { InputStream in = WeightVectors . class . getResourceAsStream ( "/" + filePath ) ; InputStreamReader isr = new InputStreamReader ( in ) ; BufferedReader br = new BufferedReader ( isr ) ; int numberOfObjectives = 0 ; int j ; String aux = br . readLine ( ) ; while ( aux != null ) { StringTokenizer st = new StringTokenizer ( aux ) ; j = 0 ; numberOfObjectives = st . countTokens ( ) ; double [ ] weight = new double [ numberOfObjectives ] ; while ( st . hasMoreTokens ( ) ) { weight [ j ] = new Double ( st . nextToken ( ) ) ; j ++ ; } listOfWeights . add ( weight ) ; aux = br . readLine ( ) ; } br . close ( ) ; weights = new double [ listOfWeights . size ( ) ] [ numberOfObjectives ] ; for ( int indexWeight = 0 ; indexWeight < listOfWeights . size ( ) ; indexWeight ++ ) { System . arraycopy ( listOfWeights . get ( indexWeight ) , 0 , weights [ indexWeight ] , 0 , numberOfObjectives ) ; } } catch ( Exception e ) { throw new JMetalException ( "readFromResourcesInJMetal: failed when reading for file: " + filePath + "" , e ) ; } return weights ; |
/**
 * Set an option in this object, based on a String array in the style of
 * commandline flags. The option is only processed with respect to
 * options directly known by the Options object.
 * Some options (there are many others; see the source code):
 * <ul>
 * <li><code>-maxLength n</code> set the maximum length sentence to parse (inclusively)
 * <li><code>-printTT</code> print the training trees in raw, annotated, and annotated+binarized form. Useful for debugging and other miscellany.
 * <li><code>-printAnnotated filename</code> use only in conjunction with -printTT. Redirects printing of annotated training trees to <code>filename</code>.
 * <li><code>-forceTags</code> when the parser is tested against a set of gold standard trees, use the tagged yield, instead of just the yield, as input.
 * </ul>
 *
 * NOTE: the "preset" flags (e.g. -acl03pcfg, -ijcai03, -chineseFactored, ...)
 * deliberately do NOT increment i, so the same flag is subsequently offered
 * to language-specific option processing as well.
 *
 * @param args An array of options arguments, command-line style. E.g. {"-maxLength", "50"}.
 * @param i The index in args to start at when processing an option
 * @return The index in args of the position after the last element used in
 *         processing this option, or the value i unchanged if a valid option couldn't
 *         be processed starting at position i.
 */
private int setOptionFlag(String[] args, int i) {
    // ---- parser-mode flags -------------------------------------------------
    if (args[i].equalsIgnoreCase("-PCFG")) { doDep = false; doPCFG = true; i++; }
    else if (args[i].equalsIgnoreCase("-dep")) { doDep = true; doPCFG = false; i++; }
    else if (args[i].equalsIgnoreCase("-factored")) { doDep = true; doPCFG = true; testOptions.useFastFactored = false; i++; }
    else if (args[i].equalsIgnoreCase("-fastFactored")) { doDep = true; doPCFG = true; testOptions.useFastFactored = true; i++; }
    else if (args[i].equalsIgnoreCase("-noRecoveryTagging")) { testOptions.noRecoveryTagging = true; i++; }
    else if (args[i].equalsIgnoreCase("-useLexiconToScoreDependencyPwGt")) { testOptions.useLexiconToScoreDependencyPwGt = true; i++; }
    else if (args[i].equalsIgnoreCase("-useSmoothTagProjection")) { useSmoothTagProjection = true; i++; }
    else if (args[i].equalsIgnoreCase("-useUnigramWordSmoothing")) { useUnigramWordSmoothing = true; i++; }
    else if (args[i].equalsIgnoreCase("-useNonProjectiveDependencyParser")) { testOptions.useNonProjectiveDependencyParser = true; i++; }
    // ---- test-time limits --------------------------------------------------
    else if (args[i].equalsIgnoreCase("-maxLength") && (i + 1 < args.length)) { testOptions.maxLength = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-MAX_ITEMS") && (i + 1 < args.length)) { testOptions.MAX_ITEMS = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-trainLength") && (i + 1 < args.length)) {
        // train on only short sentences
        trainOptions.trainLengthLimit = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-lengthNormalization")) { testOptions.lengthNormalization = true; i++; }
    else if (args[i].equalsIgnoreCase("-iterativeCKY")) { testOptions.iterativeCKY = true; i++; }
    // ---- vertical/horizontal markovization and splitting --------------------
    else if (args[i].equalsIgnoreCase("-vMarkov") && (i + 1 < args.length)) {
        // order <= 1: no parent annotation; 2: parent; >= 3: grandparent too
        int order = Integer.parseInt(args[i + 1]);
        if (order <= 1) { trainOptions.PA = false; trainOptions.gPA = false; }
        else if (order == 2) { trainOptions.PA = true; trainOptions.gPA = false; }
        else if (order >= 3) { trainOptions.PA = true; trainOptions.gPA = true; }
        i += 2; }
    else if (args[i].equalsIgnoreCase("-vSelSplitCutOff") && (i + 1 < args.length)) {
        // a positive cutoff implicitly enables selective splitting
        trainOptions.selectiveSplitCutOff = Double.parseDouble(args[i + 1]);
        trainOptions.selectiveSplit = trainOptions.selectiveSplitCutOff > 0.0; i += 2; }
    else if (args[i].equalsIgnoreCase("-vSelPostSplitCutOff") && (i + 1 < args.length)) {
        trainOptions.selectivePostSplitCutOff = Double.parseDouble(args[i + 1]);
        trainOptions.selectivePostSplit = trainOptions.selectivePostSplitCutOff > 0.0; i += 2; }
    else if (args[i].equalsIgnoreCase("-deleteSplitters") && (i + 1 < args.length)) {
        // comma-separated list (spaces around commas tolerated)
        String[] toDel = args[i + 1].split(" *, *");
        trainOptions.deleteSplitters = new HashSet<String>(Arrays.asList(toDel)); i += 2; }
    else if (args[i].equalsIgnoreCase("-postSplitWithBaseCategory")) { trainOptions.postSplitWithBaseCategory = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-vPostMarkov") && (i + 1 < args.length)) {
        int order = Integer.parseInt(args[i + 1]);
        if (order <= 1) { trainOptions.postPA = false; trainOptions.postGPA = false; }
        else if (order == 2) { trainOptions.postPA = true; trainOptions.postGPA = false; }
        else if (order >= 3) { trainOptions.postPA = true; trainOptions.postGPA = true; }
        i += 2; }
    else if (args[i].equalsIgnoreCase("-hMarkov") && (i + 1 < args.length)) {
        // negative order disables markov factoring entirely
        int order = Integer.parseInt(args[i + 1]);
        if (order >= 0) { trainOptions.markovOrder = order; trainOptions.markovFactor = true; }
        else { trainOptions.markovFactor = false; }
        i += 2; }
    else if (args[i].equalsIgnoreCase("-distanceBins") && (i + 1 < args.length)) {
        // only 1 (off), 4 (coarse) and 5 (fine) bins are supported
        int numBins = Integer.parseInt(args[i + 1]);
        if (numBins <= 1) { distance = false; }
        else if (numBins == 4) { distance = true; coarseDistance = true; }
        else if (numBins == 5) { distance = true; coarseDistance = false; }
        else { throw new IllegalArgumentException("Invalid value for -distanceBin: " + args[i + 1]); }
        i += 2; }
    else if (args[i].equalsIgnoreCase("-noStop")) { genStop = false; i++; }
    else if (args[i].equalsIgnoreCase("-nonDirectional")) { directional = false; i++; }
    else if (args[i].equalsIgnoreCase("-depWeight") && (i + 1 < args.length)) { testOptions.depWeight = Double.parseDouble(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-printPCFGkBest") && (i + 1 < args.length)) { testOptions.printPCFGkBest = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-printFactoredKGood") && (i + 1 < args.length)) { testOptions.printFactoredKGood = Integer.parseInt(args[i + 1]); i += 2; }
    // ---- lexicon / smoothing ------------------------------------------------
    else if (args[i].equalsIgnoreCase("-smoothTagsThresh") && (i + 1 < args.length)) { lexOptions.smoothInUnknownsThreshold = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-unseenSmooth") && (i + 1 < args.length)) { testOptions.unseenSmooth = Double.parseDouble(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-fractionBeforeUnseenCounting") && (i + 1 < args.length)) { trainOptions.fractionBeforeUnseenCounting = Double.parseDouble(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-hSelSplitThresh") && (i + 1 < args.length)) {
        trainOptions.HSEL_CUT = Integer.parseInt(args[i + 1]);
        trainOptions.hSelSplit = trainOptions.HSEL_CUT > 0; i += 2; }
    else if (args[i].equalsIgnoreCase("-tagPA")) { trainOptions.tagPA = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-tagSelSplitCutOff") && (i + 1 < args.length)) {
        trainOptions.tagSelectiveSplitCutOff = Double.parseDouble(args[i + 1]);
        trainOptions.tagSelectiveSplit = trainOptions.tagSelectiveSplitCutOff > 0.0; i += 2; }
    else if (args[i].equalsIgnoreCase("-tagSelPostSplitCutOff") && (i + 1 < args.length)) {
        trainOptions.tagSelectivePostSplitCutOff = Double.parseDouble(args[i + 1]);
        trainOptions.tagSelectivePostSplit = trainOptions.tagSelectivePostSplitCutOff > 0.0; i += 2; }
    else if (args[i].equalsIgnoreCase("-noTagSplit")) { trainOptions.noTagSplit = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-uwm") && (i + 1 < args.length)) { lexOptions.useUnknownWordSignatures = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-unknownSuffixSize") && (i + 1 < args.length)) { lexOptions.unknownSuffixSize = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-unknownPrefixSize") && (i + 1 < args.length)) { lexOptions.unknownPrefixSize = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-uwModelTrainer") && (i + 1 < args.length)) { lexOptions.uwModelTrainer = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-openClassThreshold") && (i + 1 < args.length)) { trainOptions.openClassTypesThreshold = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-unary") && i + 1 < args.length) { trainOptions.markUnary = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-unaryTags")) { trainOptions.markUnaryTags = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-mutate")) { lexOptions.smartMutation = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-useUnicodeType")) { lexOptions.useUnicodeType = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-rightRec")) { trainOptions.rightRec = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-noRightRec")) { trainOptions.rightRec = false; i += 1; }
    // ---- tagging behaviour at test time --------------------------------------
    else if (args[i].equalsIgnoreCase("-preTag")) { testOptions.preTag = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-forceTags")) { testOptions.forceTags = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-taggerSerializedFile")) {
        // NOTE(review): unlike most value-taking flags, this one does not
        // bounds-check i + 1 before reading it — confirm whether intended.
        testOptions.taggerSerializedFile = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-forceTagBeginnings")) { testOptions.forceTagBeginnings = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-noFunctionalForcing")) { testOptions.noFunctionalForcing = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-scTags")) { dcTags = false; i += 1; }
    else if (args[i].equalsIgnoreCase("-dcTags")) { dcTags = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-basicCategoryTagsInDependencyGrammar")) { trainOptions.basicCategoryTagsInDependencyGrammar = true; i += 1; }
    // ---- output / evaluation --------------------------------------------------
    else if (args[i].equalsIgnoreCase("-evalb")) { testOptions.evalb = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-v") || args[i].equalsIgnoreCase("-verbose")) { testOptions.verbose = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-outputFilesDirectory") && i + 1 < args.length) { testOptions.outputFilesDirectory = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-outputFilesExtension") && i + 1 < args.length) { testOptions.outputFilesExtension = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-outputFilesPrefix") && i + 1 < args.length) { testOptions.outputFilesPrefix = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-outputkBestEquivocation") && i + 1 < args.length) { testOptions.outputkBestEquivocation = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-writeOutputFiles")) { testOptions.writeOutputFiles = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-printAllBestParses")) { testOptions.printAllBestParses = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-outputTreeFormat") || args[i].equalsIgnoreCase("-outputFormat")) { testOptions.outputFormat = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-outputTreeFormatOptions") || args[i].equalsIgnoreCase("-outputFormatOptions")) { testOptions.outputFormatOptions = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-addMissingFinalPunctuation")) { testOptions.addMissingFinalPunctuation = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-flexiTag")) { lexOptions.flexiTag = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-lexiTag")) { lexOptions.flexiTag = false; i += 1; }
    else if (args[i].equalsIgnoreCase("-useSignatureForKnownSmoothing")) { lexOptions.useSignatureForKnownSmoothing = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-compactGrammar")) { trainOptions.compactGrammar = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-markFinalStates")) { trainOptions.markFinalStates = args[i + 1].equalsIgnoreCase("true"); i += 2; }
    else if (args[i].equalsIgnoreCase("-leftToRight")) { trainOptions.leftToRight = args[i + 1].equals("true"); i += 2; }
    else if (args[i].equalsIgnoreCase("-cnf")) { forceCNF = true; i += 1; }
    else if (args[i].equalsIgnoreCase("-smoothRules")) { trainOptions.ruleSmoothing = true; trainOptions.ruleSmoothingAlpha = Double.valueOf(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-nodePrune") && i + 1 < args.length) { nodePrune = args[i + 1].equalsIgnoreCase("true"); i += 2; }
    else if (args[i].equalsIgnoreCase("-noDoRecovery")) { testOptions.doRecovery = false; i += 1; }
    // ---- preset bundles (these intentionally do NOT advance i) ----------------
    else if (args[i].equalsIgnoreCase("-acl03chinese")) {
        trainOptions.markovOrder = 1; trainOptions.markovFactor = true;
        // no increment
    } else if (args[i].equalsIgnoreCase("-wordFunction")) {
        wordFunction = ReflectionLoading.loadByReflection(args[i + 1]); i += 2;
    } else if (args[i].equalsIgnoreCase("-acl03pcfg")) {
        doDep = false; doPCFG = true;
        // lexOptions.smoothInUnknownsThreshold = 30;
        trainOptions.markUnary = 1; trainOptions.PA = true; trainOptions.gPA = false;
        trainOptions.tagPA = true; trainOptions.tagSelectiveSplit = false;
        trainOptions.rightRec = true; trainOptions.selectiveSplit = true;
        trainOptions.selectiveSplitCutOff = 400.0; trainOptions.markovFactor = true;
        trainOptions.markovOrder = 2; trainOptions.hSelSplit = true;
        lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = true;
        // DAN: Tag double-counting is BAD for PCFG-only parsing
        dcTags = false;
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-jenny")) {
        doDep = false; doPCFG = true;
        // lexOptions.smoothInUnknownsThreshold = 30;
        trainOptions.markUnary = 1; trainOptions.PA = false; trainOptions.gPA = false;
        trainOptions.tagPA = false; trainOptions.tagSelectiveSplit = false;
        trainOptions.rightRec = true; trainOptions.selectiveSplit = false;
        // trainOptions.selectiveSplitCutOff = 400.0;
        trainOptions.markovFactor = false;
        // trainOptions.markovOrder = 2;
        trainOptions.hSelSplit = false;
        lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = true;
        // DAN: Tag double-counting is BAD for PCFG-only parsing
        dcTags = false;
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-goodPCFG")) {
        doDep = false; doPCFG = true;
        // op.lexOptions.smoothInUnknownsThreshold = 30;
        trainOptions.markUnary = 1; trainOptions.PA = true; trainOptions.gPA = false;
        trainOptions.tagPA = true; trainOptions.tagSelectiveSplit = false;
        trainOptions.rightRec = true; trainOptions.selectiveSplit = true;
        trainOptions.selectiveSplitCutOff = 400.0; trainOptions.markovFactor = true;
        trainOptions.markovOrder = 2; trainOptions.hSelSplit = true;
        lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = true;
        // DAN: Tag double-counting is BAD for PCFG-only parsing
        dcTags = false;
        // recursively apply -deleteSplitters with a fixed VP splitter list
        String[] delSplit = new String[]{ "-deleteSplitters", "VP^NP,VP^VP,VP^SINV,VP^SQ" };
        if (this.setOptionFlag(delSplit, 0) != 2) {
            System.err.println("Error processing deleteSplitters");
        }
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-linguisticPCFG")) {
        doDep = false; doPCFG = true;
        // op.lexOptions.smoothInUnknownsThreshold = 30;
        trainOptions.markUnary = 1; trainOptions.PA = true; trainOptions.gPA = false;
        trainOptions.tagPA = true; // on at the moment, but iffy
        trainOptions.tagSelectiveSplit = false;
        trainOptions.rightRec = false; // not for linguistic
        trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 400.0;
        trainOptions.markovFactor = true; trainOptions.markovOrder = 2;
        trainOptions.hSelSplit = true;
        lexOptions.useUnknownWordSignatures = 5; // different from acl03pcfg
        lexOptions.flexiTag = false; // different from acl03pcfg
        // DAN: Tag double-counting is BAD for PCFG-only parsing
        dcTags = false;
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-ijcai03")) {
        doDep = true; doPCFG = true;
        trainOptions.markUnary = 0; trainOptions.PA = true; trainOptions.gPA = false;
        trainOptions.tagPA = false; trainOptions.tagSelectiveSplit = false;
        trainOptions.rightRec = false; trainOptions.selectiveSplit = true;
        trainOptions.selectiveSplitCutOff = 300.0; trainOptions.markovFactor = true;
        trainOptions.markovOrder = 2; trainOptions.hSelSplit = true;
        trainOptions.compactGrammar = 0; // cdm: May 2005 compacting bad for factored?
        lexOptions.useUnknownWordSignatures = 2; lexOptions.flexiTag = false;
        dcTags = true;
        // op.nodePrune = true; // cdm: May 2005: this doesn't help
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-goodFactored")) {
        doDep = true; doPCFG = true;
        trainOptions.markUnary = 0; trainOptions.PA = true; trainOptions.gPA = false;
        trainOptions.tagPA = false; trainOptions.tagSelectiveSplit = false;
        trainOptions.rightRec = false; trainOptions.selectiveSplit = true;
        trainOptions.selectiveSplitCutOff = 300.0; trainOptions.markovFactor = true;
        trainOptions.markovOrder = 2; trainOptions.hSelSplit = true;
        trainOptions.compactGrammar = 0; // cdm: May 2005 compacting bad for factored?
        lexOptions.useUnknownWordSignatures = 5; // different from ijcai03
        lexOptions.flexiTag = false;
        dcTags = true;
        // op.nodePrune = true; // cdm: May 2005: this doesn't help
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-chineseFactored")) {
        // Single counting tag->word rewrite is also much better for Chinese
        // Factored. Bracketing F1 goes up about 0.7%.
        dcTags = false;
        lexOptions.useUnicodeType = true;
        trainOptions.markovOrder = 2; trainOptions.hSelSplit = true;
        trainOptions.markovFactor = true; trainOptions.HSEL_CUT = 50;
        // trainOptions.openClassTypesThreshold = 1; // so can get unseen punctuation
        // trainOptions.fractionBeforeUnseenCounting = 0.0; // so can get unseen punctuation
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-arabicFactored")) {
        doDep = true; doPCFG = true;
        dcTags = false; // "false" seems to help Arabic about 0.1% F1
        trainOptions.markovFactor = true; trainOptions.markovOrder = 2;
        trainOptions.hSelSplit = true;
        trainOptions.HSEL_CUT = 75; // 75 bit better than 50, 100 a bit worse
        trainOptions.PA = true; trainOptions.gPA = false;
        trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 300.0;
        trainOptions.markUnary = 1; // Helps PCFG and marginally factLB
        // trainOptions.compactGrammar = 0; // Doesn't seem to help or only 0.05% F1
        lexOptions.useUnknownWordSignatures = 9;
        lexOptions.unknownPrefixSize = 1; lexOptions.unknownSuffixSize = 1;
        testOptions.MAX_ITEMS = 500000; // Arabic sentences are long enough that this helps a fraction
        // don't increment i so it gets language specific stuff as well
    } else if (args[i].equalsIgnoreCase("-frenchFactored")) {
        doDep = true; doPCFG = true;
        dcTags = false; // wsg2011: Setting to false improves F1 by 0.5%
        trainOptions.markovFactor = true; trainOptions.markovOrder = 2;
        trainOptions.hSelSplit = true; trainOptions.HSEL_CUT = 75;
        trainOptions.PA = true; trainOptions.gPA = false;
        trainOptions.selectiveSplit = true; trainOptions.selectiveSplitCutOff = 300.0;
        trainOptions.markUnary = 0; // Unary rule marking bad for french.. setting to 0 gives +0.3 F1
        lexOptions.useUnknownWordSignatures = 1;
        lexOptions.unknownPrefixSize = 1; lexOptions.unknownSuffixSize = 2;
        // NOTE(review): like the other presets, i is not advanced here
    } else if (args[i].equalsIgnoreCase("-chinesePCFG")) {
        trainOptions.markovOrder = 2; trainOptions.markovFactor = true;
        trainOptions.HSEL_CUT = 5;
        trainOptions.PA = true; trainOptions.gPA = true;
        trainOptions.selectiveSplit = false;
        doDep = false; doPCFG = true;
        // Single counting tag->word rewrite is also much better for Chinese PCFG
        // Bracketing F1 is up about 2% and tag accuracy about 1% (exact by 6%)
        dcTags = false;
        // no increment
    }
    // ---- debugging / training-tree printing -----------------------------------
    else if (args[i].equalsIgnoreCase("-printTT") && (i + 1 < args.length)) { trainOptions.printTreeTransformations = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-printAnnotatedRuleCounts")) { trainOptions.printAnnotatedRuleCounts = true; i++; }
    else if (args[i].equalsIgnoreCase("-printAnnotatedStateCounts")) { trainOptions.printAnnotatedStateCounts = true; i++; }
    else if (args[i].equalsIgnoreCase("-printAnnotated") && (i + 1 < args.length)) {
        // redirect annotated-tree printing to a file; silently disabled on IO error
        try { trainOptions.printAnnotatedPW = tlpParams.pw(new FileOutputStream(args[i + 1])); }
        catch (IOException ioe) { trainOptions.printAnnotatedPW = null; }
        i += 2; }
    else if (args[i].equalsIgnoreCase("-printBinarized") && (i + 1 < args.length)) {
        try { trainOptions.printBinarizedPW = tlpParams.pw(new FileOutputStream(args[i + 1])); }
        catch (IOException ioe) { trainOptions.printBinarizedPW = null; }
        i += 2; }
    else if (args[i].equalsIgnoreCase("-printStates")) { trainOptions.printStates = true; i++; }
    else if (args[i].equalsIgnoreCase("-preTransformer") && (i + 1 < args.length)) {
        // one class name: use directly; several: compose them in order
        String[] classes = args[i + 1].split(",");
        i += 2;
        if (classes.length == 1) {
            trainOptions.preTransformer = ReflectionLoading.loadByReflection(classes[0], this);
        } else if (classes.length > 1) {
            CompositeTreeTransformer composite = new CompositeTreeTransformer();
            trainOptions.preTransformer = composite;
            for (String clazz : classes) {
                TreeTransformer transformer = ReflectionLoading.loadByReflection(clazz, this);
                composite.addTransformer(transformer);
            }
        }
    }
    else if (args[i].equalsIgnoreCase("-taggedFiles") && (i + 1 < args.length)) { trainOptions.taggedFiles = args[i + 1]; i += 2; }
    else if (args[i].equalsIgnoreCase("-evals")) { testOptions.evals = StringUtils.stringToProperties(args[i + 1], testOptions.evals); i += 2; }
    else if (args[i].equalsIgnoreCase("-fastFactoredCandidateMultiplier")) { testOptions.fastFactoredCandidateMultiplier = Integer.parseInt(args[i + 1]); i += 2; }
    else if (args[i].equalsIgnoreCase("-fastFactoredCandidateAddend")) { testOptions.fastFactoredCandidateAddend = Integer.parseInt(args[i + 1]); i += 2; }
    // unrecognised flag: return i unchanged so the caller can try other handlers
    return i;
}
public class TableExpandableContentModelExample { /** * Override preparePaintComponent in order to set up the example data the first time that the example is accessed by
* each user .
* @ param request the request being responded to . */
@ Override protected void preparePaintComponent ( final Request request ) { } } | super . preparePaintComponent ( request ) ; if ( ! isInitialised ( ) ) { // This model holds the data so would be included on the user session .
ExampleExpandableModel data = new ExampleExpandableModel ( ExampleDataUtil . createExampleData ( ) , TravelDocPanel . class ) ; table . setTableModel ( data ) ; setInitialised ( true ) ; } |
/**
 * Marshall the given parameter object.
 *
 * @param versionSummary the object to marshall; must not be null
 * @param protocolMarshaller the marshaller that serializes each field
 * @throws SdkClientException if the argument is null or marshalling fails
 */
public void marshall(VersionSummary versionSummary, ProtocolMarshaller protocolMarshaller) {
    if (versionSummary == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // Each getter value is written under the wire name described by its *_BINDING.
        protocolMarshaller.marshall(versionSummary.getApplicationId(), APPLICATIONID_BINDING);
        protocolMarshaller.marshall(versionSummary.getCreationTime(), CREATIONTIME_BINDING);
        protocolMarshaller.marshall(versionSummary.getSemanticVersion(), SEMANTICVERSION_BINDING);
        protocolMarshaller.marshall(versionSummary.getSourceCodeUrl(), SOURCECODEURL_BINDING);
    } catch (Exception e) {
        // Wrap any failure, preserving the original exception as the cause.
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
public class SLP { /** * Creates a new UserAgent with the given configuration ; the UserAgent must be started .
* @ param settings the configuration for the UserAgent
* @ return a new UserAgent with the given configuration */
public static UserAgent newUserAgent ( Settings settings ) { } } | UserAgent . Factory factory = Factories . newInstance ( settings , Keys . UA_FACTORY_KEY ) ; return factory . newUserAgent ( settings ) ; |
public class DDSClient { /** * convenience method to do a http request on url , and return result as a string
* @ param url
* @ return
* @ throws IOException */
private static String fetchHttpResponse ( URL url ) throws IOException { } } | HttpURLConnection conn = null ; try { logger . info ( "fetching from url = " + url ) ; conn = ( HttpURLConnection ) url . openConnection ( ) ; int response = conn . getResponseCode ( ) ; if ( response != HttpURLConnection . HTTP_ACCEPTED ) { String responseContent = IOUtils . toString ( conn . getInputStream ( ) ) ; logger . info ( "responseContent = " + responseContent ) ; return responseContent ; } throw new IOException ( "response from service not valid" ) ; } catch ( IOException e ) { logger . severe ( "unable to do proper http request url = " + url . toString ( ) ) ; throw e ; } finally { conn . disconnect ( ) ; } |
public class DataModelFactory { /** * Generates a module regarding the parameters .
* @ param name String
* @ param version String
* @ return Module */
public static Module createModule ( final String name , final String version ) { } } | final Module module = new Module ( ) ; module . setName ( name ) ; module . setVersion ( version ) ; module . setPromoted ( false ) ; return module ; |
/**
 * Server only. <br>
 * Sends the chunks coordinates to the client when they get watched by them.
 *
 * @param event the event
 */
@SubscribeEvent
public void onChunkWatched(ChunkWatchEvent.Watch event) {
    // Resolve the actual Chunk instance from the watched chunk's coordinates
    // in the watching player's world.
    Chunk chunk = event.getPlayer().world.getChunkFromChunkCoords(event.getChunk().x, event.getChunk().z);
    for (HandlerInfo<?> handlerInfo : handlerInfos.values()) {
        ChunkData<?> chunkData = instance.chunkData(handlerInfo.identifier, chunk.getWorld(), chunk);
        // Only chunks that actually carry data for this handler are synced,
        // so empty chunks cost no network traffic.
        if (chunkData != null && chunkData.hasData())
            BlockDataMessage.sendBlockData(chunk, handlerInfo.identifier, chunkData.toBytes(Unpooled.buffer()), event.getPlayer());
    }
}
public class PasswordlessLock { /** * Builds a new intent to launch LockActivity with the previously configured options
* @ param context a valid Context
* @ return the intent to which the user has to call startActivity or startActivityForResult */
@ SuppressWarnings ( "unused" ) public Intent newIntent ( Context context ) { } } | Intent lockIntent = new Intent ( context , PasswordlessLockActivity . class ) ; lockIntent . putExtra ( Constants . OPTIONS_EXTRA , options ) ; lockIntent . addFlags ( Intent . FLAG_ACTIVITY_NEW_TASK ) ; return lockIntent ; |
public class CardAPI { /** * 更改卡券信息接口 ( 兑换券 )
* @ param accessToken accessToken
* @ param updateGift updateGift
* @ return result */
public static UpdateResult update ( String accessToken , UpdateGift updateGift ) { } } | return update ( accessToken , JsonUtil . toJSONString ( updateGift ) ) ; |
public class JavametricsWebSocket { /** * ( non - Javadoc )
* @ see com . ibm . javametrics . MetricsEmitter # emit ( java . lang . String ) */
public void emit ( String message ) { } } | openSessions . forEach ( ( session ) -> { try { if ( session . isOpen ( ) ) { session . getBasicRemote ( ) . sendText ( message ) ; } |
public class FileUtils {
    /**
     * Return true only if path is a jar file.
     *
     * @param path to a file/dir
     * @return true if the path is a regular file with a {@code .jar} ending
     *         (case-insensitive)
     */
    public static boolean isJarFile(Path path) {
        // Locale.ROOT makes the case-fold locale-independent; the bare
        // toLowerCase() uses the default locale, which can fold letters
        // unexpectedly (e.g. the Turkish dotless i).
        return Files.isRegularFile(path)
                && path.toString().toLowerCase(Locale.ROOT).endsWith(".jar");
    }
}
/**
 * Load an {@link ExtensionInfo} for a certain class.
 *
 * @param className absolute class name
 * @param classLoader class loader to access the class
 * @return the {@link ExtensionInfo}, if the class was annotated with an {@link Extension}, otherwise null
 */
public static ExtensionInfo load(String className, ClassLoader classLoader) {
    // Read the raw .class bytes instead of loading the class, so the
    // annotation can be inspected without running static initializers.
    try (InputStream input = classLoader.getResourceAsStream(className.replace('.', '/') + ".class")) {
        ExtensionInfo info = new ExtensionInfo(className);
        // The ASM visitor populates 'info' from the class's annotations;
        // SKIP_DEBUG avoids parsing debug tables we don't need.
        new ClassReader(input).accept(new ExtensionVisitor(info), ClassReader.SKIP_DEBUG);
        return info;
    } catch (IOException e) {
        // NOTE(review): when the resource is missing, getResourceAsStream
        // yields null and ClassReader appears to surface that as an
        // IOException, which lands here — confirm against the ASM version in use.
        log.error(e.getMessage(), e);
        return null;
    }
}
/**
 * Parameter write indicates either an orderly close, or a fast close
 * without backup.
 *
 * When false, just closes the file.
 *
 * When true, writes out all cached rows that have been modified and the
 * free position pointer for the *.data file and then closes the file.
 */
public void close(boolean write) {
    SimpleLog appLog = database.logger.appLog;
    try {
        if (cacheReadonly) {
            // Read-only cache: nothing to flush, just release the handle.
            if (dataFile != null) {
                dataFile.close();
                dataFile = null;
            }
            return;
        }
        StopWatch sw = new StopWatch();
        appLog.sendLine(SimpleLog.LOG_NORMAL, "DataFileCache.close(" + write + ") : start");
        if (write) {
            // Flush all dirty cached rows to the data file first.
            cache.saveAll();
            Error.printSystemOut("saveAll: " + sw.elapsedTime());
            appLog.sendLine(SimpleLog.LOG_NORMAL, "DataFileCache.close() : save data");
            if (fileModified || freeBlocks.isModified()) {
                // set empty
                dataFile.seek(LONG_EMPTY_SIZE);
                dataFile.writeLong(freeBlocks.getLostBlocksSize());
                // set end
                dataFile.seek(LONG_FREE_POS_POS);
                dataFile.writeLong(fileFreePosition);
                // set saved flag;
                dataFile.seek(FLAGS_POS);
                int flag = BitMap.set(0, FLAG_ISSAVED);
                if (hasRowInfo) {
                    flag = BitMap.set(flag, FLAG_ROWINFO);
                }
                dataFile.writeInt(flag);
                appLog.sendLine(SimpleLog.LOG_NORMAL, "DataFileCache.close() : flags");
                // Position the file pointer at the logical end before closing.
                if (dataFile.length() != fileFreePosition) {
                    dataFile.seek(fileFreePosition);
                }
                appLog.sendLine(SimpleLog.LOG_NORMAL, "DataFileCache.close() : seek end");
                Error.printSystemOut("pos and flags: " + sw.elapsedTime());
            }
        }
        if (dataFile != null) {
            dataFile.close();
            appLog.sendLine(SimpleLog.LOG_NORMAL, "DataFileCache.close() : close");
            dataFile = null;
            Error.printSystemOut("close: " + sw.elapsedTime());
        }
        // If nothing was ever stored, remove the (empty) data and backup files.
        boolean empty = fileFreePosition == INITIAL_FREE_POS;
        if (empty) {
            fa.removeElement(fileName);
            fa.removeElement(backupFileName);
        }
    } catch (Throwable e) {
        // Any failure during close is logged and rethrown as a database
        // FILE_IO_ERROR carrying the cause and the file name.
        appLog.logContext(e, null);
        throw Error.error(ErrorCode.FILE_IO_ERROR, ErrorCode.M_DataFileCache_close, new Object[]{ e, fileName });
    }
}
public class CanalEmbedSelector { /** * 记录一下message对象 */
private synchronized void dumpMessages ( Message message , String startPosition , String endPosition , int total ) { } } | try { MDC . put ( OtterConstants . splitPipelineSelectLogFileKey , String . valueOf ( pipelineId ) ) ; logger . info ( SEP + "****************************************************" + SEP ) ; logger . info ( MessageDumper . dumpMessageInfo ( message , startPosition , endPosition , total ) ) ; logger . info ( "****************************************************" + SEP ) ; if ( dumpDetail ) { // 判断一下是否需要打印详细信息
dumpEventDatas ( message . getDatas ( ) ) ; logger . info ( "****************************************************" + SEP ) ; } } finally { MDC . remove ( OtterConstants . splitPipelineSelectLogFileKey ) ; } |
public class CmsDriverManager { /** * Returns the correct project id . < p >
* @ param dbc the database context
* @ return the correct project id */
private CmsUUID getProjectIdForContext ( CmsDbContext dbc ) { } } | CmsUUID projectId = dbc . getProjectId ( ) ; if ( projectId . isNullUUID ( ) ) { projectId = dbc . currentProject ( ) . getUuid ( ) ; } return projectId ; |
public class RegisteredResources { /** * Send end to all registered resources
* @ param flags
* @ return whether we managed to successfully end all the resources */
public boolean distributeEnd ( int flags ) { } } | if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "distributeEnd" , Util . printFlag ( flags ) ) ; boolean result = true ; for ( int i = _resourceObjects . size ( ) ; -- i >= 0 ; ) { final JTAResource resource = _resourceObjects . get ( i ) ; if ( ! sendEnd ( resource , flags ) ) { result = false ; } } if ( _sameRMResource != null ) { if ( ! sendEnd ( _sameRMResource , flags ) ) { result = false ; } } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "distributeEnd" , result ) ; return result ; |
public class ArrayListAnalyzer { /** * This method calculates and returns information relating the operation to be performed .
* @ param destination destination field to be analyzed
* @ param source source field to be analyzed
* @ return all information relating the operation to be performed */
public InfoOperation getInfoOperation ( final Field destination , final Field source ) { } } | Class < ? > dClass = destination . getType ( ) ; Class < ? > sClass = source . getType ( ) ; Class < ? > dItem = null ; Class < ? > sItem = null ; InfoOperation operation = new InfoOperation ( ) . setConversionType ( UNDEFINED ) ; // Array [ ] = Collection < >
if ( dClass . isArray ( ) && collectionIsAssignableFrom ( sClass ) ) { dItem = dClass . getComponentType ( ) ; sItem = getCollectionItemClass ( source ) ; operation . setInstructionType ( ARRAY_LIST ) ; if ( areMappedObjects ( dItem , sItem , xml ) ) return operation . setInstructionType ( ARRAY_LIST_WITH_MAPPED_ITEMS ) . setConfigChosen ( configChosen ( dItem , sItem , xml ) ) ; } // Collection < > = Array [ ]
if ( collectionIsAssignableFrom ( dClass ) && sClass . isArray ( ) ) { dItem = getCollectionItemClass ( destination ) ; sItem = sClass . getComponentType ( ) ; operation . setInstructionType ( LIST_ARRAY ) ; if ( areMappedObjects ( dItem , sItem , xml ) ) return operation . setInstructionType ( LIST_ARRAY_WITH_MAPPED_ITEMS ) . setConfigChosen ( configChosen ( dItem , sItem , xml ) ) ; } if ( isAssignableFrom ( dItem , sItem ) ) return operation . setConversionType ( ABSENT ) ; // if components are primitive or wrapper types , apply implicit conversion
if ( areBasic ( dItem , sItem ) ) return operation . setConversionType ( getConversionType ( dItem , sItem ) ) ; return operation ; |
public class NetworkDataReceiverThread2 { /** * Remove any trace writers that are writing to the given window name .
* @ param nameCriteria */
public void removeTraceWriter ( String nameCriteria ) { } } | for ( ITraceWriter tw : getTraceWriters ( ) ) { if ( LOG . isDebugEnabled ( ) ) LOG . debug ( "Trying to remove a trace writer. Looking for [" + nameCriteria + "] found [" + tw . getName ( ) + "]" ) ; if ( tw . getName ( ) . equals ( nameCriteria ) ) getTraceWriters ( ) . remove ( tw ) ; } |
public class UnconditionalValueDerefDataflowFactory { /** * ( non - Javadoc )
* @ see
* edu . umd . cs . findbugs . classfile . IAnalysisEngine # analyze ( edu . umd . cs . findbugs
* . classfile . IAnalysisCache , java . lang . Object ) */
@ Override public UnconditionalValueDerefDataflow analyze ( IAnalysisCache analysisCache , MethodDescriptor descriptor ) throws CheckedAnalysisException { } } | MethodGen methodGen = getMethodGen ( analysisCache , descriptor ) ; if ( methodGen == null ) { throw new MethodUnprofitableException ( descriptor ) ; } CFG cfg = getCFG ( analysisCache , descriptor ) ; ValueNumberDataflow vnd = getValueNumberDataflow ( analysisCache , descriptor ) ; UnconditionalValueDerefAnalysis analysis = new UnconditionalValueDerefAnalysis ( getReverseDepthFirstSearch ( analysisCache , descriptor ) , getDepthFirstSearch ( analysisCache , descriptor ) , cfg , getMethod ( analysisCache , descriptor ) , methodGen , vnd , getAssertionMethods ( analysisCache , descriptor . getClassDescriptor ( ) ) ) ; IsNullValueDataflow inv = getIsNullValueDataflow ( analysisCache , descriptor ) ; // XXX : hack to clear derefs on not - null branches
analysis . clearDerefsOnNonNullBranches ( inv ) ; TypeDataflow typeDataflow = getTypeDataflow ( analysisCache , descriptor ) ; // XXX : type analysis is needed to resolve method calls for
// checking whether call targets unconditionally dereference parameters
analysis . setTypeDataflow ( typeDataflow ) ; UnconditionalValueDerefDataflow dataflow = new UnconditionalValueDerefDataflow ( cfg , analysis ) ; dataflow . execute ( ) ; if ( ClassContext . DUMP_DATAFLOW_ANALYSIS ) { dataflow . dumpDataflow ( analysis ) ; } if ( UnconditionalValueDerefAnalysis . DEBUG ) { ClassContext . dumpDataflowInformation ( getMethod ( analysisCache , descriptor ) , cfg , vnd , inv , dataflow , typeDataflow ) ; } return dataflow ; |
public class CPOptionLocalServiceWrapper { /** * Updates the cp option in the database or adds it if it does not yet exist . Also notifies the appropriate model listeners .
* @ param cpOption the cp option
* @ return the cp option that was updated */
@ Override public com . liferay . commerce . product . model . CPOption updateCPOption ( com . liferay . commerce . product . model . CPOption cpOption ) { } } | return _cpOptionLocalService . updateCPOption ( cpOption ) ; |
public class UtcTimeUtilities { /** * Get String of format : YYYY - MM - DD HH : MM from { @ link DateTime } .
* @ param dateTime the { @ link DateTime } .
* @ return the date string . */
public static String toStringWithMinutes ( DateTime dateTime ) { } } | String dtStr = dateTime . toString ( withMinutesformatter ) ; return dtStr ; |
public class AbstrCFMLExprTransformer { /** * Liest alle folgenden Kommentare ein . < br / >
* EBNF : < br / >
* < code > { ? - " \ n " } " \ n " ; < / code >
* @ param data
* @ throws TemplateException */
protected void comments ( Data data ) throws TemplateException { } } | data . srcCode . removeSpace ( ) ; while ( comment ( data ) ) { data . srcCode . removeSpace ( ) ; } |
public class DialogFragmentUtils { /** * Show { @ link android . support . v4 . app . DialogFragment } with the specified tag on the loader callbacks .
* @ param handler the handler , in most case , this handler is the main handler .
* @ param manager the manager .
* @ param fragment the fragment .
* @ param tag the tag string that is related to the { @ link android . support . v4 . app . DialogFragment } . */
public static void supportShowOnLoaderCallback ( Handler handler , final android . support . v4 . app . FragmentManager manager , final android . support . v4 . app . DialogFragment fragment , final String tag ) { } } | handler . post ( new Runnable ( ) { @ Override public void run ( ) { fragment . show ( manager , tag ) ; } } ) ; |
public class Bot { /** * Method to send a reply back to Slack after receiving an { @ link Event } .
* Learn < a href = " https : / / api . slack . com / rtm " > more on sending responses to Slack . < / a >
* @ param session websocket session between bot and slack
* @ param event received from slack
* @ param reply the message to send to slack */
protected final void reply ( WebSocketSession session , Event event , Message reply ) { } } | try { if ( StringUtils . isEmpty ( reply . getType ( ) ) ) { reply . setType ( EventType . MESSAGE . name ( ) . toLowerCase ( ) ) ; } reply . setText ( encode ( reply . getText ( ) ) ) ; if ( reply . getChannel ( ) == null && event . getChannelId ( ) != null ) { reply . setChannel ( event . getChannelId ( ) ) ; } synchronized ( sendMessageLock ) { session . sendMessage ( new TextMessage ( reply . toJSONString ( ) ) ) ; } if ( logger . isDebugEnabled ( ) ) { // For debugging purpose only
logger . debug ( "Reply (Message): {}" , reply . toJSONString ( ) ) ; } } catch ( IOException e ) { logger . error ( "Error sending event: {}. Exception: {}" , event . getText ( ) , e . getMessage ( ) ) ; } |
public class H2DBLock { /** * Obtains a lock on the H2 database .
* @ throws H2DBLockException thrown if a lock could not be obtained */
public void lock ( ) throws H2DBLockException { } } | try { final File dir = settings . getDataDirectory ( ) ; lockFile = new File ( dir , "odc.update.lock" ) ; checkState ( ) ; int ctr = 0 ; do { try { if ( ! lockFile . exists ( ) && lockFile . createNewFile ( ) ) { file = new RandomAccessFile ( lockFile , "rw" ) ; lock = file . getChannel ( ) . lock ( ) ; file . writeBytes ( magic ) ; file . getChannel ( ) . force ( true ) ; Thread . sleep ( 20 ) ; file . seek ( 0 ) ; final String current = file . readLine ( ) ; if ( current != null && ! current . equals ( magic ) ) { lock . close ( ) ; lock = null ; LOGGER . debug ( "Another process obtained a lock first ({})" , Thread . currentThread ( ) . getName ( ) ) ; } else { addShutdownHook ( ) ; final Timestamp timestamp = new Timestamp ( System . currentTimeMillis ( ) ) ; LOGGER . debug ( "Lock file created ({}) {} @ {}" , Thread . currentThread ( ) . getName ( ) , magic , timestamp . toString ( ) ) ; } } } catch ( IOException | InterruptedException ex ) { LOGGER . trace ( "Expected error as another thread has likely locked the file" , ex ) ; } finally { if ( lock == null && file != null ) { try { file . close ( ) ; file = null ; } catch ( IOException ex ) { LOGGER . trace ( "Unable to close the lock file" , ex ) ; } } } if ( lock == null || ! lock . isValid ( ) ) { try { final Timestamp timestamp = new Timestamp ( System . currentTimeMillis ( ) ) ; LOGGER . debug ( "Sleeping thread {} ({}) for {} seconds because an exclusive lock on the database could not be obtained ({})" , Thread . currentThread ( ) . getName ( ) , magic , SLEEP_DURATION / 1000 , timestamp . toString ( ) ) ; Thread . sleep ( SLEEP_DURATION ) ; } catch ( InterruptedException ex ) { LOGGER . debug ( "sleep was interrupted." , ex ) ; Thread . currentThread ( ) . interrupt ( ) ; } } } while ( ++ ctr < MAX_SLEEP_COUNT && ( lock == null || ! lock . isValid ( ) ) ) ; if ( lock == null || ! lock . 
isValid ( ) ) { throw new H2DBLockException ( "Unable to obtain the update lock, skipping the database update. Skippinig the database update." ) ; } } catch ( IOException ex ) { throw new H2DBLockException ( ex . getMessage ( ) , ex ) ; } |
public class ClusterHeartbeatManager { /** * Remove the { @ code member } ' s heartbeat timestamps */
void removeMember ( MemberImpl member ) { } } | heartbeatFailureDetector . remove ( member ) ; if ( icmpParallelMode ) { icmpFailureDetector . remove ( member ) ; } |
public class FeaturesImpl { /** * Deletes a phraselist feature .
* @ param appId The application ID .
* @ param versionId The version ID .
* @ param phraselistId The ID of the feature to be deleted .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws ErrorResponseException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the OperationStatus object if successful . */
public OperationStatus deletePhraseList ( UUID appId , String versionId , int phraselistId ) { } } | return deletePhraseListWithServiceResponseAsync ( appId , versionId , phraselistId ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class Uris { /** * Returns the raw ( and normalized ) path of the given URI - prefixed with a " / " . This means that an empty path
* becomes a single slash .
* @ param uri the URI to extract the path from
* @ param strict whether or not to do strict escaping
* @ return the extracted path */
public static String getRawPath ( final URI uri , final boolean strict ) { } } | return esc ( strict ) . escapePath ( prependSlash ( Strings . nullToEmpty ( uri . getRawPath ( ) ) ) ) ; |
public class CreateFollowResponse { /** * add status message : user identifier invalid
* @ param userId
* user identifier passed */
public void userIdentifierInvalid ( final String userId ) { } } | this . addStatusMessage ( ProtocolConstants . StatusCodes . Create . Follow . USER_NOT_EXISTING , "there is no user with the identifier \"" + userId + "\". Please provide a valid user identifier." ) ; |
public class _PackageInfo { /** * < p > Traverse a directory structure starting at < code > base < / code > , adding
* matching files to the specified list . < / p >
* @ param classes List of matching classes being accumulated
* @ param base Base file from which to recurse
* @ param cld ClassLoader being searched for matching classes
* @ param pckgname Package name used to select matching classes */
protected void listFilesRecursive ( final List < Class > classes , final File base , final ClassLoader cld , final String pckgname ) { } } | base . listFiles ( new FileFilter ( ) { public boolean accept ( File file ) { if ( file . isDirectory ( ) ) { listFilesRecursive ( classes , file , cld , pckgname + "." + file . getName ( ) ) ; return false ; } if ( ! file . getName ( ) . toLowerCase ( ) . endsWith ( ".class" ) ) { return false ; } String className = filenameToClassname ( pckgname + "." + file . getName ( ) ) ; loadClass ( classes , cld , className ) ; return false ; } } ) ; |
public class Types { /** * Get a long value from a buffer at a given index for a given { @ link Encoding } .
* @ param buffer from which to read .
* @ param index at which the integer should be read .
* @ param encoding of the value .
* @ return the value of the encoded long . */
public static long getLong ( final DirectBuffer buffer , final int index , final Encoding encoding ) { } } | switch ( encoding . primitiveType ( ) ) { case CHAR : return buffer . getByte ( index ) ; case INT8 : return buffer . getByte ( index ) ; case INT16 : return buffer . getShort ( index , encoding . byteOrder ( ) ) ; case INT32 : return buffer . getInt ( index , encoding . byteOrder ( ) ) ; case INT64 : return buffer . getLong ( index , encoding . byteOrder ( ) ) ; case UINT8 : return ( short ) ( buffer . getByte ( index ) & 0xFF ) ; case UINT16 : return buffer . getShort ( index , encoding . byteOrder ( ) ) & 0xFFFF ; case UINT32 : return buffer . getInt ( index , encoding . byteOrder ( ) ) & 0xFFFF_FFFFL ; case UINT64 : return buffer . getLong ( index , encoding . byteOrder ( ) ) ; default : throw new IllegalArgumentException ( "Unsupported type for long: " + encoding . primitiveType ( ) ) ; } |
public class DownloadAction { /** * Store the ETag header from the given response in the { @ link # cachedETagsFile }
* @ param host the queried host
* @ param file the queried file
* @ param response the HTTP response
* @ throws IOException if the tag could not be written */
@ SuppressWarnings ( "unchecked" ) private void storeETag ( HttpHost host , String file , HttpResponse response ) throws IOException { } } | // get ETag header
Header etagHdr = response . getFirstHeader ( "ETag" ) ; if ( etagHdr == null ) { if ( ! quiet ) { project . getLogger ( ) . warn ( "Server response does not include an " + "entity tag (ETag)." ) ; } return ; } String etag = etagHdr . getValue ( ) ; // handle weak ETags
if ( isWeakETag ( etag ) ) { if ( useETag . displayWarningForWeak && ! quiet ) { project . getLogger ( ) . warn ( "Weak entity tag (ETag) encountered. " + "Please make sure you want to compare resources based on " + "weak ETags. If yes, set the 'useETag' flag to \"all\", " + "otherwise set it to \"strongOnly\"." ) ; } if ( ! useETag . useWeakETags ) { // do not save weak etags
return ; } } // create directory for cached etags file
File parent = getCachedETagsFile ( ) . getParentFile ( ) ; if ( parent != null ) { parent . mkdirs ( ) ; } // read existing cached etags file
Map < String , Object > cachedETags = readCachedETags ( ) ; // create new entry in cached ETags file
Map < String , String > etagMap = new LinkedHashMap < String , String > ( ) ; etagMap . put ( "ETag" , etag ) ; String uri = host . toURI ( ) ; Map < String , Object > hostMap = ( Map < String , Object > ) cachedETags . get ( uri ) ; if ( hostMap == null ) { hostMap = new LinkedHashMap < String , Object > ( ) ; cachedETags . put ( uri , hostMap ) ; } hostMap . put ( file , etagMap ) ; // write cached ETags file
String cachedETagsContents = JsonOutput . toJson ( cachedETags ) ; PrintWriter writer = new PrintWriter ( getCachedETagsFile ( ) , "UTF-8" ) ; try { writer . write ( cachedETagsContents ) ; writer . flush ( ) ; } finally { writer . close ( ) ; } |
public class DiskTypeId { /** * Returns a disk type identity given the zone identity and the disk type name . */
public static DiskTypeId of ( ZoneId zoneId , String type ) { } } | return new DiskTypeId ( zoneId . getProject ( ) , zoneId . getZone ( ) , type ) ; |
public class AbstractServerPredicate { /** * Create an instance from a predicate . */
public static AbstractServerPredicate ofKeyPredicate ( final Predicate < PredicateKey > p ) { } } | return new AbstractServerPredicate ( ) { @ Override @ edu . umd . cs . findbugs . annotations . SuppressWarnings ( value = "NP" ) public boolean apply ( PredicateKey input ) { return p . apply ( input ) ; } } ; |
public class PeerEurekaNodes { /** * Checks if the given service url matches the supplied instance
* @ param url the service url of the replica node against which the check is made .
* @ param instance the instance to check the service url against
* @ return true , if the url represents the supplied instance , false otherwise . */
public boolean isInstanceURL ( String url , InstanceInfo instance ) { } } | String hostName = hostFromUrl ( url ) ; String myInfoComparator = instance . getHostName ( ) ; if ( clientConfig . getTransportConfig ( ) . applicationsResolverUseIp ( ) ) { myInfoComparator = instance . getIPAddr ( ) ; } return hostName != null && hostName . equals ( myInfoComparator ) ; |
public class EntityUtilities { /** * Finds the matching locale entity from a locale string .
* @ param locales
* @ param localeString
* @ return */
public static LocaleWrapper findLocaleFromString ( final CollectionWrapper < LocaleWrapper > locales , final String localeString ) { } } | if ( localeString == null ) return null ; for ( final LocaleWrapper locale : locales . getItems ( ) ) { if ( localeString . equals ( locale . getValue ( ) ) ) { return locale ; } } return null ; |
public class ServiceOptions { /** * Returns the default project ID , or { @ code null } if no default project ID could be found . This
* method returns the first available project ID among the following sources :
* < ol >
* < li > The project ID specified by the GOOGLE _ CLOUD _ PROJECT environment variable
* < li > The App Engine project ID
* < li > The project ID specified in the JSON credentials file pointed by the { @ code
* GOOGLE _ APPLICATION _ CREDENTIALS } environment variable
* < li > The Google Cloud SDK project ID
* < li > The Compute Engine project ID
* < / ol > */
public static String getDefaultProjectId ( ) { } } | String projectId = System . getProperty ( PROJECT_ENV_NAME , System . getenv ( PROJECT_ENV_NAME ) ) ; if ( projectId == null ) { projectId = System . getProperty ( LEGACY_PROJECT_ENV_NAME , System . getenv ( LEGACY_PROJECT_ENV_NAME ) ) ; } if ( projectId == null ) { projectId = getAppEngineProjectId ( ) ; } if ( projectId == null ) { projectId = getServiceAccountProjectId ( ) ; } return projectId != null ? projectId : getGoogleCloudProjectId ( ) ; |
public class Tags { /** * Resolves all the tags IDs asynchronously ( name followed by value ) into a map .
* This function is the opposite of { @ link # resolveAll } .
* @ param tsdb The TSDB to use for UniqueId lookups .
* @ param tags The tag IDs to resolve .
* @ return A map mapping tag names to tag values .
* @ throws NoSuchUniqueId if one of the elements in the array contained an
* invalid ID .
* @ throws IllegalArgumentException if one of the elements in the array had
* the wrong number of bytes .
* @ since 2.0 */
public static Deferred < HashMap < String , String > > resolveIdsAsync ( final TSDB tsdb , final List < byte [ ] > tags ) throws NoSuchUniqueId { } } | final short name_width = tsdb . tag_names . width ( ) ; final short value_width = tsdb . tag_values . width ( ) ; final short tag_bytes = ( short ) ( name_width + value_width ) ; final HashMap < String , String > result = new HashMap < String , String > ( tags . size ( ) ) ; final ArrayList < Deferred < String > > deferreds = new ArrayList < Deferred < String > > ( tags . size ( ) ) ; for ( final byte [ ] tag : tags ) { final byte [ ] tmp_name = new byte [ name_width ] ; final byte [ ] tmp_value = new byte [ value_width ] ; if ( tag . length != tag_bytes ) { throw new IllegalArgumentException ( "invalid length: " + tag . length + " (expected " + tag_bytes + "): " + Arrays . toString ( tag ) ) ; } System . arraycopy ( tag , 0 , tmp_name , 0 , name_width ) ; deferreds . add ( tsdb . tag_names . getNameAsync ( tmp_name ) ) ; System . arraycopy ( tag , name_width , tmp_value , 0 , value_width ) ; deferreds . add ( tsdb . tag_values . getNameAsync ( tmp_value ) ) ; } class GroupCB implements Callback < HashMap < String , String > , ArrayList < String > > { public HashMap < String , String > call ( final ArrayList < String > names ) throws Exception { for ( int i = 0 ; i < names . size ( ) ; i ++ ) { if ( i % 2 != 0 ) { result . put ( names . get ( i - 1 ) , names . get ( i ) ) ; } } return result ; } } return Deferred . groupInOrder ( deferreds ) . addCallback ( new GroupCB ( ) ) ; |
public class CrdtSetDelegate { /** * Decodes the given element from a string .
* @ param element the element to decode
* @ return the decoded element */
protected E decode ( String element ) { } } | return elementSerializer . decode ( BaseEncoding . base16 ( ) . decode ( element ) ) ; |
public class HBaseClientTemplate { /** * Create an EntityBatch that can be used to write batches of entities .
* @ param entityMapper
* The EntityMapper to use to map rows to entities .
* @ param writeBufferSize
* The buffer size used when writing batches
* @ return EntityBatch */
public < E > EntityBatch < E > createBatch ( EntityMapper < E > entityMapper , long writeBufferSize ) { } } | return new BaseEntityBatch < E > ( this , entityMapper , pool , tableName , writeBufferSize ) ; |
public class DwgFile { /** * Returns the color of the layer of a DWG object
* @ param entity DWG object which we want to know its layer color
* @ return int Layer color of the DWG object in the Autocad color code */
public int getColorByLayer ( DwgObject entity ) { } } | int colorByLayer = 0 ; int layer = entity . getLayerHandle ( ) ; for ( int j = 0 ; j < layerTable . size ( ) ; j ++ ) { Vector layerTableRecord = ( Vector ) layerTable . get ( j ) ; int lHandle = ( ( Integer ) layerTableRecord . get ( 0 ) ) . intValue ( ) ; if ( lHandle == layer ) { colorByLayer = ( ( Integer ) layerTableRecord . get ( 2 ) ) . intValue ( ) ; } } return colorByLayer ; |
public class Channel { /** * Not public */
Collection < LifecycleQueryInstalledChaincodeProposalResponse > lifecycleQueryInstalledChaincode ( LifecycleQueryInstalledChaincodeRequest lifecycleQueryInstalledChaincodeRequest , Collection < Peer > peers ) throws InvalidArgumentException , ProposalException { } } | if ( null == lifecycleQueryInstalledChaincodeRequest ) { throw new InvalidArgumentException ( "The lifecycleQueryInstalledChaincodeRequest parameter can not be null." ) ; } checkPeers ( peers ) ; if ( ! isSystemChannel ( ) ) { throw new InvalidArgumentException ( "LifecycleQueryInstalledChaincodes should only be invoked on system channel." ) ; } try { logger . trace ( format ( "LifecycleQueryInstalledChaincode packageID: %s" , lifecycleQueryInstalledChaincodeRequest . getPackageId ( ) ) ) ; TransactionContext context = getTransactionContext ( lifecycleQueryInstalledChaincodeRequest ) ; LifecycleQueryInstalledChaincodeBuilder lifecycleQueryInstalledChaincodeBuilder = LifecycleQueryInstalledChaincodeBuilder . newBuilder ( ) ; lifecycleQueryInstalledChaincodeBuilder . setPackageId ( lifecycleQueryInstalledChaincodeRequest . getPackageId ( ) ) ; lifecycleQueryInstalledChaincodeBuilder . context ( context ) ; SignedProposal qProposal = getSignedProposal ( context , lifecycleQueryInstalledChaincodeBuilder . build ( ) ) ; return sendProposalToPeers ( peers , qProposal , context , LifecycleQueryInstalledChaincodeProposalResponse . class ) ; } catch ( ProposalException e ) { throw e ; } catch ( Exception e ) { throw new ProposalException ( format ( "Query for peer %s channels failed. " + e . getMessage ( ) , name ) , e ) ; } |
public class UIContextHolder { /** * Retrieves the current effective UIContext . Note that this method will return null if called from outside normal
* request processing , for example during the initial application UI construction .
* @ return the current effective UIContext . */
public static UIContext getCurrent ( ) { } } | Stack < UIContext > stack = CONTEXT_STACK . get ( ) ; if ( stack == null || stack . isEmpty ( ) ) { return null ; } return getStack ( ) . peek ( ) ; |
public class Try { /** * Fail trivially , yielding { @ code x } .
* @ param x The result
* @ param < F > The type of failure values .
* @ param < S > The type of success values .
* @ return A computation that yields { @ code x } . */
public static < F , S > TryType < F , S > failure ( final F x ) { } } | return Failure . failure ( x ) ; |
public class ShiroWebModuleWith435 { /** * Binds the security manager . Override this method in order to provide your own security manager binding .
* By default , a { @ link org . apache . shiro . web . mgt . DefaultWebSecurityManager } is bound as an eager singleton .
* @ param bind */
protected void bindWebSecurityManager ( AnnotatedBindingBuilder < ? super WebSecurityManager > bind ) { } } | try { bind . toConstructor ( DefaultWebSecurityManager . class . getConstructor ( Collection . class ) ) . asEagerSingleton ( ) ; } catch ( NoSuchMethodException e ) { throw new ConfigurationException ( "This really shouldn't happen. Either something has changed in Shiro, or there's a bug in ShiroModule." , e ) ; } |
public class JwtGenerator { /** * Generate a JWT from a map of claims .
* @ param claims the map of claims
* @ return the created JWT */
public String generate ( final Map < String , Object > claims ) { } } | // claims builder
final JWTClaimsSet . Builder builder = new JWTClaimsSet . Builder ( ) ; // add claims
for ( final Map . Entry < String , Object > entry : claims . entrySet ( ) ) { builder . claim ( entry . getKey ( ) , entry . getValue ( ) ) ; } if ( this . expirationTime != null ) { builder . expirationTime ( this . expirationTime ) ; } return internalGenerate ( builder . build ( ) ) ; |
public class DataPersisterManager { /** * Register a data type with the manager . */
public static void registerDataPersisters ( DataPersister ... dataPersisters ) { } } | // we build the map and replace it to lower the chance of concurrency issues
List < DataPersister > newList = new ArrayList < DataPersister > ( ) ; if ( registeredPersisters != null ) { newList . addAll ( registeredPersisters ) ; } for ( DataPersister persister : dataPersisters ) { newList . add ( persister ) ; } registeredPersisters = newList ; |
public class DefaultWhenFileSystem { /** * Change the permissions on the file represented by { @ code path } to { @ code perms } , asynchronously .
* The permission String takes the form rwxr - x - - - as
* specified < a href = " http : / / download . oracle . com / javase / 7 / docs / api / java / nio / file / attribute / PosixFilePermissions . html " > here < / a > .
* @ param path the path to the file
* @ param perms the permissions string
* @ return a promise for completion */
@ Override public Promise < Void > chmod ( String path , String perms ) { } } | return adapter . toPromise ( handler -> vertx . fileSystem ( ) . chmod ( path , perms , handler ) ) ; |
public class CommercePriceListUserSegmentEntryRelPersistenceImpl { /** * Returns the commerce price list user segment entry rel where uuid = & # 63 ; and groupId = & # 63 ; or returns < code > null < / code > if it could not be found . Uses the finder cache .
* @ param uuid the uuid
* @ param groupId the group ID
* @ return the matching commerce price list user segment entry rel , or < code > null < / code > if a matching commerce price list user segment entry rel could not be found */
@ Override public CommercePriceListUserSegmentEntryRel fetchByUUID_G ( String uuid , long groupId ) { } } | return fetchByUUID_G ( uuid , groupId , true ) ; |
public class ConfigImpl { /** * Common routine to load text or js plugin delegators
* @ param cfg
* the config object as a { @ link Scriptable }
* @ param name
* the name of the plugin delegator selector
* @ return the set of plugin delegators for the selector */
protected Set < String > loadPluginDelegators ( Scriptable cfg , String name ) { } } | Set < String > result = null ; Object delegators = cfg . get ( name , cfg ) ; if ( delegators != Scriptable . NOT_FOUND && delegators instanceof Scriptable ) { result = new HashSet < String > ( ) ; for ( Object id : ( ( Scriptable ) delegators ) . getIds ( ) ) { if ( id instanceof Number ) { Number i = ( Number ) id ; Object entry = ( ( Scriptable ) delegators ) . get ( ( Integer ) i , ( Scriptable ) delegators ) ; result . add ( entry . toString ( ) ) ; } } result = Collections . unmodifiableSet ( result ) ; } else { result = Collections . emptySet ( ) ; } return result ; |
public class ClientSocket { /** * closes the stream . */
void closeImpl ( ) { } } | ReadStreamOld is = _is ; _is = null ; WriteStreamOld os = _os ; _os = null ; try { if ( is != null ) is . close ( ) ; } catch ( Throwable e ) { log . log ( Level . FINER , e . toString ( ) , e ) ; } try { if ( os != null ) os . close ( ) ; } catch ( Throwable e ) { log . log ( Level . FINER , e . toString ( ) , e ) ; } if ( is != null ) { _connProbe . end ( ) ; if ( _requestStartTime > 0 ) _requestTimeProbe . end ( _requestStartTime ) ; if ( _isIdle ) _idleProbe . end ( ) ; } |
// ANTLR-generated parser code for the 'not' rule (src/riemann/Query.g).
// Machine-generated — do not hand-edit; regenerate from the grammar instead.
public class QueryParser { /** * Parses the 'not' rule . Grammar ( src / riemann / Query . g : 32:1 ) : not : NOT ( WS ) * ( not | primary ) ; . The NOT token becomes the root of the returned subtree ; any whitespace tokens are consumed , then either a nested 'not' or a 'primary' is parsed as its child . On a syntax error the exception is reported , recovered from , and an error node is returned in the tree . */
public final QueryParser . not_return not ( ) throws RecognitionException { } } | QueryParser . not_return retval = new QueryParser . not_return ( ) ; retval . start = input . LT ( 1 ) ; CommonTree root_0 = null ; Token NOT15 = null ; Token WS16 = null ; QueryParser . not_return not17 = null ; QueryParser . primary_return primary18 = null ; CommonTree NOT15_tree = null ; CommonTree WS16_tree = null ; try { // src / riemann / Query . g : 32:5 : ( NOT ( WS ) * ( not | primary ) )
// src / riemann / Query . g : 32:7 : NOT ( WS ) * ( not | primary )
{ root_0 = ( CommonTree ) adaptor . nil ( ) ; NOT15 = ( Token ) match ( input , NOT , FOLLOW_NOT_in_not216 ) ; NOT15_tree = ( CommonTree ) adaptor . create ( NOT15 ) ; root_0 = ( CommonTree ) adaptor . becomeRoot ( NOT15_tree , root_0 ) ; // src / riemann / Query . g : 32:12 : ( WS ) *
loop9 : do { int alt9 = 2 ; int LA9_0 = input . LA ( 1 ) ; if ( ( LA9_0 == WS ) ) { alt9 = 1 ; } switch ( alt9 ) { case 1 : // src / riemann / Query . g : 32:12 : WS
{ WS16 = ( Token ) match ( input , WS , FOLLOW_WS_in_not219 ) ; WS16_tree = ( CommonTree ) adaptor . create ( WS16 ) ; adaptor . addChild ( root_0 , WS16_tree ) ; } break ; default : break loop9 ; } } while ( true ) ; // src / riemann / Query . g : 32:16 : ( not | primary )
int alt10 = 2 ; int LA10_0 = input . LA ( 1 ) ; if ( ( LA10_0 == NOT ) ) { alt10 = 1 ; } else if ( ( LA10_0 == TAGGED || LA10_0 == 25 || ( LA10_0 >= 27 && LA10_0 <= 38 ) ) ) { alt10 = 2 ; } else { NoViableAltException nvae = new NoViableAltException ( "" , 10 , 0 , input ) ; throw nvae ; } switch ( alt10 ) { case 1 : // src / riemann / Query . g : 32:17 : not
{ pushFollow ( FOLLOW_not_in_not223 ) ; not17 = not ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , not17 . getTree ( ) ) ; } break ; case 2 : // src / riemann / Query . g : 32:23 : primary
{ pushFollow ( FOLLOW_primary_in_not227 ) ; primary18 = primary ( ) ; state . _fsp -- ; adaptor . addChild ( root_0 , primary18 . getTree ( ) ) ; } break ; } } retval . stop = input . LT ( - 1 ) ; retval . tree = ( CommonTree ) adaptor . rulePostProcessing ( root_0 ) ; adaptor . setTokenBoundaries ( retval . tree , retval . start , retval . stop ) ; } catch ( RecognitionException re ) { reportError ( re ) ; recover ( input , re ) ; retval . tree = ( CommonTree ) adaptor . errorNode ( input , retval . start , input . LT ( - 1 ) , re ) ; } finally { } return retval ;
public class Equ { /** * Create a reverse Polish notation ( RPN ) form of the equation using a shunting - yard style pass : plain tokens are emitted directly , operations are pushed to / popped from an operator stack according to precedence ( preceeds ) and level , and parentheses adjust the comparison level .
* @ param oldTokens the equation parts in infix order
* @ return the equation parts reordered into reverse Polish notation */
protected List < EquPart > rpnize ( final List < EquPart > oldTokens ) { } } | final List < EquPart > _rpn = new Stack < > ( ) ; final Stack < EquPart > ops = new Stack < > ( ) ; Operation leftOp ; Operation rightOp ; for ( final EquPart token : oldTokens ) if ( token instanceof Token ) _rpn . add ( token ) ; else { rightOp = ( Operation ) token ; if ( ops . empty ( ) ) { if ( rightOp . includeInRpn ( ) ) ops . push ( rightOp ) ; } else { leftOp = ( Operation ) ops . peek ( ) ; if ( leftOp . preceeds ( rightOp ) ) { _rpn . add ( ops . pop ( ) ) ; /* * while knocking one off scan back as long as the level
* doesn ' t change . A left paren causes the level to
* increase and the left paren has that new level . A
* right paren causes the level to decrease and has this
* new lesser level . So the right paren is one level
* less than the operators within the parens . Since we
* want to only scan those operators we have to adjust
* for the level that they are . */
// NOTE(review): the unconditional 'break' at the end of the while loop below
// exits after a single iteration, so the "scan back" described above never
// actually repeats — at most one extra operator is popped per token. Confirm
// whether this is intentional before changing it.
int level = rightOp . getLevel ( ) ; if ( rightOp instanceof OpRightParen ) level = level + 1 ; Operation compareOp = rightOp ; while ( ! ops . empty ( ) ) { leftOp = ( Operation ) ops . peek ( ) ; if ( leftOp . getLevel ( ) < level ) break ; if ( leftOp . preceeds ( compareOp ) ) { _rpn . add ( ops . pop ( ) ) ; compareOp = leftOp ; } else break ; break ; } } if ( rightOp . includeInRpn ( ) ) ops . push ( rightOp ) ; } } while ( ! ops . empty ( ) ) _rpn . add ( ops . pop ( ) ) ; return _rpn ;
public class CPDefinitionLinkUtil { /** * Returns the last cp definition link in the ordered set where CProductId = & # 63 ; .
* @ param CProductId the c product ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the last matching cp definition link , or < code > null < / code > if a matching cp definition link could not be found */
public static CPDefinitionLink fetchByCProductId_Last ( long CProductId , OrderByComparator < CPDefinitionLink > orderByComparator ) { } } | return getPersistence ( ) . fetchByCProductId_Last ( CProductId , orderByComparator ) ; |
public class Configuration { /** * Return the input stream to the builder XML .
* @ return the input steam to the builder XML .
* @ throws FileNotFoundException when the given XML file cannot be found . */
public InputStream getBuilderXML ( ) throws IOException { } } | return builderXMLPath == null ? Configuration . class . getResourceAsStream ( DEFAULT_BUILDER_XML ) : DocFile . createFileForInput ( this , builderXMLPath ) . openInputStream ( ) ; |
public class ObjectEnvelopeOrdering { /** * Reorders the object envelopes . The new order is available from the
* < code > ordering < / code > property . Clean envelopes keep their original position ; envelopes needing insert , update or delete become vertices of a dependency graph ( edges built by addEdgesForVertex ) and are emitted iteratively , each pass taking the vertices with the minimum incoming - edge weight .
* @ see # getOrdering ( ) */
public void reorder ( ) { } } | int newOrderIndex = 0 ; long t1 = 0 , t2 = 0 , t3 ; if ( log . isDebugEnabled ( ) ) { t1 = System . currentTimeMillis ( ) ; } newOrder = new Identity [ originalOrder . size ( ) ] ; if ( log . isDebugEnabled ( ) ) log . debug ( "Orginal order: " + originalOrder ) ; // set up the vertex array in the order the envelopes were added
List vertexList = new ArrayList ( originalOrder . size ( ) ) ; // int vertexIndex = 0;
for ( Iterator it = originalOrder . iterator ( ) ; it . hasNext ( ) ; ) { ObjectEnvelope envelope = ( ObjectEnvelope ) envelopes . get ( it . next ( ) ) ; if ( envelope . needsUpdate ( ) || envelope . needsInsert ( ) || envelope . needsDelete ( ) ) { Vertex vertex = new Vertex ( envelope ) ; vertexList . add ( vertex ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Add new Vertex object " + envelope . getIdentity ( ) + " to VertexList" ) ; } } else { // envelope is clean - just add identity to new order
newOrder [ newOrderIndex ++ ] = envelope . getIdentity ( ) ; if ( log . isDebugEnabled ( ) ) { log . debug ( "Add unmodified object " + envelope . getIdentity ( ) + " to new OrderList" ) ; } } } vertices = ( Vertex [ ] ) vertexList . toArray ( new Vertex [ vertexList . size ( ) ] ) ; // set up the edges
edgeList = new ArrayList ( 2 * vertices . length ) ; for ( int i = 0 ; i < vertices . length ; i ++ ) { addEdgesForVertex ( vertices [ i ] ) ; } if ( log . isDebugEnabled ( ) ) { t2 = System . currentTimeMillis ( ) ; log . debug ( "Building object envelope graph took " + ( t2 - t1 ) + " ms" ) ; log . debug ( "Object envelope graph contains " + vertices . length + " vertices" + " and " + edgeList . size ( ) + " edges" ) ; } int remainingVertices = vertices . length ; int iterationCount = 0 ; while ( remainingVertices > 0 ) { // update iteration count
iterationCount ++ ; // update incoming edge counts
for ( Iterator it = edgeList . iterator ( ) ; it . hasNext ( ) ; ) { Edge edge = ( Edge ) it . next ( ) ; if ( ! edge . isProcessed ( ) ) { if ( log . isDebugEnabled ( ) ) { final String msg = "Add weight '" + edge . getWeight ( ) + "' for terminal vertex " + edge . getTerminalVertex ( ) + " of edge " + edge ; log . debug ( msg ) ; } edge . getTerminalVertex ( ) . incrementIncomingEdgeWeight ( edge . getWeight ( ) ) ; } } // find minimum weight of incoming edges of a vertex
int minIncomingEdgeWeight = Integer . MAX_VALUE ; for ( int i = 0 ; i < vertices . length ; i ++ ) { Vertex vertex = vertices [ i ] ; if ( ! vertex . isProcessed ( ) && minIncomingEdgeWeight > vertex . getIncomingEdgeWeight ( ) ) { minIncomingEdgeWeight = vertex . getIncomingEdgeWeight ( ) ; if ( minIncomingEdgeWeight == 0 ) { // we won ' t get any lower
break ; } } } // process vertices having minimum incoming edge weight
int processCount = 0 ; for ( int i = 0 ; i < vertices . length ; i ++ ) { Vertex vertex = vertices [ i ] ; if ( ! vertex . isProcessed ( ) && vertex . getIncomingEdgeWeight ( ) == minIncomingEdgeWeight ) { newOrder [ newOrderIndex ++ ] = vertex . getEnvelope ( ) . getIdentity ( ) ; vertex . markProcessed ( ) ; processCount ++ ; if ( log . isDebugEnabled ( ) ) { log . debug ( "add minimum edge weight - " + minIncomingEdgeWeight + ", newOrderList: " + ArrayUtils . toString ( newOrder ) ) ; } } vertex . resetIncomingEdgeWeight ( ) ; } if ( log . isDebugEnabled ( ) ) { log . debug ( "Processed " + processCount + " of " + remainingVertices + " remaining vertices in iteration #" + iterationCount ) ; } remainingVertices -= processCount ; } if ( log . isDebugEnabled ( ) ) { t3 = System . currentTimeMillis ( ) ; log . debug ( "New ordering: " + ArrayUtils . toString ( newOrder ) ) ; log . debug ( "Processing object envelope graph took " + ( t3 - t2 ) + " ms" ) ; }
public class CallerUtil { /** * Returns the signature of the method inside the monitored codebase which was last executed . */
public static String getCallerSignature ( ) { } } | if ( Stagemonitor . getPlugin ( CorePlugin . class ) . getIncludePackages ( ) . isEmpty ( ) ) { return null ; } if ( javaLangAccessObject != null ) { return getCallerSignatureSharedSecrets ( ) ; } else { return getCallerSignatureGetStackTrace ( ) ; } |
public class FilterLogEventsResult { /** * Indicates which log streams have been searched and whether each has been searched completely .
* @ return Indicates which log streams have been searched and whether each has been searched completely . */
public java . util . List < SearchedLogStream > getSearchedLogStreams ( ) { } } | if ( searchedLogStreams == null ) { searchedLogStreams = new com . amazonaws . internal . SdkInternalList < SearchedLogStream > ( ) ; } return searchedLogStreams ; |
public class BackgroundLruEvictionStrategy { /** * Returns true if trace should be printed . */
private boolean isTraceEnabled ( boolean debug ) // d581579
{ } } | if ( debug ? tc . isDebugEnabled ( ) : tc . isEntryEnabled ( ) ) { return true ; } if ( ivCache . numSweeps % NUM_SWEEPS_PER_OOMTRACE == 1 && ( debug ? tcOOM . isDebugEnabled ( ) : tcOOM . isEntryEnabled ( ) ) ) { return true ; } return false ; |
public class DistributedCache { /** * Returns the relative path of the dir this cache will be localized in
* relative path that this cache will be localized in . For
* hdfs : / / hostname : port / absolute _ path - - the relative path is
* hostname / absolute path - - if it is just / absolute _ path - - then the
* relative path is hostname of DFS this mapred cluster is running
* on / absolute _ path */
public static String makeRelative ( URI cache , Configuration conf ) throws IOException { } } | String host = cache . getHost ( ) ; if ( host == null ) { host = cache . getScheme ( ) ; } if ( host == null ) { URI defaultUri = FileSystem . get ( conf ) . getUri ( ) ; host = defaultUri . getHost ( ) ; if ( host == null ) { host = defaultUri . getScheme ( ) ; } } String path = host + cache . getPath ( ) ; path = path . replace ( ":/" , "/" ) ; // remove windows device colon
return path ; |
public class MarkupSelectorFilter { /** * Element handling : decides whether a standalone ( self - closing ) element matches this filter in the selector chain . Depending on whether a previous markup level already matched , the decision is either resolved here or delegated to the next filter in the chain . Returns true when the chain considers this element matched ( for blockMatching , a previously matched level is enough ; otherwise the element itself must match at this level ) . */
boolean matchStandaloneElement ( final boolean blockMatching , final int markupLevel , final int markupBlockIndex , final SelectorElementBuffer elementBuffer ) { } } | checkMarkupLevel ( markupLevel ) ; if ( this . markupSelectorItem . anyLevel ( ) || markupLevel == 0 || ( this . prev != null && this . prev . matchedMarkupLevels [ markupLevel - 1 ] ) ) { // This element has not matched yet , but might match , so we should check
this . matchesThisLevel = this . markupSelectorItem . matchesElement ( markupBlockIndex , elementBuffer , this . markupBlockMatchingCounter ) ; if ( matchesPreviousOrCurrentLevel ( markupLevel ) ) { // This filter was already matched by a previous level ( through an " open " event ) , so just delegate to next .
if ( this . next != null ) { return this . next . matchStandaloneElement ( blockMatching , markupLevel , markupBlockIndex , elementBuffer ) ; } return ( blockMatching ? true : this . matchesThisLevel ) ; } else if ( this . matchesThisLevel ) { // This filter was not matched before . So the fact that it matches now means we need to consume it ,
// therefore not delegating .
return ( this . next == null ) ; } } else if ( matchesPreviousOrCurrentLevel ( markupLevel ) ) { // This filter was already matched by a previous level ( through an " open " event ) , so just delegate to next .
if ( this . next != null ) { return this . next . matchStandaloneElement ( blockMatching , markupLevel , markupBlockIndex , elementBuffer ) ; } return blockMatching ; } return false ;
public class SerializedFormBuilder { /** * Build method description .
* @ param node the XML element that specifies which components to document
* @ param methodsContentTree content tree to which the documentation will be added */
public void buildMethodDescription ( XMLNode node , Content methodsContentTree ) { } } | methodWriter . addMemberDescription ( ( MethodDoc ) currentMember , methodsContentTree ) ; |
public class PippoSettings { /** * Returns the integer value for the specified name . If the name does not
* exist or the value for the name can not be interpreted as an integer , the
* defaultValue is returned .
* @ param name
* @ param defaultValue
* @ return name value or defaultValue */
public int getInteger ( String name , int defaultValue ) { } } | try { String value = getString ( name , null ) ; if ( ! StringUtils . isNullOrEmpty ( value ) ) { return Integer . parseInt ( value . trim ( ) ) ; } } catch ( NumberFormatException e ) { log . warn ( "Failed to parse integer for " + name + USING_DEFAULT_OF + defaultValue ) ; } return defaultValue ; |
public class ClientConnection { /** * Connects to the cluster . Reuses an established connection or an in - flight connect attempt when possible ; when the address selector has been reset , the stale connection is closed and a fresh connect is started . NOTE ( review ) : connection / connectFuture are mutated without synchronization — presumably this runs on a single ( event - loop ) thread ; confirm before invoking from other threads . Returns a future completed with an established connection . */
private CompletableFuture < Connection > connect ( ) { } } | // If the address selector has been reset then reset the connection .
if ( selector . state ( ) == AddressSelector . State . RESET && connection != null ) { if ( connectFuture != null ) { return connectFuture ; } CompletableFuture < Connection > future = new OrderedCompletableFuture < > ( ) ; future . whenComplete ( ( r , e ) -> this . connectFuture = null ) ; this . connectFuture = future ; Connection oldConnection = this . connection ; this . connection = null ; oldConnection . close ( ) ; connect ( future ) ; return future ; } // If a connection was already established then use that connection .
if ( connection != null ) { return CompletableFuture . completedFuture ( connection ) ; } // If a connection is currently being established then piggyback on the connect future .
if ( connectFuture != null ) { return connectFuture ; } // Create a new connect future and connect to the first server in the cluster .
CompletableFuture < Connection > future = new OrderedCompletableFuture < > ( ) ; future . whenComplete ( ( r , e ) -> this . connectFuture = null ) ; this . connectFuture = future ; reset ( ) . connect ( future ) ; return future ;
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.