signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class SessionState { /** * Check if the current sequence numbers for all partitions are > = the ones set as end . * @ return true if all are at the end , false otherwise . */ public boolean isAtEnd ( ) { } }
final AtomicBoolean atEnd = new AtomicBoolean ( true ) ; foreachPartition ( ps -> { if ( ! ps . isAtEnd ( ) ) { atEnd . set ( false ) ; } } ) ; return atEnd . get ( ) ;
public class FileResource { /** * Endpoints for artifacts download */ @ GET @ Path ( "/download/{file}" ) public Response downloadFile ( final @ PathParam ( "file" ) String file ) { } }
Config config = createConfig ( ) ; String uploadDir = config . getStringValue ( FILE_SYSTEM_DIRECTORY ) ; String filePath = uploadDir + "/" + file ; return getResponseByFile ( filePath ) ;
public class SharedObject { /** * Broadcast event to event handler * @ param handler * Event handler * @ param arguments * Arguments */ protected void sendMessage ( String handler , List < ? > arguments ) { } }
final SharedObjectEvent event = new SharedObjectEvent ( Type . CLIENT_SEND_MESSAGE , handler , arguments ) ; if ( ownerMessage . addEvent ( event ) ) { syncEvents . add ( event ) ; sendStats . incrementAndGet ( ) ; if ( log . isTraceEnabled ( ) ) { log . trace ( "Send message: {}" , arguments ) ; } }
public class SymbolTable {

    /**
     * Looks up (and, if absent, inserts) the canonical String for the given
     * character range, using a pre-computed hash.
     *
     * @param buffer characters containing the symbol
     * @param start index of the first character of the symbol
     * @param len number of characters in the symbol
     * @param hash pre-computed hash for the range (before index masking)
     * @return the canonical, possibly interned, String for the symbol
     */
    public String findSymbol(char[] buffer, int start, int len, int hash) {
        // Sanity check:
        if (len < 1) {
            return EMPTY_STRING;
        }
        hash &= mIndexMask;
        String sym = mSymbols[hash];
        // Optimal case; checking existing primary symbol for hash index:
        if (sym != null) {
            // Let's inline primary String equality checking:
            if (sym.length() == len) {
                int i = 0;
                do {
                    if (sym.charAt(i) != buffer[start + i]) {
                        break;
                    }
                } while (++i < len);
                // Optimal case; primary match found
                if (i == len) {
                    return sym;
                }
            }
            // Primary slot occupied by a different symbol: check the collision bucket.
            Bucket b = mBuckets[hash >> 1];
            if (b != null) {
                sym = b.find(buffer, start, len);
                if (sym != null) {
                    return sym;
                }
            }
        }
        // Not found: the table will be modified, so grow or copy-on-write first.
        if (mSize >= mSizeThreshold) {
            rehash();
            /* Need to recalc hash; rare occurence (index mask has been
             * recalculated as part of rehash) */
            hash = calcHash(buffer, start, len) & mIndexMask;
        } else if (!mDirty) {
            // Shared arrays must be copied before first local modification.
            copyArrays();
            mDirty = true;
        }
        ++mSize;
        String newSymbol = new String(buffer, start, len);
        if (mInternStrings) {
            newSymbol = newSymbol.intern();
        }
        // Ok; do we need to add primary entry, or a bucket?
        if (mSymbols[hash] == null) {
            mSymbols[hash] = newSymbol;
        } else {
            // Primary slot taken: prepend to the collision bucket chain.
            int bix = hash >> 1;
            mBuckets[bix] = new Bucket(newSymbol, mBuckets[bix]);
        }
        return newSymbol;
    }
}
public class TransactionLogger { /** * Add component processing time to given map * @ param mapComponentTimes * @ param component */ private static void addTimePerComponent ( HashMap < String , Long > mapComponentTimes , Component component ) { } }
Long currentTimeOfComponent = 0L ; String key = component . getComponentType ( ) ; if ( mapComponentTimes . containsKey ( key ) ) { currentTimeOfComponent = mapComponentTimes . get ( key ) ; } // when transactions are run in parallel , we should log the longest transaction only to avoid that // for ex ' Total time ' would be 100ms and transactions in parallel to hornetQ will be 2000ms Long maxTime = Math . max ( component . getTime ( ) , currentTimeOfComponent ) ; mapComponentTimes . put ( key , maxTime ) ;
public class AmazonEC2Client { /** * Adds an ingress authorization rule to a Client VPN endpoint . Ingress authorization rules act as firewall rules * that grant access to networks . You must configure ingress authorization rules to enable clients to access * resources in AWS or on - premises networks . * @ param authorizeClientVpnIngressRequest * @ return Result of the AuthorizeClientVpnIngress operation returned by the service . * @ sample AmazonEC2 . AuthorizeClientVpnIngress * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / AuthorizeClientVpnIngress " target = " _ top " > AWS * API Documentation < / a > */ @ Override public AuthorizeClientVpnIngressResult authorizeClientVpnIngress ( AuthorizeClientVpnIngressRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeAuthorizeClientVpnIngress ( request ) ;
public class ChatApi { /** * Resume async interaction chat * Resume for the specified chat . * @ param id The ID of the chat interaction . ( required ) * @ param asyncResumeData Request parameters . ( required ) * @ return ApiSuccessResponse * @ throws ApiException If fail to call the API , e . g . server error or cannot deserialize the response body */ public ApiSuccessResponse asyncResume ( String id , AsyncResumeData asyncResumeData ) throws ApiException { } }
ApiResponse < ApiSuccessResponse > resp = asyncResumeWithHttpInfo ( id , asyncResumeData ) ; return resp . getData ( ) ;
public class Forker {

    /**
     * Obtain the {@link ForkedStream} for the input {@link RecordStreamWithMetadata}
     * and {@link ForkOperator}.
     *
     * @param inputStream input {@link Flowable} of records.
     * @param forkOperator {@link ForkOperator} specifying the fork behavior.
     * @param workUnitState work unit configuration.
     * @return a {@link ForkedStream} with the forked streams.
     * @throws Exception if the {@link ForkOperator} throws any exceptions.
     */
    public <D, S> ForkedStream<D, S> forkStream(RecordStreamWithMetadata<D, S> inputStream,
            ForkOperator<S, D> forkOperator, WorkUnitState workUnitState) throws Exception {
        int branches = forkOperator.getBranches(workUnitState);
        // Set fork.branches explicitly here so the rest task flow can pick it up
        workUnitState.setProp(ConfigurationKeys.FORK_BRANCHES_KEY, branches);
        forkOperator.init(workUnitState);
        // One boolean per branch: whether that branch is active for this schema.
        List<Boolean> forkedSchemas = forkOperator.forkSchema(workUnitState, inputStream.getGlobalMetadata().getSchema());
        int activeForks = (int) forkedSchemas.stream().filter(b -> b).count();
        Preconditions.checkState(forkedSchemas.size() == branches,
                String.format("Number of forked schemas [%d] is not equal to number of branches [%d]",
                        forkedSchemas.size(), branches));
        // Tag each record with its per-branch fork decision; control messages go to all branches.
        Flowable<RecordWithForkMap<D>> forkedStream = inputStream.getRecordStream().map(r -> {
            if (r instanceof RecordEnvelope) {
                RecordEnvelope<D> recordEnvelope = (RecordEnvelope<D>) r;
                return new RecordWithForkMap<>(recordEnvelope,
                        forkOperator.forkDataRecord(workUnitState, recordEnvelope.getRecord()));
            } else if (r instanceof ControlMessage) {
                return new RecordWithForkMap<D>((ControlMessage<D>) r, branches);
            } else {
                throw new IllegalStateException("Expected RecordEnvelope or ControlMessage.");
            }
        });
        // Multiple active consumers need a shared (multicast) upstream.
        if (activeForks > 1) {
            forkedStream = forkedStream.share();
        }
        List<RecordStreamWithMetadata<D, S>> forkStreams = Lists.newArrayList();
        boolean mustCopy = mustCopy(forkedSchemas);
        for (int i = 0; i < forkedSchemas.size(); i++) {
            if (forkedSchemas.get(i)) {
                final int idx = i;
                // Each active branch filters to its own index and copies records when required.
                Flowable<StreamEntity<D>> thisStream =
                        forkedStream.filter(new ForkFilter<>(idx)).map(RecordWithForkMap::getRecordCopyIfNecessary);
                forkStreams.add(inputStream.withRecordStream(thisStream,
                        mustCopy ? (GlobalMetadata<S>) CopyHelper.copy(inputStream.getGlobalMetadata())
                                : inputStream.getGlobalMetadata()));
            } else {
                // Inactive branches are represented by null placeholders to keep indices aligned.
                forkStreams.add(null);
            }
        }
        return new ForkedStream<>(forkStreams);
    }
}
public class MaterialCutOut {

    /**
     * Sets up the cut out position when the screen changes size or is scrolled.
     *
     * @param cutOut the cut out element to position
     * @param relativeTo the element the cut out should be placed over
     * @param padding extra pixels added on every side of the cut out
     * @param circle if true, widen the shorter dimension so the cut out is square
     *        (suitable for rendering as a circle), keeping it centered
     */
    protected void setupCutOutPosition(Element cutOut, Element relativeTo, int padding, boolean circle) {
        // Viewport-relative top: offset minus current page scroll.
        // NOTE(review): top uses getOffsetTop while left uses getAbsoluteLeft —
        // presumably intentional for this layout; confirm against callers.
        float top = relativeTo.getOffsetTop() - (Math.max($("html").scrollTop(), $("body").scrollTop()));
        float left = relativeTo.getAbsoluteLeft();
        float width = relativeTo.getOffsetWidth();
        float height = relativeTo.getOffsetHeight();
        if (circle) {
            // Make the rectangle square by growing the shorter side,
            // shifting the origin by half the difference to stay centered.
            if (width != height) {
                float dif = width - height;
                if (width > height) {
                    height += dif;
                    top -= dif / 2;
                } else {
                    dif = -dif;
                    width += dif;
                    left -= dif / 2;
                }
            }
        }
        // Apply padding on all four sides.
        top -= padding;
        left -= padding;
        width += padding * 2;
        height += padding * 2;
        $(cutOut).css("top", top + "px");
        $(cutOut).css("left", left + "px");
        $(cutOut).css("width", width + "px");
        $(cutOut).css("height", height + "px");
    }
}
public class MesosConfiguration {

    /**
     * A utility method to log relevant Mesos connection info.
     *
     * @param log the logger to write the summary to
     * @param config the Mesos configuration whose settings are logged
     */
    public static void logMesosConfig(Logger log, MesosConfiguration config) {
        Map<String, String> env = System.getenv();
        Protos.FrameworkInfo.Builder info = config.frameworkInfo();
        log.info("--------------------------------------------------------------------------------");
        log.info(" Mesos Info:");
        log.info(" Master URL: {}", config.masterUrl());
        log.info(" Framework Info:");
        // Each optional field falls back to "(none)" when unset.
        log.info(" ID: {}", info.hasId() ? info.getId().getValue() : "(none)");
        log.info(" Name: {}", info.hasName() ? info.getName() : "(none)");
        log.info(" Failover Timeout (secs): {}", info.getFailoverTimeout());
        log.info(" Role: {}", info.hasRole() ? info.getRole() : "(none)");
        log.info(" Capabilities: {}",
                info.getCapabilitiesList().size() > 0 ? info.getCapabilitiesList() : "(none)");
        log.info(" Principal: {}", info.hasPrincipal() ? info.getPrincipal() : "(none)");
        log.info(" Host: {}", info.hasHostname() ? info.getHostname() : "(none)");
        // libprocess settings only appear when present in the environment.
        if (env.containsKey("LIBPROCESS_IP")) {
            log.info(" LIBPROCESS_IP: {}", env.get("LIBPROCESS_IP"));
        }
        if (env.containsKey("LIBPROCESS_PORT")) {
            log.info(" LIBPROCESS_PORT: {}", env.get("LIBPROCESS_PORT"));
        }
        log.info(" Web UI: {}", info.hasWebuiUrl() ? info.getWebuiUrl() : "(none)");
        log.info("--------------------------------------------------------------------------------");
    }
}
public class MainSupport {

    /**
     * Create the {@link StreamSet} used to register.
     *
     * Both streams wrap the terminal's input/output with close() overridden to
     * a no-op, so that consumers of the StreamSet cannot accidentally close
     * the underlying terminal streams.
     *
     * @param terminal the terminal whose input/output streams are wrapped
     * @return a StreamSet over the terminal's streams with close() suppressed
     */
    @VisibleForTesting
    protected StreamSet createStreamSet(final Terminal terminal) {
        InputStream in = new FilterInputStream(terminal.input()) {
            @Override
            public void close() throws IOException {
                // ignore
            }
        };
        // NOTE(review): PrintStream uses the platform default charset here —
        // confirm that is intended for this terminal.
        PrintStream out = new PrintStream(terminal.output(), true) {
            @Override
            public void close() {
                // ignore
            }
        };
        return new StreamSet(in, out);
    }
}
public class InstanceClient { /** * Sets network tags for the specified instance to the data included in the request . * < p > Sample code : * < pre > < code > * try ( InstanceClient instanceClient = InstanceClient . create ( ) ) { * ProjectZoneInstanceName instance = ProjectZoneInstanceName . of ( " [ PROJECT ] " , " [ ZONE ] " , " [ INSTANCE ] " ) ; * Tags tagsResource = Tags . newBuilder ( ) . build ( ) ; * Operation response = instanceClient . setTagsInstance ( instance . toString ( ) , tagsResource ) ; * < / code > < / pre > * @ param instance Name of the instance scoping this request . * @ param tagsResource A set of instance tags . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final Operation setTagsInstance ( String instance , Tags tagsResource ) { } }
SetTagsInstanceHttpRequest request = SetTagsInstanceHttpRequest . newBuilder ( ) . setInstance ( instance ) . setTagsResource ( tagsResource ) . build ( ) ; return setTagsInstance ( request ) ;
public class WikiParser {

    /**
     * Greedy version of findEndOfNowiki().
     * It finds the last possible closing '}}}' before the next opening '{{{'.
     * Also honors the escapes '~{{{' and '~}}}'.
     *
     * @param startBlock points to the first char after '{{{'
     * @return position of the first '}' in the closing '}}}', or wikiLength
     *         if no unescaped closing sequence exists
     */
    @SuppressWarnings("unused")
    private int findEndOfNowikiGreedy(int startBlock) {
        // NOTE: this method could step back one char from startBlock position
        int nextBlock = startBlock - 3;
        do {
            // Advance to the next unescaped '{{{' (skip ones preceded by '~').
            do {
                nextBlock = wikiText.indexOf("{{{", nextBlock + 3);
            } while (nextBlock > 0 && wikiChars[nextBlock - 1] == '~');
            if (nextBlock < 0)
                nextBlock = wikiLength;
            // Greedy: take the LAST '}}}' before that next opening.
            int endBlock = wikiText.lastIndexOf("}}}", nextBlock);
            if (endBlock >= startBlock && wikiChars[endBlock - 1] != '~')
                return endBlock;
            // Otherwise keep scanning past the next opening sequence.
        } while (nextBlock < wikiLength);
        return wikiLength;
    }
}
public class DataSetExportServicesImpl {

    /**
     * Converts the given DataSet into a streaming Excel workbook: one header
     * row styled per column id, then one row per data row with cell styles
     * chosen by value type.
     *
     * Package private to enable testing.
     *
     * @param dataSet the data set to export; must not be null
     * @return the populated streaming workbook
     * @throws IllegalArgumentException if dataSet is null
     */
    SXSSFWorkbook dataSetToWorkbook(DataSet dataSet) {
        // TODO?: Excel 2010 limits: 1,048,576 rows by 16,384 columns; row width 255 characters
        if (dataSet == null) {
            throw new IllegalArgumentException("Null dataSet specified!");
        }
        int columnCount = dataSet.getColumns().size();
        int rowCount = dataSet.getRowCount() + 1; // Include header row;
        int row = 0;
        SXSSFWorkbook wb = new SXSSFWorkbook(100); // keep 100 rows in memory, exceeding rows will be flushed to disk
        Map<String, CellStyle> styles = createStyles(wb);
        SXSSFSheet sh = wb.createSheet("Sheet 1");
        // General setup
        sh.setDisplayGridlines(true);
        sh.setPrintGridlines(false);
        sh.setFitToPage(true);
        sh.setHorizontallyCenter(true);
        // Required so autoSizeColumn works with the streaming (SXSSF) sheet.
        sh.trackAllColumnsForAutoSizing();
        PrintSetup printSetup = sh.getPrintSetup();
        printSetup.setLandscape(true);
        // Create header
        Row header = sh.createRow(row++);
        header.setHeightInPoints(20f);
        for (int i = 0; i < columnCount; i++) {
            Cell cell = header.createCell(i);
            cell.setCellStyle(styles.get("header"));
            cell.setCellValue(dataSet.getColumnByIndex(i).getId());
        }
        // Create data rows; sheet row N maps to data row N-1 because of the header.
        for (; row < rowCount; row++) {
            Row _row = sh.createRow(row);
            for (int cellnum = 0; cellnum < columnCount; cellnum++) {
                Cell cell = _row.createCell(cellnum);
                Object value = dataSet.getValueAt(row - 1, cellnum);
                if (value instanceof Short || value instanceof Long || value instanceof Integer || value instanceof BigInteger) {
                    cell.setCellType(CellType.NUMERIC);
                    cell.setCellStyle(styles.get("integer_number_cell"));
                    cell.setCellValue(((Number) value).doubleValue());
                } else if (value instanceof Float || value instanceof Double || value instanceof BigDecimal) {
                    cell.setCellType(CellType.NUMERIC);
                    cell.setCellStyle(styles.get("decimal_number_cell"));
                    cell.setCellValue(((Number) value).doubleValue());
                } else if (value instanceof Date) {
                    // NOTE(review): cell type is set to STRING but a Date value is
                    // written; the "date_cell" style presumably handles formatting —
                    // confirm this renders as intended in Excel.
                    cell.setCellType(CellType.STRING);
                    cell.setCellStyle(styles.get("date_cell"));
                    cell.setCellValue((Date) value);
                } else if (value instanceof Interval) {
                    cell.setCellType(CellType.STRING);
                    cell.setCellStyle(styles.get(TEXT_CELL));
                    cell.setCellValue(((Interval) value).getName());
                } else {
                    // Fallback: everything else (including null) is written as text.
                    cell.setCellType(CellType.STRING);
                    cell.setCellStyle(styles.get(TEXT_CELL));
                    String val = value == null ? "" : value.toString();
                    cell.setCellValue(val);
                }
            }
        }
        // Adjust column size
        for (int i = 0; i < columnCount; i++) {
            sh.autoSizeColumn(i);
        }
        return wb;
    }
}
public class ValuelessColumnsMapper { /** * Declares the fields produced by this bolt . * @ param declarer */ @ Override public void declareOutputFields ( OutputFieldsDeclarer declarer ) { } }
if ( this . isDrpc ) { declarer . declare ( new Fields ( "id" , this . emitFieldForRowKey , this . emitFieldForColumnName ) ) ; } else { declarer . declare ( new Fields ( this . emitFieldForRowKey , this . emitFieldForColumnName ) ) ; }
public class MSPDIReader {

    /**
     * This method extracts task data from an MSPDI file.
     *
     * @param project Root node of the MSPDI file
     */
    private void readTasks(Project project) {
        Project.Tasks tasks = project.getTasks();
        if (tasks != null) {
            // First pass: read each task, counting how many arrive without an ID.
            int tasksWithoutIDCount = 0;
            for (Project.Tasks.Task task : tasks.getTask()) {
                Task mpxjTask = readTask(task);
                if (mpxjTask.getID() == null) {
                    ++tasksWithoutIDCount;
                }
            }
            // Second pass: predecessors can only be resolved once all tasks exist.
            for (Project.Tasks.Task task : tasks.getTask()) {
                readPredecessors(task);
            }
            // MS Project will happily read tasks from an MSPDI file without IDs,
            // it will just generate ID values based on the task order in the file.
            // If we find that there are no ID values present, we'll do the same.
            if (tasksWithoutIDCount == tasks.getTask().size()) {
                m_projectFile.getTasks().renumberIDs();
            }
        }
        m_projectFile.updateStructure();
    }
}
public class GetSdkTypesResult { /** * The current page of elements from this collection . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setItems ( java . util . Collection ) } or { @ link # withItems ( java . util . Collection ) } if you want to override the * existing values . * @ param items * The current page of elements from this collection . * @ return Returns a reference to this object so that method calls can be chained together . */ public GetSdkTypesResult withItems ( SdkType ... items ) { } }
if ( this . items == null ) { setItems ( new java . util . ArrayList < SdkType > ( items . length ) ) ; } for ( SdkType ele : items ) { this . items . add ( ele ) ; } return this ;
public class Collections {

    /**
     * Returns an empty navigable map (immutable). This map is serializable.
     *
     * <p>This example illustrates the type-safe way to obtain an empty map:
     * <pre>{@code
     * NavigableMap<String, Date> s = Collections.emptyNavigableMap();
     * }</pre>
     *
     * @implNote Implementations of this method need not create a separate
     * {@code NavigableMap} object for each call.
     *
     * @param <K> the class of the map keys
     * @param <V> the class of the map values
     * @return an empty navigable map
     * @since 1.8
     */
    @SuppressWarnings("unchecked")
    public static final <K, V> NavigableMap<K, V> emptyNavigableMap() {
        // Safe: the shared empty map contains no entries, so any K/V works.
        return (NavigableMap<K, V>) UnmodifiableNavigableMap.EMPTY_NAVIGABLE_MAP;
    }
}
public class AdminDictStopwordsAction {

    /**
     * Renders the details page for a single stopwords dictionary entry.
     *
     * @param dictId id of the dictionary containing the entry
     * @param crudMode CRUD mode; must be DETAILS for this page
     * @param id id of the stopwords entry to display
     * @return the details HTML response with the edit form populated
     */
    @Execute
    public HtmlResponse details(final String dictId, final int crudMode, final long id) {
        verifyCrudMode(crudMode, CrudMode.DETAILS, dictId);
        saveToken();
        return asDetailsHtml().useForm(EditForm.class, op -> {
            op.setup(form -> {
                // Populate the form from the stored entry, or fail validation
                // (redirecting to the list page) if the entry no longer exists.
                stopwordsService.getStopwordsItem(dictId, id).ifPresent(entity -> {
                    form.input = entity.getInputValue();
                }).orElse(() -> {
                    throwValidationError(messages -> messages.addErrorsCrudCouldNotFindCrudTable(GLOBAL, dictId + ":" + id), () -> asListHtml(dictId));
                });
                form.id = id;
                form.crudMode = crudMode;
                form.dictId = dictId;
            });
        });
    }
}
public class FuncConcat { /** * Execute the function . The function must return * a valid object . * @ param xctxt The current execution context . * @ return A valid XObject . * @ throws javax . xml . transform . TransformerException */ public XObject execute ( XPathContext xctxt ) throws javax . xml . transform . TransformerException { } }
StringBuffer sb = new StringBuffer ( ) ; // Compiler says we must have at least two arguments . sb . append ( m_arg0 . execute ( xctxt ) . str ( ) ) ; sb . append ( m_arg1 . execute ( xctxt ) . str ( ) ) ; if ( null != m_arg2 ) sb . append ( m_arg2 . execute ( xctxt ) . str ( ) ) ; if ( null != m_args ) { for ( int i = 0 ; i < m_args . length ; i ++ ) { sb . append ( m_args [ i ] . execute ( xctxt ) . str ( ) ) ; } } return new XString ( sb . toString ( ) ) ;
public class ReturnAdder { /** * Adds return statements in method code whenever an implicit return is detected . * @ param node the method node where to add return statements * @ deprecated Use { @ link # visitMethod ( org . codehaus . groovy . ast . MethodNode ) } instead */ @ Deprecated public static void addReturnIfNeeded ( MethodNode node ) { } }
ReturnAdder adder = new ReturnAdder ( ) ; adder . visitMethod ( node ) ;
public class DataUtilities { /** * Checks a given name of a file if it is a supported vector extension . * @ param name the name of the file . * @ return < code > true < / code > , if the extension is supported . */ public static boolean isSupportedVectorExtension ( String name ) { } }
for ( String ext : supportedVectors ) { if ( name . toLowerCase ( ) . endsWith ( ext ) ) { return true ; } } return false ;
public class JsonBuilder {

    /**
     * Create a JsonReader by providing a Reader.
     *
     * The reader is wired with this builder's configured factories, object
     * mapper, and type adapters.
     *
     * @param reader The Reader object.
     * @return JsonReader object.
     */
    public JsonReader createReader(Reader reader) {
        return new JsonReader(reader, jsonObjectFactory, jsonArrayFactory, objectMapper, typeAdapters);
    }
}
public class BackupShortTermRetentionPoliciesInner {

    /**
     * Updates a database's short term retention policy.
     *
     * Blocks on the async variant and unwraps the response body.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database.
     * @param retentionDays The backup retention period in days. This is how many days Point-in-Time Restore will be supported.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the BackupShortTermRetentionPolicyInner object if successful.
     */
    public BackupShortTermRetentionPolicyInner beginUpdate(String resourceGroupName, String serverName, String databaseName, Integer retentionDays) {
        return beginUpdateWithServiceResponseAsync(resourceGroupName, serverName, databaseName, retentionDays).toBlocking().single().body();
    }
}
public class GlobalProperties {

    /**
     * Filters these GlobalProperties by the fields that are forwarded to the output
     * as described by the SemanticProperties.
     *
     * @param props The semantic properties holding information about forwarded fields.
     * @param input The index of the input.
     * @return The filtered GlobalProperties
     */
    public GlobalProperties filterBySemanticProperties(SemanticProperties props, int input) {
        if (props == null) {
            throw new NullPointerException("SemanticProperties may not be null.");
        }
        GlobalProperties gp = new GlobalProperties();
        // filter partitioning
        switch (this.partitioning) {
            case RANGE_PARTITIONED:
                // check if ordering is preserved
                Ordering newOrdering = new Ordering();
                for (int i = 0; i < this.ordering.getInvolvedIndexes().size(); i++) {
                    int sourceField = this.ordering.getInvolvedIndexes().get(i);
                    FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
                    if (targetField == null || targetField.size() == 0) {
                        // partitioning is destroyed
                        newOrdering = null;
                        break;
                    } else {
                        // use any field of target fields for now. We should use something like field equivalence sets in the future.
                        if (targetField.size() > 1) {
                            LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
                        }
                        newOrdering.appendOrdering(targetField.toArray()[0], this.ordering.getType(i), this.ordering.getOrder(i));
                    }
                }
                // Only keep range partitioning when every ordering field survived.
                if (newOrdering != null) {
                    gp.partitioning = PartitioningProperty.RANGE_PARTITIONED;
                    gp.ordering = newOrdering;
                    gp.partitioningFields = newOrdering.getInvolvedIndexes();
                    gp.distribution = this.distribution;
                }
                break;
            case HASH_PARTITIONED:
            case ANY_PARTITIONING:
            case CUSTOM_PARTITIONING:
                FieldList newPartitioningFields = new FieldList();
                for (int sourceField : this.partitioningFields) {
                    FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
                    if (targetField == null || targetField.size() == 0) {
                        // A dropped field destroys the partitioning.
                        newPartitioningFields = null;
                        break;
                    } else {
                        // use any field of target fields for now. We should use something like field equivalence sets in the future.
                        if (targetField.size() > 1) {
                            LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
                        }
                        newPartitioningFields = newPartitioningFields.addField(targetField.toArray()[0]);
                    }
                }
                if (newPartitioningFields != null) {
                    gp.partitioning = this.partitioning;
                    gp.partitioningFields = newPartitioningFields;
                    gp.customPartitioner = this.customPartitioner;
                }
                break;
            case FORCED_REBALANCED:
            case FULL_REPLICATION:
            case RANDOM_PARTITIONED:
                // These properties do not depend on specific fields; keep them as-is.
                gp.partitioning = this.partitioning;
                break;
            default:
                throw new RuntimeException("Unknown partitioning type.");
        }
        // filter unique field combinations
        if (this.uniqueFieldCombinations != null) {
            Set<FieldSet> newUniqueFieldCombinations = new HashSet<FieldSet>();
            for (FieldSet fieldCombo : this.uniqueFieldCombinations) {
                FieldSet newFieldCombo = new FieldSet();
                for (Integer sourceField : fieldCombo) {
                    FieldSet targetField = props.getForwardingTargetFields(input, sourceField);
                    if (targetField == null || targetField.size() == 0) {
                        // A combination survives only if every member field is forwarded.
                        newFieldCombo = null;
                        break;
                    } else {
                        // use any field of target fields for now. We should use something like field equivalence sets in the future.
                        if (targetField.size() > 1) {
                            LOG.warn("Found that a field is forwarded to more than one target field in " + "semantic forwarded field information. Will only use the field with the lowest index.");
                        }
                        newFieldCombo = newFieldCombo.addField(targetField.toArray()[0]);
                    }
                }
                if (newFieldCombo != null) {
                    newUniqueFieldCombinations.add(newFieldCombo);
                }
            }
            if (!newUniqueFieldCombinations.isEmpty()) {
                gp.uniqueFieldCombinations = newUniqueFieldCombinations;
            }
        }
        return gp;
    }
}
public class CommerceWishListPersistenceImpl { /** * Returns the commerce wish list with the primary key or throws a { @ link com . liferay . portal . kernel . exception . NoSuchModelException } if it could not be found . * @ param primaryKey the primary key of the commerce wish list * @ return the commerce wish list * @ throws NoSuchWishListException if a commerce wish list with the primary key could not be found */ @ Override public CommerceWishList findByPrimaryKey ( Serializable primaryKey ) throws NoSuchWishListException { } }
CommerceWishList commerceWishList = fetchByPrimaryKey ( primaryKey ) ; if ( commerceWishList == null ) { if ( _log . isDebugEnabled ( ) ) { _log . debug ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } throw new NoSuchWishListException ( _NO_SUCH_ENTITY_WITH_PRIMARY_KEY + primaryKey ) ; } return commerceWishList ;
public class Captcha { /** * creates a random String in given length * @ param length length of the string to create * @ return */ public static String randomString ( int length ) { } }
StringBuilder sb = new StringBuilder ( ) ; for ( int i = 0 ; i < length ; i ++ ) { sb . append ( chars [ AbstractCaptcha . rnd ( 0 , chars . length - 1 ) ] ) ; } return sb . toString ( ) ;
public class DescribeCustomerGatewaysRequest { /** * One or more customer gateway IDs . * Default : Describes all your customer gateways . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setCustomerGatewayIds ( java . util . Collection ) } or { @ link # withCustomerGatewayIds ( java . util . Collection ) } if * you want to override the existing values . * @ param customerGatewayIds * One or more customer gateway IDs . < / p > * Default : Describes all your customer gateways . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeCustomerGatewaysRequest withCustomerGatewayIds ( String ... customerGatewayIds ) { } }
if ( this . customerGatewayIds == null ) { setCustomerGatewayIds ( new com . amazonaws . internal . SdkInternalList < String > ( customerGatewayIds . length ) ) ; } for ( String ele : customerGatewayIds ) { this . customerGatewayIds . add ( ele ) ; } return this ;
public class CmsImportVersion7 {

    /**
     * Checks if the resource is in the list of immutable resources.<p>
     *
     * A resource is only reported immutable when it is both configured as
     * immutable AND already exists in the VFS (checked from the root site).
     *
     * @param resourceName the name of the resource
     * @return <code>true</code> or <code>false</code>
     */
    protected boolean checkImmutable(String resourceName) {
        boolean resourceImmutable = false;
        if (getImmutableResources().contains(resourceName)) {
            if (LOG.isDebugEnabled()) {
                LOG.debug(Messages.get().getBundle().key(Messages.LOG_IMPORTEXPORT_RESOURCENAME_IMMUTABLE_1, resourceName));
            }
            // this resource must not be modified by an import if it already exists
            String storedSiteRoot = getCms().getRequestContext().getSiteRoot();
            try {
                // Switch to the root site so the existence check uses the full path.
                getCms().getRequestContext().setSiteRoot("/");
                getCms().readResource(resourceName);
                // readResource succeeded, so the resource exists and is immutable.
                resourceImmutable = true;
                if (LOG.isDebugEnabled()) {
                    LOG.debug(Messages.get().getBundle().key(Messages.LOG_IMPORTEXPORT_IMMUTABLE_FLAG_SET_1, resourceName));
                }
            } catch (CmsException e) {
                // resourceNotImmutable will be true
                if (LOG.isDebugEnabled()) {
                    LOG.debug(Messages.get().getBundle().key(Messages.LOG_IMPORTEXPORT_ERROR_ON_TEST_IMMUTABLE_1, resourceName), e);
                }
            } finally {
                // Always restore the caller's site root.
                getCms().getRequestContext().setSiteRoot(storedSiteRoot);
            }
        }
        return resourceImmutable;
    }
}
public class NikeFS2SwapFileManager {

    /**
     * Retrieves a file handle on the swap directory of this session,
     * lazily creating the directory (plus a companion lock file) on first use.
     *
     * NOTE(review): createTempFile-then-delete-then-mkdirs is a classic
     * time-of-check/time-of-use race on shared temp directories — another
     * process could claim the name in between. Consider Files.createTempDirectory.
     *
     * @return The swap directory of this session.
     * @throws IOException if the temp file or lock file cannot be created
     */
    private static synchronized File getSwapDir() throws IOException {
        if (SWAP_DIR == null) {
            // create swap directory for this instance:
            // use createTempFile only to reserve a unique name ...
            File swapDir = File.createTempFile(SWAP_DIR_PREFIX, SWAP_DIR_SUFFIX, TMP_DIR);
            // ... then delete the file so a directory can take its place
            swapDir.delete();
            // create lock file
            File lockFile = new File(TMP_DIR, swapDir.getName() + LOCK_FILE_SUFFIX);
            lockFile.createNewFile();
            // delete lock file on exit, to make swap directory
            // eligible for cleanup.
            lockFile.deleteOnExit();
            // make swap directory
            swapDir.mkdirs();
            // works reliably only on Unix platforms!
            swapDir.deleteOnExit();
            SWAP_DIR = swapDir;
        }
        return SWAP_DIR;
    }
}
public class UnauthenticatedRequestTokenProcessingFilter { /** * Create the OAuth token for the specified consumer key . * @ param authentication The authentication request . * @ return The OAuth token . */ protected OAuthProviderToken createOAuthToken ( ConsumerAuthentication authentication ) { } }
return getTokenServices ( ) . createUnauthorizedRequestToken ( authentication . getConsumerDetails ( ) . getConsumerKey ( ) , authentication . getOAuthParameters ( ) . get ( OAuthConsumerParameter . oauth_callback . toString ( ) ) ) ;
public class EventExctractionUtil { /** * we cannot assume order of objects in the array are going to remain the * same , however , we can assume the objects in the array are unique in they * identifying field values . * identity object is the subset of the integrated state of the entity . * using that property we create a map for both pre and post arrays , with * the identifying sub state as the key . * then for all array elements in the identifying array we lookup if that * element was added or updated . */ public static Set < Event > compareAndGetEvents ( JSONArray pre , JSONArray post , JSONArray ids ) throws JSONException { } }
Set < Event > result = new HashSet < > ( ) ; Map < JSONWrapper , JSONWrapper > preMap = getJSONComparisionMap ( ids , pre ) ; Map < JSONWrapper , JSONWrapper > postMap = getJSONComparisionMap ( ids , post ) ; for ( JSONWrapper key : postMap . keySet ( ) ) { if ( ! preMap . containsKey ( key ) || ! preMap . get ( key ) . equals ( postMap . get ( key ) ) ) { Object preJsonObject = ( preMap . get ( key ) == null ) ? null : preMap . get ( key ) . getValue ( ) ; Set < Event > arrayChildResults = compareAndGetEvents ( preJsonObject , postMap . get ( key ) . getValue ( ) , key . getValue ( ) ) ; for ( Event arrayChildResult : arrayChildResults ) { if ( ! preMap . containsKey ( key ) ) { arrayChildResult . setOperation ( Operation . INSERT ) ; } result . add ( arrayChildResult ) ; } } } return result ;
public class CommsInboundChain {

    /**
     * Quiesce notification for this chain. Before the chain is stopped/destroyed we
     * notify clients so that they can close connections gracefully.
     * <p>
     * This method must NOT be synchronized (it would deadlock with update/stop);
     * we rely on CFW synchronization of chain operations.
     *
     * @see com.ibm.wsspi.channelfw.ChainEventListener#chainQuiesced(com.ibm.websphere.channelfw.ChainData)
     */
    @Override
    public void chainQuiesced(ChainData chainData) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "chainQuiesced", chainData);

        // Record the quiesced state before touching any connections.
        chainState.set(ChainState.QUIESCED.val);

        // First stop any MP connections which are established through COMMS.
        // Stopping connections is non-blocking.
        try {
            if (this._isSecureChain)
                _commsServerFacade.closeViaCommsMPConnections(JsConstants.ME_STOP_COMMS_SSL_CONNECTIONS);
            else
                _commsServerFacade.closeViaCommsMPConnections(JsConstants.ME_STOP_COMMS_CONNECTIONS);
        } catch (Exception e) {
            // Best effort: a failure here must not prevent the quiesce from completing.
            if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
                SibTr.debug(tc, "Failed in stopping MP connections which are establised through COMMS: ", e);
        }

        // No current connections remain; signal that the final stop can happen now.
        signalNoConnections();

        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "chainQuiesced");
    }
}
public class KMeansInpiredMethod { /** * Partitions the instances around a pivot . Used by quicksort and * kthSmallestValue . * @ param instsThe instances on which the tree is ( or is * to be ) built . * @ param index The master index array containing indices * of the instances . * @ param attidx The attribution / dimension based on which * the instances should be partitioned . * @ param lThe begining index of the portion of master index * array that should be partitioned . * @ param rThe end index of the portion of master index array * that should be partitioned . * @ return the index of the middle element */ protected static int partition ( Instances insts , int [ ] index , int attidx , int l , int r ) { } }
double pivot = insts . instance ( index [ ( l + r ) / 2 ] ) . value ( attidx ) ; int help ; while ( l < r ) { while ( ( insts . instance ( index [ l ] ) . value ( attidx ) < pivot ) && ( l < r ) ) { l ++ ; } while ( ( insts . instance ( index [ r ] ) . value ( attidx ) > pivot ) && ( l < r ) ) { r -- ; } if ( l < r ) { help = index [ l ] ; index [ l ] = index [ r ] ; index [ r ] = help ; l ++ ; r -- ; } } if ( ( l == r ) && ( insts . instance ( index [ r ] ) . value ( attidx ) > pivot ) ) { r -- ; } return r ;
public class ManagedIndex {

    /**
     * Copies the primary key fields of the given index entry into the master
     * storable. Required by the IndexEntryAccessor interface; delegates directly
     * to the underlying accessor.
     *
     * @param indexEntry the index entry to read the primary key from
     * @param master     the master storable to copy the primary key into
     * @throws FetchException if the underlying accessor fails
     */
    public void copyToMasterPrimaryKey(Storable indexEntry, S master) throws FetchException {
        mAccessor.copyToMasterPrimaryKey(indexEntry, master);
    }
}
public class BuildMetrics { /** * Get the total artifact transformation time . */ public long getElapsedArtifactTransformTime ( ) { } }
long result = 0 ; for ( FragmentedOperation transform : transforms . values ( ) ) { result += transform . getElapsedTime ( ) ; } return result ;
public class SearchUtils { /** * Find field within a class by its name . * @ param classNode class to search * @ param name name to search for * @ return found field ( or { @ code null } if not found ) * @ throws NullPointerException if any argument is { @ code null } * @ throws IllegalArgumentException if { @ code name } is empty */ public static FieldNode findField ( ClassNode classNode , String name ) { } }
Validate . notNull ( classNode ) ; Validate . notNull ( name ) ; Validate . notEmpty ( name ) ; return classNode . fields . stream ( ) . filter ( x -> name . equals ( x . name ) ) . findAny ( ) . orElse ( null ) ;
public class CPAttachmentFileEntryPersistenceImpl {

    /**
     * Returns an ordered range of all the cp attachment file entries where
     * classNameId = &#63; and classPK = &#63; and type = &#63; and status &ne; &#63;.
     * <p>
     * Useful when paginating results; returns at most <code>end - start</code>
     * instances (<code>start</code>/<code>end</code> are result-set indexes, not
     * primary keys; pass {@link QueryUtil#ALL_POS} for both to get everything).
     * Ordering follows <code>orderByComparator</code> when given, otherwise the
     * model default when paginating, otherwise primary-key ascending.
     *
     * @param classNameId       the class name ID
     * @param classPK           the class pk
     * @param type              the type
     * @param status            the status to exclude
     * @param start             the lower bound of the range (inclusive)
     * @param end               the upper bound of the range (not inclusive)
     * @param orderByComparator the comparator to order the results by (optionally
     *                          <code>null</code>)
     * @return the ordered range of matching cp attachment file entries
     */
    @Override
    public List<CPAttachmentFileEntry> findByC_C_T_NotST(long classNameId, long classPK,
            int type, int status, int start, int end,
            OrderByComparator<CPAttachmentFileEntry> orderByComparator) {
        // delegate to the caching overload (retrieveFromCache = true)
        return findByC_C_T_NotST(classNameId, classPK, type, status, start, end,
                orderByComparator, true);
    }
}
public class GasteigerPEPEPartialCharges { /** * Set the Flags to atoms and bonds from an atomContainer . * @ param container Container with the flags * @ param ac Container to put the flags * @ param b True , if the the flag is true * @ return Container with added flags */ private IAtomContainer setFlags ( IAtomContainer container , IAtomContainer ac , boolean b ) { } }
for ( Iterator < IAtom > it = container . atoms ( ) . iterator ( ) ; it . hasNext ( ) ; ) { int positionA = ac . indexOf ( it . next ( ) ) ; ac . getAtom ( positionA ) . setFlag ( CDKConstants . REACTIVE_CENTER , b ) ; } for ( Iterator < IBond > it = container . bonds ( ) . iterator ( ) ; it . hasNext ( ) ; ) { int positionB = ac . indexOf ( it . next ( ) ) ; ac . getBond ( positionB ) . setFlag ( CDKConstants . REACTIVE_CENTER , b ) ; } return ac ;
public class PutSkillAuthorizationRequest {

    /**
     * The authorization result specific to OAUTH code grant output. "Code" must be
     * populated in the AuthorizationResult map to establish the authorization.
     *
     * @param authorizationResult the authorization result specific to OAUTH code
     *                            grant output; "Code" must be populated in the map
     * @return a reference to this object so that method calls can be chained together
     */
    public PutSkillAuthorizationRequest withAuthorizationResult(
            java.util.Map<String, String> authorizationResult) {
        // fluent builder style: delegate to the setter and return this
        setAuthorizationResult(authorizationResult);
        return this;
    }
}
public class GraphHelper { /** * Get relationshipDef name from entityType using relationship attribute . * if more than one relationDefs are returned for an attribute . * e . g . hive _ column . table * hive _ table . columns - > hive _ column . table * hive _ table . partitionKeys - > hive _ column . table * resolve by comparing all incoming edges typename with relationDefs name returned for an attribute * to pick the right relationshipDef name */ public String getRelationshipDefName ( AtlasVertex entityVertex , AtlasEntityType entityType , String attributeName ) { } }
AtlasRelationshipDef relationshipDef = getRelationshipDef ( entityVertex , entityType , attributeName ) ; return ( relationshipDef != null ) ? relationshipDef . getName ( ) : null ;
public class WidgetsUtils {

    /**
     * Appends a widget after a DOM element and hides the element.
     * Element classes will be copied to the new widget.
     *
     * @param e      the DOM element to hide; must not be null
     * @param widget the widget to insert after it; must not be null
     */
    public static void hideAndAfter(Element e, Widget widget) {
        assert e != null && widget != null;
        // If the element is already backed by an attached widget, swap it in place
        // (false presumably means "do not remove the original" — TODO confirm
        // against replaceWidget's contract).
        if ($(e).widget() != null && $(e).widget().isAttached()) {
            replaceWidget($(e).widget(), widget, false);
        } else {
            // Detach first so the DOM insertion below does not leave the widget in
            // an inconsistent attached state, then re-attach under the new parent.
            detachWidget(widget);
            hideAndAfter(e, widget.getElement());
            attachWidget(widget, getFirstParentWidget(widget));
        }
    }
}
public class QuartzScheduler { /** * Get a list containing all of the < code > { @ link org . quartz . listeners . TriggerListener } < / code > s in * the < code > Scheduler < / code > ' s < i > internal < / i > list . */ public List < TriggerListener > getInternalTriggerListeners ( ) { } }
synchronized ( internalTriggerListeners ) { return java . util . Collections . unmodifiableList ( new LinkedList < TriggerListener > ( internalTriggerListeners . values ( ) ) ) ; }
public class Nodes { /** * Normalize a string . * < p > " normalized " in this context means all whitespace characters * are replaced by space characters and consecutive whitespace * characaters are collapsed . < / p > */ static String normalize ( String s ) { } }
StringBuilder sb = new StringBuilder ( ) ; boolean changed = false ; boolean lastCharWasWS = false ; final int len = s . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char c = s . charAt ( i ) ; if ( Character . isWhitespace ( c ) ) { if ( ! lastCharWasWS ) { sb . append ( SPACE ) ; changed |= c != SPACE ; } else { changed = true ; } lastCharWasWS = true ; } else { sb . append ( c ) ; lastCharWasWS = false ; } } return changed ? sb . toString ( ) : s ;
public class AbstractReadableInstantFieldProperty { /** * Returns the difference between this field property instant and the one * passed in , in the units of this field . The sign of the difference * matches that of compareTo . In other words , this field property ' s instant * is the minuend . * @ param instant the subtrahend , null means now * @ return the difference in the units of this field * @ see DateTimeField # getDifference */ public long getDifferenceAsLong ( ReadableInstant instant ) { } }
if ( instant == null ) { return getField ( ) . getDifferenceAsLong ( getMillis ( ) , DateTimeUtils . currentTimeMillis ( ) ) ; } return getField ( ) . getDifferenceAsLong ( getMillis ( ) , instant . getMillis ( ) ) ;
public class MutableTimecode {

    /**
     * Returns a Timecode instance for the given timecode string and timecode base.
     * Acceptable input is the normal representation HH:MM:SS:FF (drop frame and
     * non drop frame).
     *
     * @param timecode     the timecode string to parse
     * @param timecodeBase the frames-per-second base of the timecode
     * @return the parsed timecode
     * @throws IllegalArgumentException if the string cannot be parsed
     */
    public static MutableTimecode valueOf(String timecode, int timecodeBase)
            throws IllegalArgumentException {
        // delegate to the full overload with the default (NORMAL) string type
        return valueOf(timecode, timecodeBase, StringType.NORMAL);
    }
}
public class BoxSession { /** * This clears the contents of the directory provided in { @ link # getCacheDir ( ) } . */ public void clearCache ( ) { } }
File cacheDir = getCacheDir ( ) ; if ( cacheDir . exists ( ) ) { File [ ] files = cacheDir . listFiles ( ) ; if ( files != null ) { for ( File child : files ) { deleteFilesRecursively ( child ) ; } } }
public class ShortField { /** * Convert the native data type ( Short ) to a string . * @ param tempBinary The physical data convert to a string ( must be the raw data class ) . * @ return A display string representing this binary data . */ public String binaryToString ( Object objData ) { } }
initGlobals ( ) ; if ( objData == null ) return Constants . BLANK ; String strReturn = null ; synchronized ( gIntegerFormat ) { strReturn = gIntegerFormat . format ( ( ( Short ) objData ) . shortValue ( ) ) ; } return strReturn ;
public class LevenbergMarquardt {

    /**
     * Computes a simple forward-difference numerical Jacobian.
     *
     * @param param    (input) the parameter vector at which the Jacobian is evaluated;
     *                 temporarily perturbed in place but restored before returning
     * @param jacobian (output) matrix the Jacobian columns are written into
     */
    protected void computeNumericalJacobian(DMatrixRMaj param, DMatrixRMaj jacobian) {
        double invDelta = 1.0 / DELTA;

        // baseline function value at the unperturbed parameters
        function.compute(param, temp0);

        // compute the jacobian by perturbing the parameters slightly
        // then seeing how it effects the results.
        for (int i = 0; i < param.getNumElements(); i++) {
            // perturb parameter i in place (restored at the end of the iteration)
            param.data[i] += DELTA;
            function.compute(param, temp1);
            // compute the difference between the two parameters and divide by the delta
            // temp1 = (temp1 - temp0) / delta
            CommonOps_DDRM.add(invDelta, temp1, -invDelta, temp0, temp1);

            // copy the results into the jacobian matrix
            // J(i,:) = temp1
            CommonOps_DDRM.insert(temp1, jacobian, 0, i);

            // undo the perturbation so param is unchanged for the next column
            param.data[i] -= DELTA;
        }
    }
}
public class Strings2 { /** * Returns a markup id that is JQuery - safe and could be used as a selector . * @ param markupId the markup id to escape * @ return the component ' s markup id that is escaped so that it could be used as JQuery selector */ public static CharSequence escapeMarkupId ( final CharSequence markupId ) { } }
Args . notNull ( markupId , "markupId" ) ; // create pattern for : ! " # $ % & ' ( ) * + , . / : ; < = > ? @ [ \ ] ^ ` { | } ~ final StringCharacterIterator iterator = new StringCharacterIterator ( markupId . toString ( ) ) ; final StringBuilder result = new StringBuilder ( ( int ) ( markupId . length ( ) * 1.5 ) ) ; final String escape = "\\\\" ; char c = iterator . current ( ) ; while ( c != CharacterIterator . DONE ) { boolean escaped = false ; for ( char x : ESCAPE_CHARS ) { if ( x == c ) { result . append ( escape ) . append ( c ) ; escaped = true ; break ; } } if ( ! escaped ) { result . append ( c ) ; } c = iterator . next ( ) ; } return result . toString ( ) ;
public class IssueDao {

    /**
     * Gets a list of issues by their keys. The result does NOT contain {@code null}
     * values for issues not found, so the result may be smaller than the number of
     * keys. A single issue is returned if the input contains multiple occurrences
     * of a key.
     * <p>Results may be in a different order than the input keys.</p>
     */
    public List<IssueDto> selectByKeys(DbSession session, Collection<String> keys) {
        // chunk the key collection to stay under the SQL IN-clause size limit
        return executeLargeInputs(keys, mapper(session)::selectByKeys);
    }
}
public class ArrayMath { /** * methods for filtering vectors - - - - - */ public static int countNaN ( double [ ] v ) { } }
int c = 0 ; for ( double d : v ) { if ( Double . isNaN ( d ) ) { c ++ ; } } return c ;
public class CharacterGene { /** * Create a new CharacterGene with a randomly chosen character from the * set of valid characters . * @ param validCharacters the valid characters for this gene . * @ return a new valid , < em > random < / em > gene , * @ throws NullPointerException if the { @ code validCharacters } are * { @ code null } . */ public static CharacterGene of ( final CharSeq validCharacters ) { } }
return new CharacterGene ( validCharacters , RandomRegistry . getRandom ( ) . nextInt ( validCharacters . length ( ) ) ) ;
public class JacksonDBCollection {

    /**
     * Saves an object to this collection (does insert or update based on the
     * object's _id).
     *
     * @param object  the object to save
     * @param concern the write concern
     * @return the write result, retaining the saved document so generated ids can
     *         be read back
     * @throws MongoException if an error occurred
     */
    public WriteResult<T, K> save(T object, WriteConcern concern) throws MongoException {
        // serialize via Jackson first, then hand the DBObject to the driver
        DBObject dbObject = convertToDbObject(object);
        return new WriteResult<T, K>(this, dbCollection.save(dbObject, concern), dbObject);
    }
}
public class AtomicDoubleCastExtensions {

    /**
     * Converts the given value to {@code Byte}. This function is not null-safe.
     *
     * @param number a number of {@code AtomicDouble} type
     * @return the equivalent value to {@code number} of {@code Byte} type
     *         (narrowing conversion: the fractional part and high-order bits are
     *         discarded)
     */
    @Pure
    @Inline(value = "$2.valueOf($1.byteValue())", imported = Byte.class)
    public static Byte toByte(AtomicDouble number) {
        // body must stay in sync with the @Inline template above
        return Byte.valueOf(number.byteValue());
    }
}
public class SMailPostingMessage { @ Override public void makeEmlFile ( String path ) { } }
assertArgumentNotNull ( "path" , path ) ; ByteArrayOutputStream ous = null ; try { ous = new ByteArrayOutputStream ( ) ; message . writeTo ( ous ) ; final String eml = ous . toString ( ) ; new FileTextIO ( ) . encodeAsUTF8 ( ) . write ( path , eml ) ; } catch ( IOException | MessagingException e ) { logger . info ( "Failed to make EML file to the path: " + path + " subject=" + subject , e ) ; } finally { if ( ous != null ) { try { ous . close ( ) ; } catch ( IOException ignored ) { } } }
public class JoinWithSolutionSetFirstDriver {

    /**
     * Initializes the driver: obtains the solution-set hash table from the
     * iteration broker and builds the serializers/comparators needed to probe it.
     * Fails fast when the surrounding task is not an iterative task, since the
     * solution set only exists inside an iteration.
     */
    @Override
    @SuppressWarnings("unchecked")
    public void initialize() {
        // grab a handle to the hash table from the iteration broker
        if (taskContext instanceof AbstractIterativePactTask) {
            AbstractIterativePactTask<?, ?> iterativeTaskContext = (AbstractIterativePactTask<?, ?>) taskContext;
            String identifier = iterativeTaskContext.brokerKey();
            // unchecked: the broker stores the table untyped; IT1 is guaranteed by
            // the job graph wiring — TODO confirm against SolutionSetBroker usage
            this.hashTable = (CompactingHashTable<IT1>) SolutionSetBroker.instance().get(identifier);
        } else {
            throw new RuntimeException("The task context of this driver is no iterative task context.");
        }

        TaskConfig config = taskContext.getTaskConfig();
        ClassLoader classLoader = taskContext.getUserCodeClassLoader();

        // build side = solution set (first input), probe side = second input
        TypeSerializer<IT1> solutionSetSerializer = this.hashTable.getBuildSideSerializer();
        TypeSerializer<IT2> probeSideSerializer = taskContext.<IT2>getInputSerializer(0).getSerializer();
        TypeComparatorFactory<IT2> probeSideComparatorFactory = config.getDriverComparator(0, classLoader);
        // duplicate the comparator so this driver owns a private, stateful instance
        TypeComparator<IT1> solutionSetComparator = this.hashTable.getBuildSideComparator().duplicate();
        this.probeSideComparator = probeSideComparatorFactory.createComparator();

        // reusable record instances for the probe loop
        solutionSideRecord = solutionSetSerializer.createInstance();
        probeSideRecord = probeSideSerializer.createInstance();

        TypePairComparatorFactory<IT1, IT2> factory =
            taskContext.getTaskConfig().getPairComparatorFactory(taskContext.getUserCodeClassLoader());
        pairComparator = factory.createComparator21(solutionSetComparator, this.probeSideComparator);
    }
}
public class TransactionAction { @ Nonnull public static TransactionAction abort ( @ Nonnull Abort abort ) { } }
TransactionAction self = new TransactionAction ( ) ; self . type = Discriminator . abort ; self . abort = abort ; return self ;
public class Item { /** * Sets the value of the specified attribute in the current item to the * given value . */ public Item withFloat ( String attrName , float val ) { } }
checkInvalidAttrName ( attrName ) ; return withNumber ( attrName , Float . valueOf ( val ) ) ;
public class LFltToCharFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */ @ Nonnull public static LFltToCharFunction fltToCharFunctionFrom ( Consumer < LFltToCharFunctionBuilder > buildingFunction ) { } }
LFltToCharFunctionBuilder builder = new LFltToCharFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ;
public class DBConn { /** * Close the database connection . This object can be reused after the connection has * been closed by calling { @ link # open ( ) } again . */ public void close ( ) { } }
// Get the connection ' s protocol ( TBinaryProtocol ) , and the protocol ' s transport // ( TSocket ) and close it . if ( m_client != null ) { TProtocol protocol = m_client . getInputProtocol ( ) ; if ( protocol != null ) { TTransport transport = protocol . getTransport ( ) ; if ( transport != null ) { transport . close ( ) ; } } } m_client = null ; m_bFailed = true ; // Prevent reusing this connection until reconnected m_bDBOpen = false ;
public class CommonConfigUtils { /** * Returns the value for the configuration attribute matching the key provided . If the value does not exist or is empty , the * provided default value will be returned . */ public String getConfigAttributeWithDefaultValue ( Map < String , Object > props , String key , String defaultValue ) { } }
String result = getAndTrimConfigAttribute ( props , key ) ; if ( key != null && result == null ) { if ( defaultValue != null ) { result = defaultValue ; } } return result ;
public class BinaryJedis {

    /**
     * Sorts a Set or a List according to the specified parameters and stores the
     * result at dstkey.
     *
     * @see #sort(byte[], SortingParams)
     * @see #sort(byte[])
     * @see #sort(byte[], byte[])
     * @param key               the key to sort
     * @param sortingParameters sort options (BY, LIMIT, GET, ASC/DESC, ALPHA)
     * @param dstkey            destination key for the sorted result
     * @return the number of elements of the list at dstkey
     */
    @Override
    public Long sort(final byte[] key, final SortingParams sortingParameters, final byte[] dstkey) {
        // SORT ... STORE is not allowed inside MULTI/pipeline via this API
        checkIsInMultiOrPipeline();
        client.sort(key, sortingParameters, dstkey);
        // blocking read of the server's integer reply (stored element count)
        return client.getIntegerReply();
    }
}
public class Iterate {

    /**
     * Flattens a collection of collections into one "flat" collection.
     *
     * @param iterable a list of lists, e.g. {{1, 2, 3}, {4, 5}, {6}}
     * @return a flattened list, e.g. {1, 2, 3, 4, 5, 6}
     */
    public static <T> Collection<T> flatten(Iterable<? extends Iterable<T>> iterable) {
        // flatCollect with the identity function concatenates the inner iterables
        return Iterate.flatCollect(iterable, Functions.<Iterable<T>>identity());
    }
}
public class EntityUtilities {

    /**
     * Builds a Filter from URL query parameters.
     * <p>
     * If {@code paramMap} carries a numeric filter id under {@code filterName},
     * the persisted Filter is loaded. Otherwise a new Filter is assembled from the
     * remaining parameters, classified by their prefixes: category (internal or
     * external, optionally "catId-projId"), tag, group tag, locale, or — as the
     * fallback — a named field accepted by {@code fieldFilter}.
     * Malformed numeric parameters are logged and skipped, never fatal.
     *
     * @return the loaded or newly populated Filter (never null)
     */
    public static Filter populateFilter(final EntityManager entityManager,
            final Map<String, String> paramMap, final String filterName, final String tagPrefix,
            final String groupTagPrefix, final String categoryInternalPrefix,
            final String categoryExternalPrefix, final String localePrefix,
            final IFieldFilter fieldFilter) {
        // attempt to get the filter id from the url
        Integer filterId = null;
        if (paramMap.containsKey(filterName)) {
            final String filterQueryParam = paramMap.get(filterName);
            try {
                filterId = Integer.parseInt(filterQueryParam);
            } catch (final Exception ex) {
                // filter value was not an integer
                filterId = null;
                log.debug("The filter ID URL query parameter was not an integer. Got " + filterQueryParam + ". Probably a malformed URL.", ex);
            }
        }
        Filter filter = null;
        /* First attempt to populate the filter from a filterID variable */
        if (filterId != null) {
            filter = entityManager.find(Filter.class, filterId);
        }
        /* If that fails, use the other URL params */
        if (filter == null) {
            filter = new Filter();
            for (final String key : paramMap.keySet()) {
                // classify the parameter by its prefix; classifications are mutually
                // exclusive and checked in priority order below
                final boolean tagVar = tagPrefix != null && key.startsWith(tagPrefix);
                final boolean groupTagVar = groupTagPrefix != null && key.startsWith(groupTagPrefix);
                final boolean catIntVar = categoryInternalPrefix != null && key.startsWith(categoryInternalPrefix);
                final boolean catExtVar = categoryExternalPrefix != null && key.startsWith(categoryExternalPrefix);
                final boolean localeVar = localePrefix != null && key.matches("^" + localePrefix + "\\d*$");
                final String state = paramMap.get(key);
                // add the filter category states
                if (catIntVar || catExtVar) {
                    /* get the category and project id data from the variable name */
                    final String catProjDetails = catIntVar
                            ? key.replaceFirst(categoryInternalPrefix, "")
                            : key.replaceFirst(categoryExternalPrefix, "");
                    // split the category and project id out of the data
                    final String[] catProjID = catProjDetails.split("-");
                    /* validity check: expect "catId" or "catId-projId" */
                    if (catProjID.length != 1 && catProjID.length != 2)
                        continue;
                    // try to get the category and project ids
                    Integer catID = null;
                    Integer projID = null;
                    try {
                        catID = Integer.parseInt(catProjID[0]);
                        /* a single element means only the category was specified;
                         * the project is then the common project */
                        if (catProjID.length == 2)
                            projID = Integer.parseInt(catProjID[1]);
                    } catch (final Exception ex) {
                        log.debug("Was expecting an integer. Got " + catProjID[0] + ". Probably a malformed URL.", ex);
                        continue;
                    }
                    // at this point we have found a url variable that
                    // contains a category and project id
                    final Category category = entityManager.find(Category.class, catID);
                    final Project project = projID != null ? entityManager.find(Project.class, projID) : null;
                    // map the AND/OR logic token onto the internal/external db state
                    Integer dbState;
                    if (catIntVar) {
                        if (state.equals(CommonFilterConstants.AND_LOGIC))
                            dbState = CommonFilterConstants.CATEGORY_INTERNAL_AND_STATE;
                        else
                            dbState = CommonFilterConstants.CATEGORY_INTERNAL_OR_STATE;
                    } else {
                        if (state.equals(CommonFilterConstants.AND_LOGIC))
                            dbState = CommonFilterConstants.CATEGORY_EXTERNAL_AND_STATE;
                        else
                            dbState = CommonFilterConstants.CATEGORY_EXTERNAL_OR_STATE;
                    }
                    final FilterCategory filterCategory = new FilterCategory();
                    filterCategory.setFilter(filter);
                    filterCategory.setProject(project);
                    filterCategory.setCategory(category);
                    filterCategory.setCategoryState(dbState);
                    filter.getFilterCategories().add(filterCategory);
                }
                // add the filter tag states
                else if (tagVar) {
                    try {
                        final Integer tagId = Integer.parseInt(key.replaceFirst(tagPrefix, ""));
                        final Integer intState = Integer.parseInt(state);
                        // get the Tag object that the tag id represents
                        final Tag tag = entityManager.getReference(Tag.class, tagId);
                        if (tag != null) {
                            final FilterTag filterTag = new FilterTag();
                            filterTag.setTag(tag);
                            filterTag.setTagState(intState);
                            filterTag.setFilter(filter);
                            filter.getFilterTags().add(filterTag);
                        }
                    } catch (final Exception ex) {
                        log.debug("Probably an invalid tag query parameter. Parameter: " + key + " Value: " + state, ex);
                    }
                } else if (groupTagVar) {
                    // NOTE(review): unlike the tagVar branch above, parse errors here
                    // are NOT caught — a malformed group-tag id would propagate.
                    final Integer tagId = Integer.parseInt(key.replaceFirst(groupTagPrefix, ""));
                    // get the Tag object that the tag id represents
                    final Tag tag = entityManager.getReference(Tag.class, tagId);
                    if (tag != null) {
                        final FilterTag filterTag = new FilterTag();
                        filterTag.setTag(tag);
                        filterTag.setTagState(CommonFilterConstants.GROUP_TAG_STATE);
                        filterTag.setFilter(filter);
                        filter.getFilterTags().add(filterTag);
                    }
                } else if (localeVar) {
                    try {
                        // the value encodes "localeName<state digits>"; split letters/digits
                        final String localeName = state.replaceAll("\\d", "");
                        final Integer intState = Integer.parseInt(state.replaceAll("[^\\d]", ""));
                        final FilterLocale filterLocale = new FilterLocale();
                        filterLocale.setLocaleName(localeName);
                        filterLocale.setLocaleState(intState);
                        filterLocale.setFilter(filter);
                        filter.getFilterLocales().add(filterLocale);
                    } catch (final Exception ex) {
                        log.debug("Probably an invalid locale query parameter. Parameter: " + key + " Value: " + state, ex);
                    }
                }
                // add the filter field states
                else {
                    if (fieldFilter.hasFieldName(key)) {
                        final FilterField filterField = new FilterField();
                        filterField.setFilter(filter);
                        filterField.setField(key);
                        filterField.setValue(state);
                        filterField.setDescription(fieldFilter.getFieldDesc(key));
                        filter.getFilterFields().add(filterField);
                    }
                }
            }
        }
        return filter;
    }
}
public class UCharacterNameIterator {

    /**
     * <p>Gets the next result for this iteration, returning true if we are not at
     * the end of the iteration, false otherwise.</p>
     * <p>If the return value is false, the contents of {@code element} are not
     * updated.</p>
     *
     * @param element for storing the result codepoint and name
     * @return true if we are not at the end of the iteration, false otherwise
     * @see android.icu.util.ValueIterator.Element
     */
    @Override
    public boolean next(ValueIterator.Element element) {
        if (m_current_ >= m_limit_) {
            return false;
        }

        // Algorithmic names only exist for the Unicode / extended name choices.
        if (m_choice_ == UCharacterNameChoice.UNICODE_CHAR_NAME
                || m_choice_ == UCharacterNameChoice.EXTENDED_CHAR_NAME) {
            int length = m_name_.getAlgorithmLength();
            if (m_algorithmIndex_ < length) {
                while (m_algorithmIndex_ < length) {
                    // find the algorithm range that could contain m_current_
                    if (m_algorithmIndex_ < 0
                            || m_name_.getAlgorithmEnd(m_algorithmIndex_) < m_current_) {
                        m_algorithmIndex_++;
                    } else {
                        break;
                    }
                }
                if (m_algorithmIndex_ < length) {
                    // interleave the data-driven ones with the algorithmic ones;
                    // iterate over all algorithmic ranges, assumed ascending
                    int start = m_name_.getAlgorithmStart(m_algorithmIndex_);
                    if (m_current_ < start) {
                        // current codepoint is before the algorithmic range: emit
                        // group-table names up to the range start (or the limit)
                        int end = start;
                        if (m_limit_ <= start) {
                            end = m_limit_;
                        }
                        if (!iterateGroup(element, end)) {
                            m_current_++;
                            return true;
                        }
                    }
                    // after iterateGroup fails, the current codepoint may have
                    // advanced past the limit
                    if (m_current_ >= m_limit_) {
                        return false;
                    }

                    // current codepoint is inside the algorithmic range
                    element.integer = m_current_;
                    element.value = m_name_.getAlgorithmName(m_algorithmIndex_, m_current_);
                    // reset the group index while we are in the algorithmic names
                    m_groupIndex_ = -1;
                    m_current_++;
                    return true;
                }
            }
        }

        // enumerate the character names after the last algorithmic range
        if (!iterateGroup(element, m_limit_)) {
            m_current_++;
            return true;
        } else if (m_choice_ == UCharacterNameChoice.EXTENDED_CHAR_NAME) {
            // extended names (e.g. <control-XXXX>) as a last resort
            if (!iterateExtended(element, m_limit_)) {
                m_current_++;
                return true;
            }
        }

        return false;
    }
}
public class TypePoolGroupNameMap { /** * Is the value set with the appropriate keys ? * @ param type Resource type * @ param poolGroup Name of pool group * @ return True if this map contains a mapping for the specified key , false * otherwise */ public boolean containsKey ( ResourceType type , String poolGroupName ) { } }
Map < String , V > poolGroupNameMap = typePoolGroupNameMap . get ( type ) ; if ( poolGroupNameMap == null ) { return false ; } return poolGroupNameMap . containsKey ( poolGroupName ) ;
public class CmsSite { /** * Returns the server prefix for the given resource in this site , used to distinguish between * secure ( https ) and non - secure ( http ) sites . < p > * This is required since a resource may have an individual " secure " setting using the property * { @ link org . opencms . file . CmsPropertyDefinition # PROPERTY _ SECURE } , which means this resource * must be delivered only using a secure protocol . < p > * The result will look like < code > http : / / site . enterprise . com : 8080 / < / code > or < code > https : / / site . enterprise . com / < / code > . < p > * @ param cms the current users OpenCms context * @ param resourceName the resource name * @ return the server prefix for the given resource in this site * @ see # getSecureUrl ( ) * @ see # getUrl ( ) */ public String getServerPrefix ( CmsObject cms , String resourceName ) { } }
if ( resourceName . startsWith ( cms . getRequestContext ( ) . getSiteRoot ( ) ) ) { // make sure this can also be used with a resource root path resourceName = resourceName . substring ( cms . getRequestContext ( ) . getSiteRoot ( ) . length ( ) ) ; } boolean secure = OpenCms . getStaticExportManager ( ) . isSecureLink ( cms , resourceName , cms . getRequestContext ( ) . isSecureRequest ( ) ) ; return ( secure ? getSecureUrl ( ) : getUrl ( ) ) ;
public class WavImpl { /** * Open the audio stream and prepare it . * @ param media The audio media . * @ return The audio source data . * @ throws IOException If error when reading the audio file . * @ throws LionEngineException If error when getting the stream . */ private static AudioInputStream openStream ( Media media ) throws IOException { } }
try { return AudioSystem . getAudioInputStream ( media . getInputStream ( ) ) ; } catch ( final UnsupportedAudioFileException exception ) { throw new IOException ( ERROR_PLAY_SOUND + media . getPath ( ) , exception ) ; }
public class EntityUtils { /** * extractIdSeq . * @ param entities a { @ link java . util . Collection } object . * @ param < T > a T object . * @ return a { @ link java . lang . String } object . */ public static < T extends Entity < ? > > String extractIdSeq ( Collection < T > entities ) { } }
if ( null == entities || entities . isEmpty ( ) ) { return "" ; } StringBuilder idBuf = new StringBuilder ( "," ) ; for ( Iterator < T > iter = entities . iterator ( ) ; iter . hasNext ( ) ; ) { T element = iter . next ( ) ; try { idBuf . append ( String . valueOf ( PropertyUtils . getProperty ( element , "id" ) ) ) ; idBuf . append ( ',' ) ; } catch ( Exception e ) { throw new RuntimeException ( e . getMessage ( ) ) ; } } return idBuf . toString ( ) ;
public class Instances {

    /**
     * Sets the indices of relevant features and derives the irrelevant ones
     * as the set complement (excluding the class attribute).
     *
     * NOTE(review): the walk below assumes {@code indicesRelevants} is sorted
     * in ascending order — TODO confirm with callers.
     * NOTE(review): an empty {@code indicesRelevants} array would make
     * {@code indicesRelevants[indexRel]} throw ArrayIndexOutOfBoundsException;
     * presumably callers never pass an empty array — verify.
     *
     * @param indicesRelevants attribute indices considered relevant
     */
    public void setIndicesRelevants(int[] indicesRelevants) {
        this.indicesRelevants = indicesRelevants;
        // -1 to skip the class attribute
        int numIrrelevantFeatures = this.numAttributes() - this.indicesRelevants.length - 1;
        this.indicesIrrelevants = new int[numIrrelevantFeatures];
        // Infers and sets the set of irrelevant features:
        // walk all attributes once, advancing a cursor into the (sorted)
        // relevant indices; anything not matched by the cursor is irrelevant.
        int index = 0;
        int indexRel = 0;
        for (int i = 0; i < numAttributes(); i++) {
            if (i != classIndex()) {
                // advance the relevant-index cursor past indices below i
                while (indexRel < indicesRelevants.length - 1 && i > indicesRelevants[indexRel])
                    indexRel++;
                if (indicesRelevants[indexRel] != i) {
                    indicesIrrelevants[index] = i;
                    index++;
                }
            }
        }
    }
}
public class DateContext {

    /**
     * Builds a {@link Date} from discrete calendar fields in the default time zone.
     *
     * @param month       the month (1-12)
     * @param day         the day of month (1-31)
     * @param year        the 4-digit year
     * @param hour        the hour of day (0-23)
     * @param minute      the minutes (0-59)
     * @param second      the seconds (0-59)
     * @param millisecond the milliseconds (0-999)
     * @return the {@link Date} instance for the given field values
     * @throws NumberFormatException if the values are invalid numbers
     */
    public Date createDate(int month, int day, int year, int hour, int minute, int second, int millisecond) {
        final GregorianCalendar calendar = new GregorianCalendar();
        // clear() first so unset fields (e.g. AM_PM) do not leak in from "now"
        calendar.clear();
        // Calendar months are zero-based, hence month - 1.
        calendar.set(year, month - 1, day, hour, minute, second);
        calendar.set(Calendar.MILLISECOND, millisecond);
        return calendar.getTime();
    }
}
public class ConnectionFactoryResourceTypeImpl { /** * If not already created , a new < code > property < / code > element will be created and returned . * Otherwise , the first existing < code > property < / code > element will be returned . * @ return the instance defined for the element < code > property < / code > */ public PropertyType < ConnectionFactoryResourceType < T > > getOrCreateProperty ( ) { } }
List < Node > nodeList = childNode . get ( "property" ) ; if ( nodeList != null && nodeList . size ( ) > 0 ) { return new PropertyTypeImpl < ConnectionFactoryResourceType < T > > ( this , "property" , childNode , nodeList . get ( 0 ) ) ; } return createProperty ( ) ;
public class GraphAlgorithmWrappingBase { /** * Merge the other configuration into this algorithm ' s after the call to * { @ link # canMergeConfigurationWith } has checked that the configurations * can be merged . * @ param other the algorithm from which to merge configuration * @ see # canMergeConfigurationWith ( GraphAlgorithmWrappingBase ) */ protected void mergeConfiguration ( GraphAlgorithmWrappingBase other ) { } }
Preconditions . checkNotNull ( other ) ; parallelism = ( parallelism == PARALLELISM_DEFAULT ) ? other . parallelism : ( ( other . parallelism == PARALLELISM_DEFAULT ) ? parallelism : Math . min ( parallelism , other . parallelism ) ) ;
public class StringParser { /** * Parse the given { @ link String } as { @ link BigDecimal } . * @ param sStr * The String to parse . May be < code > null < / code > . * @ param aDefault * The default value to be returned if the passed string could not be * converted to a valid value . May be < code > null < / code > . * @ return < code > aDefault < / code > if the string does not represent a valid * value . */ @ Nullable public static BigDecimal parseBigDecimal ( @ Nullable final String sStr , @ Nullable final BigDecimal aDefault ) { } }
if ( sStr != null && sStr . length ( ) > 0 ) try { return new BigDecimal ( _getUnifiedDecimal ( sStr ) ) ; } catch ( final NumberFormatException ex ) { // Fall through } return aDefault ;
public class BitsyElement { /** * This method prepares the vertex / edge for an update */ public void markForUpdate ( ) { } }
if ( ! updated ) { updated = true ; // Make a copy of the underlying property map , if non - null if ( properties != null ) { properties = properties . copyOf ( ) ; } tx . markForPropertyUpdate ( this ) ; }
public class AbstractComputeModule { /** * Providers for memoized suppliers * These provides wrap the base injection * with a memoized implementation . * todo : make expiration configurable */ @ Provides @ Memoized @ Singleton final Supplier < Set < HardwareFlavor > > provideMemoizedHardwareFlavorSupplier ( Supplier < Set < HardwareFlavor > > originalSupplier ) { } }
return Suppliers . memoizeWithExpiration ( originalSupplier , 1L , TimeUnit . MINUTES ) ;
public class NamedParameters { /** * TODO : Make it more efficient . * @ param sqlQuery * @ return */ public String deParameterize ( String sqlQuery ) { } }
String result = sqlQuery ; for ( String key : namedParams . keySet ( ) ) { Object value = namedParams . get ( key ) ; if ( value instanceof String ) { // This could be combined with last else ( but mostly we encounter String ) result = result . replaceAll ( String . format ( ":%s" , key ) , String . format ( "%s" , value ) ) ; } else if ( value instanceof BigDecimal ) { result = result . replaceAll ( String . format ( ":%s" , key ) , String . format ( "%f" , ( ( BigDecimal ) value ) . doubleValue ( ) ) ) ; } else if ( value instanceof Date ) { result = result . replaceAll ( String . format ( ":%s" , key ) , currentJavaDateFormat . format ( value ) ) . replace ( "Z" , "" ) ; } else if ( value instanceof DateTime ) { result = result . replaceAll ( String . format ( ":%s" , key ) , currentJodaDateFormat . print ( ( ( DateTime ) value ) . getMillis ( ) ) ) . replace ( "Z" , "" ) ; } else { result = result . replaceAll ( String . format ( ":%s" , key ) , value . toString ( ) ) ; } } return result ;
public class J4pBulkRemoteException { /** * Get the a list of responses for successful requests . * @ param < T > response type * @ return list of successful responses . */ public < T extends J4pResponse > List < T > getResponses ( ) { } }
List < T > ret = new ArrayList < T > ( ) ; for ( Object entry : results ) { if ( entry instanceof J4pResponse ) { ret . add ( ( T ) entry ) ; } } return ret ;
public class DataSetBuilder { /** * Set filler for column . * @ param column Column name . * @ param filler Column filler . * @ return The builder instance ( for chained calls ) . */ public DataSetBuilder set ( String column , ColumnFiller < ? > filler ) { } }
ensureArgNotNull ( column ) ; ensureArgNotNull ( filler ) ; if ( ! data . getSource ( ) . getDB ( ) . isEnabled ( DB . Option . CASE_SENSITIVE_COLUMN_NAMES ) ) { column = column . toUpperCase ( ) ; } Integer idx = columnIdx . get ( column ) ; if ( idx == null ) { throw new InvalidOperationException ( "Invalid column name: '" + column + "'." ) ; } if ( fillers [ idx ] == null ) { fillerCount ++ ; } fillers [ idx ] = filler ; return this ;
public class JideApplicationWindow {

    /**
     * Closes the window, tearing down the dockable holder to avoid memory
     * leaks (fix contributed by Mikael Valot). The teardown only runs when the
     * superclass agrees to close.
     *
     * @return true if the window was closed, false otherwise
     */
    public boolean close() {
        if (super.close()) {
            dockableHolder.dispose();
            // detach ourselves and every remaining window listener so the
            // holder cannot keep this window reachable
            dockableHolder.removeWindowFocusListener(this);
            WindowListener[] listeners = dockableHolder.getWindowListeners();
            for (WindowListener listener : listeners) {
                dockableHolder.removeWindowListener(listener);
            }
            // NOTE(review): Lm appears to be the JIDE license/layout manager;
            // clearing its parent drops its reference to this frame - confirm
            Lm.setParent(null);
            // strip all children before dropping the holder reference
            dockableHolder.removeAll();
            dockableHolder.getRootPane().removeAll();
            dockableHolder = null;
            return true;
        } else {
            return false;
        }
    }
}
public class Utils {

    /**
     * Strips the leading fence characters (<code>`</code> or <code>~</code>)
     * and surrounding whitespace from a fenced-code-block starting line,
     * returning the remaining "info string".
     *
     * @param fenceLine fenced code block starting line
     * @return the rest of the line after trimming and backtick/tilde removal,
     *         or the empty string if nothing remains
     * @since 0.7
     */
    public final static String getMetaFromFence(final String fenceLine) {
        int pos = 0;
        final int len = fenceLine.length();
        while (pos < len) {
            final char ch = fenceLine.charAt(pos);
            if (Character.isWhitespace(ch) || ch == '`' || ch == '~') {
                pos++;
            } else {
                // first meaningful character - everything from here is the meta
                return fenceLine.substring(pos).trim();
            }
        }
        return "";
    }
}
public class ServerService { /** * Modify list of servers * @ param serverList server list * @ param modifyServerConfig server config * @ return OperationFuture wrapper for list of Servers */ public OperationFuture < List < Server > > modify ( List < Server > serverList , ModifyServerConfig modifyServerConfig ) { } }
List < JobFuture > futures = serverList . stream ( ) . map ( server -> modify ( server , modifyServerConfig ) . jobFuture ( ) ) . collect ( toList ( ) ) ; return new OperationFuture < > ( serverList , new ParallelJobsFuture ( futures ) ) ;
public class AbstractRepositoryClient {

    /**
     * Returns summaries of all assets matching the given filters; no
     * {@link Attachment}s are included.
     *
     * @param types           asset types to look for, or <code>null</code> for all
     * @param productIds      product IDs to look for; should not be <code>null</code>
     * @param visibility      visibility to look for, or <code>null</code> for any
     * @param productVersions minimum-version values in the appliesToFilterInfo to match
     * @return the matching assets
     * @throws IOException
     * @throws RequestFailureException
     */
    @Override
    public Collection<Asset> getAssets(final Collection<ResourceType> types, final Collection<String> productIds, final Visibility visibility, final Collection<String> productVersions) throws IOException, RequestFailureException {
        // NOTE(review): the trailing 'false' flag's meaning is defined by
        // getFilteredAssets and is not visible here - presumably it disables
        // some extra filtering/inclusion mode; confirm against that method.
        return getFilteredAssets(types, productIds, visibility, productVersions, false);
    }
}
public class ARGBColorUtil {

    /**
     * Composites a foreground over a background color, both in 0xAARRGGBB
     * format, and returns the blended ARGB color.
     *
     * Alpha math used here (with fA, bA the normalized alphas):
     *   outAlpha   = fA + bA * (1 - fA)          (standard "over" operator)
     *   outChannel = fA * fg + bA * (1 - fA) * bg
     *
     * NOTE(review): the channels are NOT renormalized by the output alpha, as
     * the textbook non-premultiplied "over" operator would do; this matches the
     * documented original algorithm and is presumably a deliberate speed
     * trade-off - confirm before "fixing".
     *
     * @param foreground the foreground color (above)
     * @param background the background color (below)
     * @return a composition of both colors, in ARGB format
     */
    public static int composite(final int foreground, final int background) {
        // normalized alphas in [0, 1]
        double fA = getAlpha(foreground) / 255.0;
        double bA = getAlpha(background) / 255.0;
        // fast paths: fully transparent background or foreground
        if (bA <= 0.0001)
            return foreground;
        else if (fA <= 0.0001)
            return background;
        // weight of the background after the foreground has been applied
        final double alphaA = bA * (1 - fA);
        return getColor((int) (255 * (fA + alphaA)), // ALPHA
            (int) (fA * getRed(foreground) + alphaA * getRed(background)), // RED
            (int) (fA * getGreen(foreground) + alphaA * getGreen(background)), // GREEN
            (int) (fA * getBlue(foreground) + alphaA * getBlue(background))); // BLUE
    }
}
public class SeaGlassComboPopup {

    /**
     * Calculates the upper-left location of the popup and, as a side effect,
     * sizes the scroller to the computed popup bounds.
     *
     * @return the Point representing the upper-left coordinate of the popup
     */
    private Point getPopupLocation() {
        Dimension popupSize = comboBox.getSize();
        Insets insets = getInsets();
        // reduce the width of the scrollpane by the insets so that the popup
        // is the same width as the combo box
        popupSize.setSize(popupSize.width - (insets.right + insets.left),
            getPopupHeightForRowCount(getMaximumRowCount()));
        // anchor the popup directly below the combo box (y = combo height)
        Rectangle popupBounds = computePopupBounds(0, comboBox.getBounds().height,
            popupSize.width, popupSize.height);
        Dimension scrollSize = popupBounds.getSize();
        Point popupLocation = popupBounds.getLocation();
        // pin the scroller to exactly the popup size (max == pref == min)
        scroller.setMaximumSize(scrollSize);
        scroller.setPreferredSize(scrollSize);
        scroller.setMinimumSize(scrollSize);
        // force the list to re-layout with the new scroller size
        list.revalidate();
        return popupLocation;
    }
}
public class TagletWriterImpl {

    /**
     * {@inheritDoc}
     *
     * Renders one @throws tag as a definition-list entry: the (linked)
     * exception name in code font, followed by " - " and the description
     * when one is present.
     */
    public Content throwsTagOutput(Element element, DocTree throwsTag) {
        ContentBuilder body = new ContentBuilder();
        CommentHelper ch = utils.getCommentHelper(element);
        Element exception = ch.getException(configuration, throwsTag);
        Content excName;
        if (exception == null) {
            // exception type could not be resolved - fall back to the raw name text
            excName = new RawHtml(ch.getExceptionName(throwsTag).toString());
        } else if (exception.asType() == null) {
            // resolved element but no type mirror - use the fully qualified name
            excName = new RawHtml(utils.getFullyQualifiedName(exception));
        } else {
            // normal case: emit a hyperlink to the exception type
            LinkInfoImpl link = new LinkInfoImpl(configuration, LinkInfoImpl.Kind.MEMBER, exception.asType());
            link.excludeTypeBounds = true;
            excName = htmlWriter.getLink(link);
        }
        body.addContent(HtmlTree.CODE(excName));
        List<? extends DocTree> description = ch.getDescription(configuration, throwsTag);
        Content desc = htmlWriter.commentTagsToContent(throwsTag, element, description, false);
        // only append the separator when there is an actual description
        if (desc != null && !desc.isEmpty()) {
            body.addContent(" - ");
            body.addContent(desc);
        }
        HtmlTree result = HtmlTree.DD(body);
        return result;
    }
}
public class TeamsBase { /** * Returns the full record for a single team . * @ param team Globally unique identifier for the team . * @ return Request object */ public ItemRequest < Team > findById ( String team ) { } }
String path = String . format ( "/teams/%s" , team ) ; return new ItemRequest < Team > ( this , Team . class , path , "GET" ) ;
public class MSPDIReader {

    /**
     * Copies every project-level property from the MSPDI root node into this
     * reader's {@link ProjectProperties}, converting XML types to MPXJ types
     * via the BooleanHelper/NumberHelper/DatatypeConverter utilities, then
     * resolves the schedule source.
     *
     * @param project root node of the MSPDI file
     */
    private void readProjectProperties(Project project) {
        ProjectProperties properties = m_projectFile.getProjectProperties();
        // -- flags and identification -------------------------------------
        properties.setActualsInSync(BooleanHelper.getBoolean(project.isActualsInSync()));
        properties.setAdminProject(BooleanHelper.getBoolean(project.isAdminProject()));
        properties.setApplicationVersion(NumberHelper.getInteger(project.getSaveVersion()));
        properties.setAuthor(project.getAuthor());
        properties.setAutoAddNewResourcesAndTasks(BooleanHelper.getBoolean(project.isAutoAddNewResourcesAndTasks()));
        properties.setAutolink(BooleanHelper.getBoolean(project.isAutolink()));
        properties.setBaselineForEarnedValue(NumberHelper.getInteger(project.getBaselineForEarnedValue()));
        // calendar UID doubles as the default calendar name here
        properties.setDefaultCalendarName(project.getCalendarUID() == null ? null : project.getCalendarUID().toString());
        properties.setCategory(project.getCategory());
        properties.setCompany(project.getCompany());
        properties.setCreationDate(project.getCreationDate());
        properties.setCriticalSlackLimit(NumberHelper.getInteger(project.getCriticalSlackLimit()));
        // -- currency ------------------------------------------------------
        properties.setCurrencyDigits(NumberHelper.getInteger(project.getCurrencyDigits()));
        properties.setCurrencyCode(project.getCurrencyCode());
        properties.setCurrencySymbol(project.getCurrencySymbol());
        properties.setCurrentDate(project.getCurrentDate());
        properties.setDaysPerMonth(NumberHelper.getInteger(project.getDaysPerMonth()));
        // -- defaults ------------------------------------------------------
        properties.setDefaultDurationUnits(DatatypeConverter.parseDurationTimeUnits(project.getDurationFormat()));
        properties.setDefaultEndTime(project.getDefaultFinishTime());
        properties.setDefaultFixedCostAccrual(project.getDefaultFixedCostAccrual());
        properties.setDefaultOvertimeRate(DatatypeConverter.parseRate(project.getDefaultOvertimeRate()));
        properties.setDefaultStandardRate(DatatypeConverter.parseRate(project.getDefaultStandardRate()));
        properties.setDefaultStartTime(project.getDefaultStartTime());
        properties.setDefaultTaskEarnedValueMethod(DatatypeConverter.parseEarnedValueMethod(project.getDefaultTaskEVMethod()));
        properties.setDefaultTaskType(project.getDefaultTaskType());
        properties.setDefaultWorkUnits(DatatypeConverter.parseWorkUnits(project.getWorkFormat()));
        // -- earned value / editing ---------------------------------------
        properties.setEarnedValueMethod(DatatypeConverter.parseEarnedValueMethod(project.getEarnedValueMethod()));
        properties.setEditableActualCosts(BooleanHelper.getBoolean(project.isEditableActualCosts()));
        properties.setExtendedCreationDate(project.getExtendedCreationDate());
        properties.setFinishDate(project.getFinishDate());
        properties.setFiscalYearStart(BooleanHelper.getBoolean(project.isFiscalYearStart()));
        properties.setFiscalYearStartMonth(NumberHelper.getInteger(project.getFYStartDate()));
        properties.setHonorConstraints(BooleanHelper.getBoolean(project.isHonorConstraints()));
        properties.setInsertedProjectsLikeSummary(BooleanHelper.getBoolean(project.isInsertedProjectsLikeSummary()));
        properties.setLastSaved(project.getLastSaved());
        properties.setManager(project.getManager());
        properties.setMicrosoftProjectServerURL(BooleanHelper.getBoolean(project.isMicrosoftProjectServerURL()));
        // -- time granularity ---------------------------------------------
        properties.setMinutesPerDay(NumberHelper.getInteger(project.getMinutesPerDay()));
        properties.setMinutesPerWeek(NumberHelper.getInteger(project.getMinutesPerWeek()));
        // -- scheduling behaviour -----------------------------------------
        properties.setMoveCompletedEndsBack(BooleanHelper.getBoolean(project.isMoveCompletedEndsBack()));
        properties.setMoveCompletedEndsForward(BooleanHelper.getBoolean(project.isMoveCompletedEndsForward()));
        properties.setMoveRemainingStartsBack(BooleanHelper.getBoolean(project.isMoveRemainingStartsBack()));
        properties.setMoveRemainingStartsForward(BooleanHelper.getBoolean(project.isMoveRemainingStartsForward()));
        properties.setMultipleCriticalPaths(BooleanHelper.getBoolean(project.isMultipleCriticalPaths()));
        properties.setName(project.getName());
        properties.setNewTasksEffortDriven(BooleanHelper.getBoolean(project.isNewTasksEffortDriven()));
        properties.setNewTasksEstimated(BooleanHelper.getBoolean(project.isNewTasksEstimated()));
        // a NewTaskStartDate value of 0 means "new tasks start at project start"
        properties.setNewTaskStartIsProjectStart(NumberHelper.getInt(project.getNewTaskStartDate()) == 0);
        properties.setProjectExternallyEdited(BooleanHelper.getBoolean(project.isProjectExternallyEdited()));
        properties.setProjectTitle(project.getTitle());
        properties.setRemoveFileProperties(BooleanHelper.getBoolean(project.isRemoveFileProperties()));
        properties.setRevision(NumberHelper.getInteger(project.getRevision()));
        properties.setScheduleFrom(BooleanHelper.getBoolean(project.isScheduleFromStart()) ? ScheduleFrom.START : ScheduleFrom.FINISH);
        properties.setSubject(project.getSubject());
        properties.setSplitInProgressTasks(BooleanHelper.getBoolean(project.isSplitsInProgressTasks()));
        properties.setSpreadActualCost(BooleanHelper.getBoolean(project.isSpreadActualCost()));
        properties.setSpreadPercentComplete(BooleanHelper.getBoolean(project.isSpreadPercentComplete()));
        properties.setStartDate(project.getStartDate());
        properties.setStatusDate(project.getStatusDate());
        properties.setSymbolPosition(project.getCurrencySymbolPosition());
        properties.setUniqueID(project.getUID());
        properties.setUpdatingTaskStatusUpdatesResourceStatus(BooleanHelper.getBoolean(project.isTaskUpdatesResource()));
        properties.setWeekStartDay(DatatypeConverter.parseDay(project.getWeekStartDay()));
        // derive where the schedule data originated from the populated properties
        updateScheduleSource(properties);
    }
}
public class DiscoveryModule {

    /**
     * Requests that the annotated DruidNode instance be injected and published
     * as part of the lifecycle: the DruidNode bound under
     * {@code Key.get(DruidNode.class, annotation)} will be announced
     * automatically during the ANNOUNCEMENTS stage.
     *
     * @param annotation the annotation instance used to locate the DruidNode
     *                   binding, usually a Named annotation
     */
    public static void register(Binder binder, Annotation annotation) {
        // anonymous TypeLiteral subclass captures DruidNode as the bound type
        registerKey(binder, Key.get(new TypeLiteral<DruidNode>() {}, annotation));
    }
}
public class DateTimeParserBucket {

    /**
     * Computes the parsed datetime by applying the saved fields, in field
     * order, onto the base millis. Idempotent but not thread-safe.
     *
     * @param resetFields when true, unsaved field values are cleared
     * @param text        optional text being parsed, included in error messages
     * @return milliseconds since 1970-01-01T00:00:00Z
     * @throws IllegalArgumentException if any field is out of range
     * @since 2.4
     */
    public long computeMillis(boolean resetFields, CharSequence text) {
        SavedField[] savedFields = iSavedFields;
        int count = iSavedFieldsCount;
        if (iSavedFieldsShared) {
            // clone so that sort does not affect saved state
            iSavedFields = savedFields = (SavedField[]) iSavedFields.clone();
            iSavedFieldsShared = false;
        }
        // fields must be applied from largest to smallest duration
        sort(savedFields, count);
        if (count > 0) {
            // alter base year for parsing if first field is month or day:
            // inject the default year and recurse once so month/day resolve
            // against the intended year
            DurationField months = DurationFieldType.months().getField(iChrono);
            DurationField days = DurationFieldType.days().getField(iChrono);
            DurationField first = savedFields[0].iField.getDurationField();
            if (compareReverse(first, months) >= 0 && compareReverse(first, days) <= 0) {
                saveField(DateTimeFieldType.year(), iDefaultYear);
                return computeMillis(resetFields, text);
            }
        }
        long millis = iMillis;
        try {
            for (int i = 0; i < count; i++) {
                millis = savedFields[i].set(millis, resetFields);
            }
            if (resetFields) {
                // second pass: re-apply strict fields so out-of-range values
                // are validated; only the last field is set leniently
                for (int i = 0; i < count; i++) {
                    if (!savedFields[i].iField.isLenient()) {
                        millis = savedFields[i].set(millis, i == (count - 1));
                    }
                }
            }
        } catch (IllegalFieldValueException e) {
            // enrich the error with the text being parsed, when available
            if (text != null) {
                e.prependMessage("Cannot parse \"" + text + '"');
            }
            throw e;
        }
        if (iOffset != null) {
            // an explicit parsed offset wins over the bucket's zone
            millis -= iOffset;
        } else if (iZone != null) {
            int offset = iZone.getOffsetFromLocal(millis);
            millis -= offset;
            // detect local times that fall inside a DST gap: the offset used
            // must round-trip, otherwise the instant does not exist
            if (offset != iZone.getOffset(millis)) {
                String message = "Illegal instant due to time zone offset transition (" + iZone + ')';
                if (text != null) {
                    message = "Cannot parse \"" + text + "\": " + message;
                }
                throw new IllegalInstantException(message);
            }
        }
        return millis;
    }
}
public class WebhooksInner {

    /**
     * Updates the webhook identified by webhook name, asynchronously.
     *
     * @param resourceGroupName     name of an Azure resource group
     * @param automationAccountName the name of the automation account
     * @param webhookName           the webhook name
     * @param parameters            the update parameters for the webhook
     * @param serviceCallback       the async ServiceCallback to handle successful and failed responses
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<WebhookInner> updateAsync(String resourceGroupName, String automationAccountName, String webhookName, WebhookUpdateParameters parameters, final ServiceCallback<WebhookInner> serviceCallback) {
        // adapt the Observable-based overload to the callback-style API
        return ServiceFuture.fromResponse(updateWithServiceResponseAsync(resourceGroupName, automationAccountName, webhookName, parameters), serviceCallback);
    }
}
public class Async {

    /**
     * Converts a synchronous function call into an asynchronous one backed by
     * an {@link AsyncSubject}: each invocation of the returned function
     * schedules {@code func} on the given scheduler and emits its single
     * result (or error) to every subscriber.
     *
     * @param <R>       the result type
     * @param func      the function to convert
     * @param scheduler the Scheduler used to call the {@code func}
     * @return a function returning an Observable that executes {@code func}
     *         and emits its returned value
     * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Async-Operators#wiki-toasync-or-asyncaction-or-asyncfunc">RxJava Wiki: toAsync()</a>
     */
    public static <R> FuncN<Observable<R>> toAsync(final FuncN<? extends R> func, final Scheduler scheduler) {
        return new FuncN<Observable<R>>() {
            @Override
            public Observable<R> call(final Object... args) {
                // AsyncSubject caches the last value and replays it on completion,
                // so late subscribers still receive the result
                final AsyncSubject<R> subject = AsyncSubject.create();
                final Worker inner = scheduler.createWorker();
                inner.schedule(new Action0() {
                    @Override
                    public void call() {
                        R result;
                        try {
                            result = func.call(args);
                        } catch (Throwable t) {
                            subject.onError(t);
                            return;
                        } finally {
                            // release the worker whether func succeeded or failed;
                            // the subject itself is unaffected by the unsubscribe
                            inner.unsubscribe();
                        }
                        subject.onNext(result);
                        subject.onCompleted();
                    }
                });
                return subject;
            }
        };
    }
}
public class FactoryFinderInstance { /** * < p > Perform the logic to get the implementation class for the * second step of { @ link FactoryFinder # getImplementationInstance ( ClassLoader , String , java . util . List ) } . < / p > */ private List < String > getImplNameFromServices ( ClassLoader classLoader , String factoryName ) { } }
// Check for a services definition List < String > result = null ; String resourceName = "META-INF/services/" + factoryName ; InputStream stream ; BufferedReader reader = null ; try { Enumeration < URL > e = classLoader . getResources ( resourceName ) ; while ( e . hasMoreElements ( ) ) { URL url = e . nextElement ( ) ; URLConnection conn = url . openConnection ( ) ; conn . setUseCaches ( false ) ; stream = conn . getInputStream ( ) ; if ( stream != null ) { // Deal with systems whose native encoding is possibly // different from the way that the services entry was created try { reader = new BufferedReader ( new InputStreamReader ( stream , "UTF-8" ) ) ; if ( result == null ) { result = new ArrayList < String > ( 3 ) ; } result . add ( reader . readLine ( ) ) ; } catch ( UnsupportedEncodingException uee ) { // The DM _ DEFAULT _ ENCODING warning is acceptable here // because we explicitly * want * to use the Java runtime ' s // default encoding . reader = new BufferedReader ( new InputStreamReader ( stream ) ) ; } finally { if ( reader != null ) { reader . close ( ) ; reader = null ; } if ( stream != null ) { stream . close ( ) ; // noinspection UnusedAssignment stream = null ; } } } } } catch ( IOException e ) { if ( LOGGER . isLoggable ( Level . SEVERE ) ) { LOGGER . log ( Level . SEVERE , e . toString ( ) , e ) ; } } catch ( SecurityException e ) { if ( LOGGER . isLoggable ( Level . SEVERE ) ) { LOGGER . log ( Level . SEVERE , e . toString ( ) , e ) ; } } return result ;
public class ElementMatchers {

    /**
     * Matches a {@link ModifierReviewable.OfAbstraction} that is {@code abstract}.
     *
     * @param <T> the type of the matched object
     * @return a matcher for an {@code abstract} modifier reviewable
     */
    public static <T extends ModifierReviewable.OfAbstraction> ElementMatcher.Junction<T> isAbstract() {
        // delegates to the shared modifier matcher with the ABSTRACT mode
        return new ModifierMatcher<T>(ModifierMatcher.Mode.ABSTRACT);
    }
}
public class Depiction {

    /**
     * Resolves a leading tilde to the user's home directory, Unix style.
     * Handles both the bare {@code "~"} path and {@code "~/..."} prefixes
     * (the bare form was previously returned unchanged); any other path is
     * returned as-is.
     *
     * @param path the file system path
     * @return the normalised path
     */
    private static String replaceTildeWithHomeDir(String path) {
        final String home = System.getProperty("user.home");
        if (path.equals("~")) {
            // bare tilde refers to the home directory itself
            return home;
        }
        if (path.startsWith("~/")) {
            // keep the separator: "~/x" -> "<home>/x"
            return home + path.substring(1);
        }
        return path;
    }
}
public class ContinuousDistributions {

    /**
     * Cumulative distribution function of the standard normal: the area under
     * the curve from -inf to z, computed with ACM Algorithm #209.
     * Ported from C# code posted at
     * http://jamesmccaffrey.wordpress.com/2010/11/05/programmatically-computing-the-area-under-the-normal-curve/
     * e.g. gaussCdf(0.0) == 0.5.
     *
     * @param z z-value (-inf to +inf)
     * @return p under the normal curve from -inf to z
     */
    public static double gaussCdf(double z) {
        double p; // two-sided area for |z|, per ACM 209
        if (z == 0.0) {
            p = 0.0;
        } else {
            double y = Math.abs(z) / 2.0; // 209 works on half the absolute z
            if (y >= 3.0) {
                // far tail: area is effectively 1
                p = 1.0;
            } else if (y < 1.0) {
                // small-argument polynomial in w = y^2
                double w = y * y;
                p = ((((((((0.000124818987 * w - 0.001075204047) * w + 0.005198775019) * w - 0.019198292004) * w + 0.059054035642) * w - 0.151968751364) * w + 0.319152932694) * w - 0.531923007300) * w + 0.797884560593) * y * 2.0;
            } else {
                // mid-range polynomial centred at y = 2
                y = y - 2.0;
                p = (((((((((((((-0.000045255659 * y + 0.000152529290) * y - 0.000019538132) * y - 0.000676904986) * y + 0.001390604284) * y - 0.000794620820) * y - 0.002034254874) * y + 0.006549791214) * y - 0.010557625006) * y + 0.011630447319) * y - 0.009279453341) * y + 0.005353579108) * y - 0.002141268741) * y + 0.000535310849) * y + 0.999936657524;
            }
        }
        // fold the symmetric two-sided area back into a one-sided CDF
        return z > 0.0 ? (p + 1.0) / 2.0 : (1.0 - p) / 2.0;
    }
}
public class FleetsApi { /** * Create fleet squad ( asynchronously ) Create a new squad in a fleet - - - SSO * Scope : esi - fleets . write _ fleet . v1 * @ param fleetId * ID for a fleet ( required ) * @ param wingId * The wing _ id to create squad in ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param token * Access token to use if unable to set a header ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call postFleetsFleetIdWingsWingIdSquadsAsync ( Long fleetId , Long wingId , String datasource , String token , final ApiCallback < FleetSquadCreatedResponse > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = postFleetsFleetIdWingsWingIdSquadsValidateBeforeCall ( fleetId , wingId , datasource , token , callback ) ; Type localVarReturnType = new TypeToken < FleetSquadCreatedResponse > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class Util {

    /**
     * Utility function to combine a hash key and range key. Hash/range key
     * pairs are expected to be persisted in the following byte format:
     * <pre>
     * [4 byte hash key length]
     * [arbitrary hash key bytes]
     * [4 byte range key length]
     * [arbitrary range key bytes]
     * </pre>
     * Length prefixes are written big-endian (the {@link ByteBuffer} default),
     * matching the original layout.
     *
     * @param hashKeyBytes Are the hash key's bytes.
     * @param rangeKeyBytes Are the range key's bytes.
     * @return Returns a byte array defined by the format above.
     */
    public static byte[] combine(byte[] hashKeyBytes, byte[] rangeKeyBytes) {
        // Build the result in a single buffer instead of allocating two
        // throwaway 4-byte ByteBuffers and issuing four System.arraycopy calls.
        return ByteBuffer.allocate(8 + hashKeyBytes.length + rangeKeyBytes.length)
                .putInt(hashKeyBytes.length)
                .put(hashKeyBytes)
                .putInt(rangeKeyBytes.length)
                .put(rangeKeyBytes)
                .array();
    }
}
public class FixedBucketsHistogram { /** * Write a serialization header containing the serde version byte and full / sparse encoding mode byte . * This header is not needed when serializing the histogram for localized internal use within the * buffer aggregator implementation . * @ param buf Destination buffer * @ param mode Full or sparse mode */ private void writeByteBufferSerdeHeader ( ByteBuffer buf , byte mode ) { } }
buf . put ( SERIALIZATION_VERSION ) ; buf . put ( mode ) ;