signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class JobTracker { /** * Adds a new node to the jobtracker . It involves adding it to the expiry
* thread and adding it for resolution
* Assumes JobTracker , taskTrackers and trackerExpiryQueue is locked on entry
* @ param taskTracker Task Tracker */
void addNewTracker ( TaskTracker taskTracker ) { } } | TaskTrackerStatus status = taskTracker . getStatus ( ) ; trackerExpiryQueue . add ( status ) ; // Register the tracker if its not registered
String hostname = status . getHost ( ) ; if ( getNode ( status . getTrackerName ( ) ) == null ) { // Making the network location resolution inline . .
resolveAndAddToTopology ( hostname ) ; } // add it to the set of tracker per host
Set < TaskTracker > trackers = hostnameToTaskTracker . get ( hostname ) ; if ( trackers == null ) { trackers = Collections . synchronizedSet ( new HashSet < TaskTracker > ( ) ) ; hostnameToTaskTracker . put ( hostname , trackers ) ; } statistics . taskTrackerAdded ( status . getTrackerName ( ) ) ; getInstrumentation ( ) . addTrackers ( 1 ) ; LOG . info ( "Adding tracker " + status . getTrackerName ( ) + " to host " + hostname ) ; trackers . add ( taskTracker ) ; |
public class LocalSubscriptionControl { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . runtime . SIMPLocalSubscriptionControllable # getNumberOfQueuedMessages ( ) */
public long getNumberOfQueuedMessages ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getNumberOfQueuedMessages" ) ; long total = 0 ; try { total = referenceItemStream . getStatistics ( ) . getTotalItemCount ( ) ; } catch ( MessageStoreException e ) { FFDCFilter . processException ( e , "com.ibm.ws.sib.processor.runtime.LocalSubscriptionControl.getNumberOfQueuedMessages" , "1:422:1.36" , this ) ; SibTr . exception ( tc , e ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "getNumberOfQueuedMessages" , new Long ( total ) ) ; return total ; |
public class IndentedPrintWriter { /** * - - - Override PrintWriter methods to return IndentedPrintWriter . */
@ Override public IndentedPrintWriter printf ( String format , Object ... args ) { } } | super . format ( format , args ) ; return this ; |
public class hqlLexer { /** * $ ANTLR start " ASCENDING " */
public final void mASCENDING ( ) throws RecognitionException { } } | try { int _type = ASCENDING ; int _channel = DEFAULT_TOKEN_CHANNEL ; // hql . g : 11:11 : ( ' asc ' )
// hql . g : 11:13 : ' asc '
{ match ( "asc" ) ; if ( state . failed ) return ; } state . type = _type ; state . channel = _channel ; } finally { // do for sure before leaving
} |
public class TextColumnExporter { /** * Implements the abstract method of the [ SaltBasedExporter ] ( \ ref
* annis . gui . exporter . SaltBasedExporter ) . This method creates and fills an
* adjacency matrix of dimension ( nodeCount x nodeCount ) , which keeps the
* relative order of match numbers to each other of each query result line . A
* result line is a part of a record , which belongs to a speaker .
* The adjacency matrix is a global two - dimensional array of integers , which
* allows to recognize the valid order of match numbers globally , after all
* query results are processed .
* @ param graph an org . corpus _ tools . salt . common . SDocumentGraph
* representation of a record
* @ param args a map containing parameters like ' filter ' or ' metakeys ' ,
* set by user
* @ param recordNumber the number of record within the record set returned for
* the user query
* @ param nodeCount the count of distinct match numbers in the whole record
* set returned for the user query */
@ Override public void createAdjacencyMatrix ( SDocumentGraph graph , Map < String , String > args , int recordNumber , int nodeCount ) throws IOException { } } | String currSpeakerName = "" ; String prevSpeakerName = "" ; List < Long > matchNumbersOrdered = new ArrayList < Long > ( ) ; // if new search , reset adjacencyMatrix , extract parameters , set by user
if ( recordNumber == 0 ) { speakerHasMatches . clear ( ) ; speakerName = "" ; tokenToMatchNumber . clear ( ) ; filterNumbersSetByUser . clear ( ) ; filterNumbersIsEmpty = true ; listOfMetakeys . clear ( ) ; adjacencyMatrix = new int [ nodeCount ] [ nodeCount ] ; matrixIsFilled = false ; singleMatchesGlobal . clear ( ) ; orderedMatchNumbersGlobal . clear ( ) ; matchNumbersGlobal . clear ( ) ; dataIsAlignable = true ; maxMatchesPerLine = 0 ; // initialize adjacency matrix
for ( int i = 0 ; i < adjacencyMatrix . length ; i ++ ) { for ( int j = 0 ; j < adjacencyMatrix [ 0 ] . length ; j ++ ) { adjacencyMatrix [ i ] [ j ] = - 1 ; } } // extract filter numbers , if set
if ( args . containsKey ( FILTER_PARAMETER_KEYWORD ) ) { String parameters = args . get ( FILTER_PARAMETER_KEYWORD ) ; String [ ] numbers = parameters . split ( PARAMETER_SEPARATOR ) ; for ( int i = 0 ; i < numbers . length ; i ++ ) { try { Long number = Long . parseLong ( numbers [ i ] ) ; filterNumbersSetByUser . add ( number ) ; } catch ( NumberFormatException e ) { ; } } } if ( ! filterNumbersSetByUser . isEmpty ( ) ) { filterNumbersIsEmpty = false ; } // extract metakeys
if ( args . containsKey ( METAKEYS_KEYWORD ) ) { String parameters = args . get ( METAKEYS_KEYWORD ) ; String [ ] metakeys = parameters . split ( PARAMETER_SEPARATOR ) ; for ( int i = 0 ; i < metakeys . length ; i ++ ) { String metakey = metakeys [ i ] . trim ( ) ; listOfMetakeys . add ( metakey ) ; } } } // end extraction of parameters set by user
if ( graph != null ) { List < SToken > orderedToken = graph . getSortedTokenByText ( ) ; // iterate over all token
if ( orderedToken != null ) { // reset counter over all the tokens
if ( recordNumber == 0 ) { counterGlobal = 0 ; } // iterate first time over tokens to figure out which speaker has matches and to
// recognize the hierarchical structure of matches as well
for ( SToken token : orderedToken ) { counterGlobal ++ ; String name ; if ( ( name = CommonHelper . getTextualDSForNode ( token , graph ) . getName ( ) ) == null ) { name = "" ; } speakerName = ( recordNumber + 1 ) + "_" + name ; currSpeakerName = speakerName ; // reset data structures for new speaker
if ( ! currSpeakerName . equals ( prevSpeakerName ) ) { matchNumbersOrdered . clear ( ) ; } if ( ! speakerHasMatches . containsKey ( currSpeakerName ) ) { speakerHasMatches . put ( currSpeakerName , false ) ; } List < SNode > root = new LinkedList < > ( ) ; root . add ( token ) ; IsDominatedByMatch traverserSpeakerSearch = new IsDominatedByMatch ( ) ; // reset list
dominatedMatchCodes . clear ( ) ; graph . traverse ( root , GRAPH_TRAVERSE_TYPE . BOTTOM_UP_DEPTH_FIRST , TRAV_PREPROCESSING , traverserSpeakerSearch ) ; if ( ! dominatedMatchCodes . isEmpty ( ) ) { // if filter numbers not set by user , take the number of the highest match node
if ( filterNumbersIsEmpty ) { tokenToMatchNumber . put ( counterGlobal , dominatedMatchCodes . get ( dominatedMatchCodes . size ( ) - 1 ) ) ; // set filter number to the ordered list
if ( ! matchNumbersOrdered . contains ( dominatedMatchCodes . get ( dominatedMatchCodes . size ( ) - 1 ) ) ) { matchNumbersOrdered . add ( dominatedMatchCodes . get ( dominatedMatchCodes . size ( ) - 1 ) ) ; } } else { // take the highest match code , which is present in filterNumbers
boolean filterNumberFound = false ; for ( int i = dominatedMatchCodes . size ( ) - 1 ; i >= 0 ; i -- ) { if ( filterNumbersSetByUser . contains ( dominatedMatchCodes . get ( i ) ) ) { tokenToMatchNumber . put ( counterGlobal , dominatedMatchCodes . get ( i ) ) ; if ( ! matchNumbersOrdered . contains ( dominatedMatchCodes . get ( i ) ) ) { if ( ! filterNumberFound ) { matchNumbersOrdered . add ( dominatedMatchCodes . get ( i ) ) ; filterNumberFound = true ; } } break ; } } } // reset maxMatchesPerLine
if ( maxMatchesPerLine < matchNumbersOrdered . size ( ) ) { maxMatchesPerLine = matchNumbersOrdered . size ( ) ; } // fill the adjacency matrix
if ( matchNumbersOrdered . size ( ) > 1 ) { Iterator < Long > it = matchNumbersOrdered . iterator ( ) ; int prev = Integer . parseInt ( String . valueOf ( ( Long ) it . next ( ) ) ) ; matchNumbersGlobal . add ( prev ) ; while ( it . hasNext ( ) ) { int curr = Integer . parseInt ( String . valueOf ( ( Long ) it . next ( ) ) ) ; matchNumbersGlobal . add ( curr ) ; adjacencyMatrix [ prev - 1 ] [ curr - 1 ] = 1 ; matrixIsFilled = true ; prev = curr ; } } else { matchNumbersGlobal . add ( Integer . parseInt ( String . valueOf ( matchNumbersOrdered . get ( 0 ) ) ) ) ; singleMatchesGlobal . add ( matchNumbersOrdered . get ( 0 ) ) ; } } // set previous speaker name
prevSpeakerName = currSpeakerName ; } } } |
public class HttpChannelConfig { /** * Check the input configuration for the maximum buffer size allowed for
* marshalling headers outbound .
* @ param props */
private void parseOutgoingBufferSize ( Map < Object , Object > props ) { } } | Object value = props . get ( HttpConfigConstants . PROPNAME_OUTGOING_HDR_BUFFSIZE ) ; if ( null != value ) { try { this . outgoingHdrBuffSize = rangeLimit ( convertInteger ( value ) , HttpConfigConstants . MIN_BUFFER_SIZE , HttpConfigConstants . MAX_BUFFER_SIZE ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Config: Outgoing hdr buffer size is " + getOutgoingHdrBufferSize ( ) ) ; } } catch ( NumberFormatException nfe ) { FFDCFilter . processException ( nfe , getClass ( ) . getName ( ) + ".parseOutgoingBufferSize" , "1" ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEventEnabled ( ) ) { Tr . event ( tc , "Config: Invalid outgoing header buffer size; " + value ) ; } } } |
public class IdentifierSet { /** * Returns true if { @ code identifier } or any of its enclosing identifiers is excluded . */
private boolean exclude ( String identifier ) { } } | String excludeMatch = null ; for ( String rule = identifier ; rule != null ; rule = enclosing ( rule ) ) { if ( excludes . contains ( rule ) ) { excludeMatch = rule ; } } if ( excludeMatch != null ) { usedExcludes . add ( excludeMatch ) ; return true ; } return false ; |
public class SelectOnUpdateHandler { /** * Called when a change is the record status is about to happen / has happened .
* @ param field If this file change is due to a field , this is the field .
* @ param changeType The type of change that occurred .
* @ param bDisplayOption If true , display any changes .
* @ return an error code .
* Synchronize records after an update or add . */
public int doRecordChange ( FieldInfo field , int iChangeType , boolean bDisplayOption ) { } } | // Read a valid record
int iErrorCode = super . doRecordChange ( field , iChangeType , bDisplayOption ) ; // Initialize the record
if ( iErrorCode != DBConstants . NORMAL_RETURN ) return iErrorCode ; if ( ( iChangeType == DBConstants . AFTER_UPDATE_TYPE ) || ( iChangeType == DBConstants . AFTER_ADD_TYPE ) ) return this . syncRecords ( ) ; return iErrorCode ; |
public class ServiceReferenceIterable { /** * < p > iterator . < / p >
* @ return a { @ link java . util . Iterator } object . */
public Iterator < T > iterator ( ) { } } | Collection < ServiceReference < T > > fetchReferences = fetchReferences ( ) ; return new ServiceReferenceIterator < T > ( fetchReferences . iterator ( ) , bundleContext ) ; |
public class ObjectFactory { /** * Create an instance of { @ link JAXBElement } { @ code < } { @ link CurveArrayPropertyType } { @ code > }
* @ param value
* Java instance representing xml element ' s value .
* @ return
* the new instance of { @ link JAXBElement } { @ code < } { @ link CurveArrayPropertyType } { @ code > } */
@ XmlElementDecl ( namespace = "http://www.opengis.net/gml" , name = "curveMembers" ) public JAXBElement < CurveArrayPropertyType > createCurveMembers ( CurveArrayPropertyType value ) { } } | return new JAXBElement < CurveArrayPropertyType > ( _CurveMembers_QNAME , CurveArrayPropertyType . class , null , value ) ; |
public class ExecutionGraph { /** * Creates the initial edges between the group vertices
* @ param vertexMap
* the temporary vertex map
* @ throws GraphConversionException
* if the initial wiring cannot be created */
private void createInitialGroupEdges ( final HashMap < AbstractJobVertex , ExecutionVertex > vertexMap ) throws GraphConversionException { } } | Iterator < Map . Entry < AbstractJobVertex , ExecutionVertex > > it = vertexMap . entrySet ( ) . iterator ( ) ; while ( it . hasNext ( ) ) { final Map . Entry < AbstractJobVertex , ExecutionVertex > entry = it . next ( ) ; final AbstractJobVertex sjv = entry . getKey ( ) ; final ExecutionVertex sev = entry . getValue ( ) ; final ExecutionGroupVertex sgv = sev . getGroupVertex ( ) ; // First compare number of output gates
if ( sjv . getNumberOfForwardConnections ( ) != sgv . getEnvironment ( ) . getNumberOfOutputGates ( ) ) { throw new GraphConversionException ( "Job and execution vertex " + sjv . getName ( ) + " have different number of outputs" ) ; } if ( sjv . getNumberOfBackwardConnections ( ) != sgv . getEnvironment ( ) . getNumberOfInputGates ( ) ) { throw new GraphConversionException ( "Job and execution vertex " + sjv . getName ( ) + " have different number of inputs" ) ; } // First , build the group edges
for ( int i = 0 ; i < sjv . getNumberOfForwardConnections ( ) ; ++ i ) { final JobEdge edge = sjv . getForwardConnection ( i ) ; final AbstractJobVertex tjv = edge . getConnectedVertex ( ) ; final ExecutionVertex tev = vertexMap . get ( tjv ) ; final ExecutionGroupVertex tgv = tev . getGroupVertex ( ) ; // Use NETWORK as default channel type if nothing else is defined by the user
ChannelType channelType = edge . getChannelType ( ) ; boolean userDefinedChannelType = true ; if ( channelType == null ) { userDefinedChannelType = false ; channelType = ChannelType . NETWORK ; } final DistributionPattern distributionPattern = edge . getDistributionPattern ( ) ; // Connect the corresponding group vertices and copy the user settings from the job edge
final ExecutionGroupEdge groupEdge = sgv . wireTo ( tgv , edge . getIndexOfInputGate ( ) , i , channelType , userDefinedChannelType , distributionPattern ) ; final ExecutionGate outputGate = new ExecutionGate ( new GateID ( ) , sev , groupEdge , false ) ; sev . insertOutputGate ( i , outputGate ) ; final ExecutionGate inputGate = new ExecutionGate ( new GateID ( ) , tev , groupEdge , true ) ; tev . insertInputGate ( edge . getIndexOfInputGate ( ) , inputGate ) ; } } |
public class TransportNegotiator { /** * Set the best local transport candidate we have offered and accepted by
* the other endpoint .
* @ param bestLocalCandidate the acceptedLocalCandidate to set */
private void setAcceptedLocalCandidate ( TransportCandidate bestLocalCandidate ) { } } | for ( int i = 0 ; i < resolver . getCandidateCount ( ) ; i ++ ) { // TODO FIX The EQUAL Sentence
if ( resolver . getCandidate ( i ) . getIp ( ) . equals ( bestLocalCandidate . getIp ( ) ) && resolver . getCandidate ( i ) . getPort ( ) == bestLocalCandidate . getPort ( ) ) { acceptedLocalCandidate = resolver . getCandidate ( i ) ; return ; } } LOGGER . fine ( "BEST: ip=" + bestLocalCandidate . getIp ( ) + " port=" + bestLocalCandidate . getPort ( ) + " has not been offered." ) ; // throw new XMPPException ( " Local transport candidate has not be offered . " ) ; |
public class JsonReport { /** * Output file for the report file . The name of the output file
* will also trigger how the report is written . If the name of the
* output file ends with " . htm ( l ) ? " then the output file is a HTML
* file and CSS / JS scaffolding is also written to visualize the JSON
* model .
* If the name of the file ends with " . json ( p ) ? " a JSON file is written . */
public void setFile ( File file ) { } } | String fileName = file . getName ( ) . toLowerCase ( Locale . ROOT ) ; if ( fileName . matches ( ".*\\.htm(l)?$" ) ) { method = OutputMethod . HTML ; } else { if ( fileName . matches ( ".*\\.jsonp" ) ) { method = OutputMethod . JSONP ; } else { method = OutputMethod . JSON ; } } this . targetFile = file ; |
public class Parameter { /** * ステートメントへ入力パラメータ値をバインド 。
* @ param preparedStatement ステートメント
* @ param index パラメータインデックス
* @ param parameterMapperManager パラメータ変換管理クラス
* @ return 次のパラメータインデックス
* @ throws SQLException SQL例外 */
protected int setInParameter ( final PreparedStatement preparedStatement , final int index , final BindParameterMapperManager parameterMapperManager ) throws SQLException { } } | int parameterIndex = index ; if ( value instanceof List ) { for ( Object e : ( List < ? > ) value ) { setParameterObject ( preparedStatement , parameterIndex , e , parameterMapperManager ) ; parameterLog ( parameterIndex ) ; parameterIndex ++ ; } } else { setParameterObject ( preparedStatement , parameterIndex , value , parameterMapperManager ) ; parameterLog ( parameterIndex ) ; parameterIndex ++ ; } return parameterIndex ; |
public class ProcScope { /** * Парсит тект с BB - кодами
* @ param context the parsing context
* @ return true if parsing is success . False otherwise , If count of codes in text is not enough , for example .
* @ throws NestingException if nesting is too big . */
public boolean process ( Context context ) throws NestingException { } } | Source source = context . getSource ( ) ; int count = 0 ; while ( source . hasNext ( ) && ( strong || context . hasNextAdjustedForTerminator ( ) ) && ( max < 0 || count < max ) ) { int offset = source . getOffset ( ) ; boolean parsed = false ; if ( ( source . nextMayBeConstant ( ) || hasCrazyCode ) && ! context . checkBadTag ( offset ) ) { boolean suspicious = false ; for ( ProcCode code : cachedCodes ) { if ( code . suspicious ( context ) ) { suspicious = true ; if ( code . process ( context ) ) { parsed = true ; break ; } } } if ( suspicious && ! parsed && ! hasCheck ) { context . addBadTag ( offset ) ; } } if ( ! parsed ) { if ( strong ) { // If scope is strong and has not a code from scope then stop the scope processing
break ; } else if ( ignoreText ) { source . incOffset ( ) ; } else { try { context . getTarget ( ) . append ( source . next ( ) ) ; } catch ( IOException e ) { // Nothing ! Because StringBuilder doesn ' t catch IOException
} } } else { count ++ ; } } return min < 0 || count >= min ; |
public class RESTAssert { /** * assert that string matches the given pattern
* @ param string the string to check
* @ param pattern the pattern to check
* @ throws WebApplicationException with status code 422 ( Unprocessable Entity ) */
public static void assertPattern ( String string , String pattern ) { } } | RESTAssert . assertPattern ( string , pattern , RESTAssert . DEFAULT_STATUS_CODE ) ; |
public class CombineFileInputFormat { /** * Create a new pool and add the filters to it .
* A pathname can satisfy any one of the specified filters .
* A split cannot have files from different pools . */
protected void createPool ( PathFilter ... filters ) { } } | MultiPathFilter multi = new MultiPathFilter ( ) ; for ( PathFilter f : filters ) { multi . add ( f ) ; } pools . add ( multi ) ; |
public class FileDataManager { /** * Checks if the given file needs persistence .
* @ param fileId the file id
* @ return false if the file is being persisted , or is already persisted ; otherwise true */
public boolean needPersistence ( long fileId ) { } } | if ( isFilePersisting ( fileId ) || isFilePersisted ( fileId ) ) { return false ; } try { String ufsFingerprint = ufsFingerprint ( fileId ) ; if ( ufsFingerprint != null ) { // mark as persisted
addPersistedFile ( fileId , ufsFingerprint ) ; return false ; } } catch ( Exception e ) { LOG . warn ( "Failed to check if file {} exists in under storage system: {}" , fileId , e . getMessage ( ) ) ; LOG . debug ( "Exception: " , e ) ; } return true ; |
public class StreamGraphGenerator { /** * Transforms a { @ code OneInputTransformation } .
* < p > This recursively transforms the inputs , creates a new { @ code StreamNode } in the graph and
* wired the inputs to this new node . */
private < IN , OUT > Collection < Integer > transformOneInputTransform ( OneInputTransformation < IN , OUT > transform ) { } } | Collection < Integer > inputIds = transform ( transform . getInput ( ) ) ; // the recursive call might have already transformed this
if ( alreadyTransformed . containsKey ( transform ) ) { return alreadyTransformed . get ( transform ) ; } String slotSharingGroup = determineSlotSharingGroup ( transform . getSlotSharingGroup ( ) , inputIds ) ; streamGraph . addOperator ( transform . getId ( ) , slotSharingGroup , transform . getCoLocationGroupKey ( ) , transform . getOperator ( ) , transform . getInputType ( ) , transform . getOutputType ( ) , transform . getName ( ) ) ; if ( transform . getStateKeySelector ( ) != null ) { TypeSerializer < ? > keySerializer = transform . getStateKeyType ( ) . createSerializer ( env . getConfig ( ) ) ; streamGraph . setOneInputStateKey ( transform . getId ( ) , transform . getStateKeySelector ( ) , keySerializer ) ; } streamGraph . setParallelism ( transform . getId ( ) , transform . getParallelism ( ) ) ; streamGraph . setMaxParallelism ( transform . getId ( ) , transform . getMaxParallelism ( ) ) ; for ( Integer inputId : inputIds ) { streamGraph . addEdge ( inputId , transform . getId ( ) , 0 ) ; } return Collections . singleton ( transform . getId ( ) ) ; |
public class WrappedSQLTransformation { /** * / * ( non - Javadoc )
* @ see com . abubusoft . kripton . processor . sqlite . transform . AbstractSQLTransform # generateReadValueFromCursor ( com . squareup . javapoet . MethodSpec . Builder , com . abubusoft . kripton . processor . sqlite . model . SQLiteDaoDefinition , com . squareup . javapoet . TypeName , java . lang . String , java . lang . String ) */
@ Override public void generateReadValueFromCursor ( Builder methodBuilder , SQLiteDaoDefinition daoDefinition , TypeName paramTypeName , String cursorName , String indexName ) { } } | methodBuilder . addCode ( READ_FROM_CURSOR , cursorName , indexName ) ; |
public class DefaultPlatformManager { /** * Loads configuration information for a module from mod . json . */
private JsonObject loadModuleConfig ( ModuleIdentifier modID , File modJsonFile ) { } } | try ( @ SuppressWarnings ( "resource" ) Scanner scanner = new Scanner ( modJsonFile , "UTF-8" ) . useDelimiter ( "\\A" ) ) { return new JsonObject ( scanner . next ( ) ) ; } catch ( FileNotFoundException e ) { throw new PlatformManagerException ( "Module " + modID + " does not contains a mod.json file" ) ; } catch ( NoSuchElementException e ) { throw new PlatformManagerException ( "Module " + modID + " contains an empty mod.json" ) ; } catch ( DecodeException e ) { throw new PlatformManagerException ( "Module " + modID + " mod.json contains invalid json" ) ; } |
public class MessagingTransportFactory { /** * Creates a transport .
* @ param port a listening port
* @ param clientHandler a transport client side handler
* @ param serverHandler a transport server side handler
* @ param exHandler a exception handler */
@ Override public Transport newInstance ( final int port , final EventHandler < TransportEvent > clientHandler , final EventHandler < TransportEvent > serverHandler , final EventHandler < Exception > exHandler ) { } } | final Injector injector = Tang . Factory . getTang ( ) . newInjector ( ) ; injector . bindVolatileParameter ( RemoteConfiguration . HostAddress . class , this . localAddress ) ; injector . bindVolatileParameter ( RemoteConfiguration . Port . class , port ) ; injector . bindVolatileParameter ( RemoteConfiguration . RemoteClientStage . class , new SyncStage < > ( clientHandler ) ) ; injector . bindVolatileParameter ( RemoteConfiguration . RemoteServerStage . class , new SyncStage < > ( serverHandler ) ) ; final Transport transport ; try { transport = injector . getInstance ( NettyMessagingTransport . class ) ; transport . registerErrorHandler ( exHandler ) ; return transport ; } catch ( final InjectionException e ) { throw new RuntimeException ( e ) ; } |
public class GraphDatabaseConfiguration { /** * Returns the home directory for the graph database initialized in this configuration
* @ return Home directory for this graph database configuration */
public File getHomeDirectory ( ) { } } | if ( ! configuration . has ( STORAGE_DIRECTORY ) ) throw new UnsupportedOperationException ( "No home directory specified" ) ; File dir = new File ( configuration . get ( STORAGE_DIRECTORY ) ) ; Preconditions . checkArgument ( dir . isDirectory ( ) , "Not a directory" ) ; return dir ; |
public class Snappy { /** * Compress the input string using the given encoding
* @ param s
* @ param encoding
* @ return the compressed data
* @ throws UnsupportedEncodingException
* @ throws IOException */
public static byte [ ] compress ( String s , Charset encoding ) throws IOException { } } | byte [ ] data = s . getBytes ( encoding ) ; return compress ( data ) ; |
public class BoxApiUser { /** * Gets a request that creates an enterprise user
* The session provided must be associated with an enterprise admin user
* @ param login the login ( email ) of the user to create
* @ param name name of the user to create
* @ return request to create an enterprise user */
public BoxRequestsUser . CreateEnterpriseUser getCreateEnterpriseUserRequest ( String login , String name ) { } } | BoxRequestsUser . CreateEnterpriseUser request = new BoxRequestsUser . CreateEnterpriseUser ( getUsersUrl ( ) , mSession , login , name ) ; return request ; |
public class BoxUploadSessionEndpoints { /** * Get a map of all end points
* @ return */
public Map < String , String > getEndpointsMap ( ) { } } | List < String > keys = getPropertiesKeySet ( ) ; HashMap < String , String > endpoints = new HashMap < > ( keys . size ( ) ) ; for ( String key : keys ) { endpoints . put ( key , getPropertyAsString ( key ) ) ; } return endpoints ; |
public class ComponentBorder { /** * In this case a real component is to be painted . Setting the location
* of the component will cause it to be painted at that location . */
@ Override public void paintBorder ( Component c , Graphics g , int x , int y , int width , int height ) { } } | float x2 = ( width - component . getWidth ( ) ) * component . getAlignmentX ( ) + x ; float y2 = ( height - component . getHeight ( ) ) * component . getAlignmentY ( ) + y ; component . setLocation ( ( int ) x2 , ( int ) y2 ) ; |
public class MixtureModelOutlierScaling { /** * Compute the a posterior probability for the given parameters .
* @ param f value
* @ param alpha Alpha ( mixing ) parameter
* @ param mu Mu ( for gaussian )
* @ param sigma Sigma ( for gaussian )
* @ param lambda Lambda ( for exponential )
* @ return Probability */
protected static double calcPosterior ( double f , double alpha , double mu , double sigma , double lambda ) { } } | final double pi = calcP_i ( f , mu , sigma ) ; final double qi = calcQ_i ( f , lambda ) ; return ( alpha * pi ) / ( alpha * pi + ( 1.0 - alpha ) * qi ) ; |
public class Symmetry010Date { /** * Consistency check for dates manipulations after calls to
* { @ link # plus ( long , TemporalUnit ) } ,
* { @ link # minus ( long , TemporalUnit ) } ,
* { @ link # until ( AbstractDate , TemporalUnit ) } or
* { @ link # with ( TemporalField , long ) } .
* @ param prolepticYear the Symmetry010 proleptic - year
* @ param month the Symmetry010 month , from 1 to 12
* @ param dayOfMonth the Symmetry010 day - of - month , from 1 to 30 , or 1 to 31 in February , May , August , November ,
* or 1 to 37 in December in a Leap Year
* @ return the resolved date */
private static Symmetry010Date resolvePreviousValid ( int prolepticYear , int month , int dayOfMonth ) { } } | int monthR = Math . min ( month , MONTHS_IN_YEAR ) ; int dayR = Math . min ( dayOfMonth , monthR == 12 && INSTANCE . isLeapYear ( prolepticYear ) ? DAYS_IN_MONTH + 7 : monthR % 3 == 2 ? DAYS_IN_MONTH_LONG : DAYS_IN_MONTH ) ; return create ( prolepticYear , monthR , dayR ) ; |
public class GetDeviceMethodsResult { /** * List of available device APIs .
* @ param deviceMethods
* List of available device APIs . */
public void setDeviceMethods ( java . util . Collection < DeviceMethod > deviceMethods ) { } } | if ( deviceMethods == null ) { this . deviceMethods = null ; return ; } this . deviceMethods = new java . util . ArrayList < DeviceMethod > ( deviceMethods ) ; |
public class VersionOneDataFactoryImpl { /** * Runs the VersionOneConnection library tools against a given
* YAML - formatted query . This requires a pre - formatted paged query to run ,
* and will not perform the paging for you - there are other helper methods
* for this .
* @ return A formatted JSONArray response
* @ throws HygieiaException */
public JSONArray getPagingQueryResponse ( ) throws HygieiaException { } } | synchronized ( this . v1Service ) { Object obj = this . v1Service . executePassThroughQuery ( this . getPagingQuery ( ) ) ; if ( obj == null ) { throw new HygieiaException ( "FAILED: There was a problem parsing or casting JSON types from a message response" , HygieiaException . JSON_FORMAT_ERROR ) ; } if ( obj . toString ( ) . equalsIgnoreCase ( "{\"error\":\"Unauthorized\"}" ) ) { throw new HygieiaException ( "FAILED: There was a problem authenticating with VersionOne" , HygieiaException . INVALID_CONFIGURATION ) ; } return makeJsonOutputArray ( obj . toString ( ) ) ; } |
public class AbstractFixture { /** * < p > getMethods . < / p >
* @ param type a { @ link java . lang . Class } object .
* @ param name a { @ link java . lang . String } object .
* @ return a { @ link java . util . List } object . */
protected List < Method > getMethods ( Class type , String name ) { } } | return introspector ( type ) . getMethods ( toJavaIdentifierForm ( name ) ) ; |
public class MemoryCredentialStore { /** * Lookups credential parameters by its key .
* @ param correlationId ( optional ) transaction id to trace execution through
* call chain .
* @ param key a key to uniquely identify the credential parameters .
* @ return resolved credential parameters or null if nothing was found . */
public CredentialParams lookup ( String correlationId , String key ) { } } | synchronized ( _lock ) { return _items . get ( key ) ; } |
public class AmazonServiceException { /** * Sets the raw response content . */
public void setRawResponseContent ( String rawResponseContent ) { } } | this . rawResponse = rawResponseContent == null ? null : rawResponseContent . getBytes ( StringUtils . UTF8 ) ; |
public class AutoZone { /** * only for test public */
ZoneInfo zoneInfo ( String ak , String bucket ) { } } | ZoneIndex index = new ZoneIndex ( ak , bucket ) ; return zones . get ( index ) ; |
public class Runner { /** * Run the dev appserver in async mode . */
public void runAsync ( int startSuccessTimeout ) throws MojoExecutionException { } } | runMojo . getLog ( ) . info ( "Waiting " + startSuccessTimeout + " seconds for the Dev App Server to start." ) ; try { runMojo . getAppEngineFactory ( ) . devServerRunAsync ( startSuccessTimeout ) . run ( configBuilder . buildRunConfiguration ( processServices ( ) , processProjectId ( ) ) ) ; } catch ( AppEngineException ex ) { throw new RuntimeException ( ex ) ; } runMojo . getLog ( ) . info ( "Dev App Server started." ) ; runMojo . getLog ( ) . info ( "Use the 'mvn appengine:stop' command to stop the server." ) ; |
public class VariantContextConverter { /** * Assumes that ori is in the form " POS : REF : ALT _ 0 ( , ALT _ N ) * : ALT _ IDX " .
* ALT _ N is the n - th allele if this is the n - th variant resultant of a multiallelic vcf row
* @ param ori
* @ return */
protected static List < String > getOriginalAlleles ( String [ ] ori ) { } } | if ( ori != null && ori . length == 4 ) { String [ ] multiAllele = ori [ 2 ] . split ( "," ) ; if ( multiAllele . length != 1 ) { ArrayList < String > alleles = new ArrayList < > ( multiAllele . length + 1 ) ; alleles . add ( ori [ 1 ] ) ; alleles . addAll ( Arrays . asList ( multiAllele ) ) ; return alleles ; } else { return Arrays . asList ( ori [ 1 ] , ori [ 2 ] ) ; } } return null ; |
public class FileSharedServerLeaseLog { /** * Access the singleton instance of the FileSystem Lease log .
* @ return the singleton FileSharedServerLeaseLog instance */
public static FileSharedServerLeaseLog getFileSharedServerLeaseLog ( String logDirStem , String localRecoveryIdentity , String recoveryGroup ) { } } | if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "FileSharedServerLeaseLog" , new Object [ ] { logDirStem , localRecoveryIdentity , recoveryGroup } ) ; if ( _serverInstallLeaseLogDir == null ) setLeaseLog ( logDirStem , localRecoveryIdentity , recoveryGroup ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "FileSharedServerLeaseLog" , _fileLeaseLog ) ; return _fileLeaseLog ; |
public class ExtensionsInner { /** * Disables the Operations Management Suite ( OMS ) on the HDInsight cluster .
* @ param resourceGroupName The name of the resource group .
* @ param clusterName The name of the cluster .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the { @ link ServiceResponse } object if successful . */
public Observable < Void > beginDisableMonitoringAsync ( String resourceGroupName , String clusterName ) { } } | return beginDisableMonitoringWithServiceResponseAsync ( resourceGroupName , clusterName ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ; |
public class TomcatReactiveWebServerFactory { /** * Configure the Tomcat { @ link Context } .
* @ param context the Tomcat context */
protected void configureContext ( Context context ) { } } | this . contextLifecycleListeners . forEach ( context :: addLifecycleListener ) ; new DisableReferenceClearingContextCustomizer ( ) . customize ( context ) ; this . tomcatContextCustomizers . forEach ( ( customizer ) -> customizer . customize ( context ) ) ; |
public class TypeVariableUtils { /** * In contrast to { @ link # resolveAllTypeVariables ( Type , Map ) } which replace generics in type according to
* generics map , this method replace variables with their upper bound . For example , variable defined as
* { @ code class Root < T extends String > } and for type { @ code List < T > } result will be { @ code List < String > }
* ( variable T replaced by upper bound - String ) .
* @ param type type to replace variables into .
* @ return type with all variables resolved as upper bound */
public static Type resolveAllTypeVariables ( final Type type ) { } } | final List < TypeVariable > vars = GenericsUtils . findVariables ( type ) ; if ( vars . isEmpty ( ) ) { // no variables in type - nothing to replace
return type ; } final LinkedHashMap < String , Type > generics = new LinkedHashMap < String , Type > ( ) ; // important to resolve vars in correct order
for ( TypeVariable var : GenericsUtils . orderVariablesForResolution ( vars ) ) { generics . put ( var . getName ( ) , GenericsResolutionUtils . resolveRawGeneric ( var , generics ) ) ; } // finally resolve variables with pre - computed upper bounds
return resolveAllTypeVariables ( type , generics ) ; |
public class InterfaceEndpointsInner { /** * Gets the specified interface endpoint by resource group .
* @ param resourceGroupName The name of the resource group .
* @ param interfaceEndpointName The name of the interface endpoint .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ throws CloudException thrown if the request is rejected by server
* @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
* @ return the InterfaceEndpointInner object if successful . */
public InterfaceEndpointInner getByResourceGroup ( String resourceGroupName , String interfaceEndpointName ) { } } | return getByResourceGroupWithServiceResponseAsync ( resourceGroupName , interfaceEndpointName ) . toBlocking ( ) . single ( ) . body ( ) ; |
public class JMPathOperation { /** * Deletes the target path : recursively if it is a directory , otherwise as a single file .
* @ param targetPath the target path
* @ return true if the deletion succeeded , false otherwise */
public static boolean deleteAll ( Path targetPath ) { } } | debug ( log , "deleteAll" , targetPath ) ; return JMPath . isDirectory ( targetPath ) ? deleteDir ( targetPath ) : delete ( targetPath ) ; |
public class TiffITProfile { /** * Validates that the IFD conforms the Tiff / IT standard . */
@ Override public void validate ( ) { } } | try { currentIfd = 0 ; for ( TiffObject o : model . getImageIfds ( ) ) { currentIfd ++ ; IFD ifd = ( IFD ) o ; IfdTags metadata = ifd . getMetadata ( ) ; int sft = - 1 ; int photo = - 1 ; int bps = - 1 ; int planar = - 1 ; int comp = - 1 ; if ( metadata . containsTagId ( TiffTags . getTagId ( "SubfileType" ) ) ) { sft = ( int ) metadata . get ( TiffTags . getTagId ( "SubfileType" ) ) . getFirstNumericValue ( ) ; } if ( metadata . containsTagId ( TiffTags . getTagId ( "Compression" ) ) ) { comp = ( int ) metadata . get ( TiffTags . getTagId ( "Compression" ) ) . getFirstNumericValue ( ) ; } if ( metadata . containsTagId ( TiffTags . getTagId ( "PhotometricInterpretation" ) ) ) { photo = ( int ) metadata . get ( TiffTags . getTagId ( "PhotometricInterpretation" ) ) . getFirstNumericValue ( ) ; } if ( metadata . containsTagId ( TiffTags . getTagId ( "BitsPerSample" ) ) ) { bps = ( int ) metadata . get ( TiffTags . getTagId ( "BitsPerSample" ) ) . getFirstNumericValue ( ) ; } if ( metadata . containsTagId ( TiffTags . getTagId ( "PlanarConfiguration" ) ) ) { planar = ( int ) metadata . get ( TiffTags . getTagId ( "PlanarConfiguration" ) ) . getFirstNumericValue ( ) ; } int p = profile ; // Determination of TIFF / IT file type
if ( sft == 1 || sft == - 1 ) { if ( comp == 1 || comp == 32895 ) { if ( photo == 5 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 32768 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 2 ) { if ( bps > 1 ) { validateIfdCT ( ifd , p ) ; } else if ( bps == 1 ) { validateIfdSD ( ifd , p ) ; } } } else if ( photo == 2 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 32768 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 2 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 8 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 32768 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 2 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 0 || photo == 1 ) { if ( bps == 1 ) { validateIfdBP ( ifd , p ) ; } else if ( bps > 1 ) { validateIfdMP ( ifd , p ) ; } } } else if ( comp == 4 ) { if ( photo == 0 || photo == 1 ) { validateIfdBP ( ifd , p ) ; } else if ( photo == 5 ) { validateIfdSD ( ifd , p ) ; } } else if ( comp == 7 ) { if ( photo == 5 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 2 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 6 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 8 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 0 || photo == 1 ) { if ( bps > 1 ) { validateIfdMP ( ifd , p ) ; } } } else if ( comp == 8 ) { if ( photo == 5 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 32768 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 2 ) { if ( bps > 1 ) { validateIfdCT ( ifd , p ) ; } else if ( bps == 1 ) { validateIfdSD ( ifd , p ) ; } } } else if ( photo == 2 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 32768 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 2 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 8 ) { if ( planar == 1 ) { validateIfdCT ( ifd , p ) ; } else if ( planar 
== 32768 ) { validateIfdCT ( ifd , p ) ; } else if ( planar == 2 ) { validateIfdCT ( ifd , p ) ; } } else if ( photo == 0 || photo == 1 ) { if ( bps == 1 ) { validateIfdBP ( ifd , p ) ; } else if ( bps > 1 ) { validateIfdMP ( ifd , p ) ; } } } else if ( comp == 32896 ) { validateIfdLW ( ifd , p ) ; } else if ( comp == 32897 ) { validateIfdHC ( ifd , p ) ; } else if ( comp == 32898 ) { validateIfdBL ( ifd , p ) ; } else if ( ( ( sft >> 3 ) & 1 ) == 1 ) { validateIfdFP ( ifd , p ) ; } } } } catch ( Exception e ) { } |
public class ProtobufIDLProxy { /** * Creates the .
* @ param data the data
* @ param debug the debug
* @ param isUniName the is uni name
* @ return the map */
public static Map < String , IDLProxyObject > create ( String data , boolean debug , boolean isUniName ) { } } | return create ( data , debug , null , isUniName ) ; |
public class StringWriter { /** * Appends the specified character sequence to this writer .
* < p > An invocation of this method of the form < tt > out . append ( csq ) < / tt >
* behaves in exactly the same way as the invocation
* < pre >
* out . write ( csq . toString ( ) ) < / pre >
* < p > Depending on the specification of < tt > toString < / tt > for the
* character sequence < tt > csq < / tt > , the entire sequence may not be
* appended . For instance , invoking the < tt > toString < / tt > method of a
* character buffer will return a subsequence whose content depends upon
* the buffer ' s position and limit .
* @ param csq
* The character sequence to append . If < tt > csq < / tt > is
* < tt > null < / tt > , then the four characters < tt > " null " < / tt > are
* appended to this writer .
* @ return This writer
* @ since 1.5 */
public StringWriter append ( CharSequence csq ) { } } | if ( csq == null ) write ( "null" ) ; else write ( csq . toString ( ) ) ; return this ; |
public class FilterPredicate { /** * Create a predicate specifying that the value of the specified attribute must exactly
* match the supplied value . The value must also be the type specified by the FilterableAttribute
* @ param attribute The attribute to match on
* @ param value The exact value that the attribute must have
* @ throws IllegalArgumentException if the value is not of the type returned by the attributes getType method */
public static FilterPredicate areEqual ( FilterableAttribute attribute , Object value ) { } } | FilterPredicate pred = new FilterPredicate ( ) ; pred . attribute = attribute ; Class < ? > requiredType = attribute . getType ( ) ; if ( ! requiredType . isInstance ( value ) ) { throw new IllegalArgumentException ( "The value must be of the correct type for the FilterableAttribute." + " Expected: " + requiredType . getName ( ) + " but was " + value . getClass ( ) . getName ( ) ) ; } pred . values = Collections . singleton ( getString ( value ) ) ; return pred ; |
public class VecUtils { /** * Create a new { @ link Vec } of numeric values from a string { @ link Vec } . Any rows that cannot be
* converted to a number are set to NA .
* Currently only does basic numeric formats . No exponents , or hex values . Doesn ' t
* even like commas or spaces . : ( Needs love . Handling more numeric
* representations is PUBDEV - 2209
* @ param src a string { @ link Vec }
* @ return a numeric { @ link Vec } */
public static Vec stringToNumeric ( Vec src ) { } } | if ( ! src . isString ( ) ) throw new H2OIllegalArgumentException ( "stringToNumeric conversion only works on string columns" ) ; Vec res = new MRTask ( ) { @ Override public void map ( Chunk chk , NewChunk newChk ) { if ( chk instanceof C0DChunk ) { // all NAs
for ( int i = 0 ; i < chk . _len ; i ++ ) newChk . addNA ( ) ; } else { BufferedString tmpStr = new BufferedString ( ) ; for ( int i = 0 ; i < chk . _len ; i ++ ) { if ( ! chk . isNA ( i ) ) { tmpStr = chk . atStr ( tmpStr , i ) ; switch ( tmpStr . getNumericType ( ) ) { case BufferedString . NA : newChk . addNA ( ) ; break ; case BufferedString . INT : newChk . addNum ( Long . parseLong ( tmpStr . toString ( ) ) , 0 ) ; break ; case BufferedString . REAL : newChk . addNum ( Double . parseDouble ( tmpStr . toString ( ) ) ) ; break ; default : throw new H2OIllegalValueException ( "Received unexpected type when parsing a string to a number." , this ) ; } } else newChk . addNA ( ) ; } } } } . doAll ( Vec . T_NUM , src ) . outputFrame ( ) . anyVec ( ) ; assert res != null ; return res ; |
public class MarathonScheduler { /** * build the container object */
protected ObjectNode getContainer ( ObjectMapper mapper ) { } } | ObjectNode containerNode = mapper . createObjectNode ( ) ; containerNode . put ( MarathonConstants . CONTAINER_TYPE , "DOCKER" ) ; containerNode . set ( "docker" , getDockerContainer ( mapper ) ) ; return containerNode ; |
public class DialogPlusBuilder { /** * Add margins to your outermost view which contains everything . By default , margins of 0
* are applied */
public DialogPlusBuilder setOutMostMargin ( int left , int top , int right , int bottom ) { } } | this . outMostMargin [ 0 ] = left ; this . outMostMargin [ 1 ] = top ; this . outMostMargin [ 2 ] = right ; this . outMostMargin [ 3 ] = bottom ; return this ; |
public class SoyType { /** * Returns true if a parameter or field of this type can be assigned from a value of { @ code
* srcType } .
* @ param srcType The type of the incoming value .
* @ return True if the assignment is valid . */
public final boolean isAssignableFrom ( SoyType srcType ) { } } | // Handle unions generically . A type is assignable from a union if it is assignable from _ all _
// members .
if ( srcType instanceof UnionType ) { // By construction union types are guaranteed
// 1 . not to be empty
// 2 . not to contain union types
UnionType asUnion = ( UnionType ) srcType ; for ( SoyType member : asUnion . getMembers ( ) ) { if ( ! doIsAssignableFromNonUnionType ( member ) ) { return false ; } } return true ; } else { return doIsAssignableFromNonUnionType ( srcType ) ; } |
public class AbstractRadixAddressableHeap { /** * Helper method for finding and caching the minimum . Assumes that the heap
* contains at least one element .
* @ param firstBucket
* start looking for elements from this bucket */
private void findAndCacheMinimum ( int firstBucket ) { } } | if ( currentMin == null ) { // find first non - empty bucket
int currentMinBucket = EMPTY ; for ( int i = firstBucket ; i < this . buckets . length ; i ++ ) { if ( buckets [ i ] != null ) { currentMinBucket = i ; break ; } } // find new minimum and cache it
if ( currentMinBucket >= 0 ) { Node val = buckets [ currentMinBucket ] ; while ( val != null ) { if ( currentMin == null || compare ( val . key , currentMin . key ) < 0 ) { currentMin = val ; } val = val . next ; } } } |
public class ClientSelectors { /** * Works with AddressUtil . matchInterface
* @ param ipMask ip mask for the selector
* @ return client selector according to IP */
public static ClientSelector ipSelector ( final String ipMask ) { } } | return new ClientSelector ( ) { @ Override public boolean select ( Client client ) { return AddressUtil . matchInterface ( client . getSocketAddress ( ) . getAddress ( ) . getHostAddress ( ) , ipMask ) ; } @ Override public String toString ( ) { return "ClientSelector{ipMask:" + ipMask + " }" ; } } ; |
public class TargetPoolClient { /** * Removes health check URL from a target pool .
* < p > Sample code :
* < pre > < code >
* try ( TargetPoolClient targetPoolClient = TargetPoolClient . create ( ) ) {
* ProjectRegionTargetPoolName targetPool = ProjectRegionTargetPoolName . of ( " [ PROJECT ] " , " [ REGION ] " , " [ TARGET _ POOL ] " ) ;
* TargetPoolsRemoveHealthCheckRequest targetPoolsRemoveHealthCheckRequestResource = TargetPoolsRemoveHealthCheckRequest . newBuilder ( ) . build ( ) ;
* Operation response = targetPoolClient . removeHealthCheckTargetPool ( targetPool , targetPoolsRemoveHealthCheckRequestResource ) ;
* < / code > < / pre >
* @ param targetPool Name of the target pool to remove health checks from .
* @ param targetPoolsRemoveHealthCheckRequestResource
* @ throws com . google . api . gax . rpc . ApiException if the remote call fails */
@ BetaApi public final Operation removeHealthCheckTargetPool ( ProjectRegionTargetPoolName targetPool , TargetPoolsRemoveHealthCheckRequest targetPoolsRemoveHealthCheckRequestResource ) { } } | RemoveHealthCheckTargetPoolHttpRequest request = RemoveHealthCheckTargetPoolHttpRequest . newBuilder ( ) . setTargetPool ( targetPool == null ? null : targetPool . toString ( ) ) . setTargetPoolsRemoveHealthCheckRequestResource ( targetPoolsRemoveHealthCheckRequestResource ) . build ( ) ; return removeHealthCheckTargetPool ( request ) ; |
public class IBANManager { /** * Read all IBAN country data from a file . */
private static void _readIBANDataFromXML ( ) { } } | final IMicroDocument aDoc = MicroReader . readMicroXML ( new ClassPathResource ( "codelists/iban-country-data.xml" ) ) ; if ( aDoc == null ) throw new InitializationException ( "Failed to read IBAN country data [1]" ) ; if ( aDoc . getDocumentElement ( ) == null ) throw new InitializationException ( "Failed to read IBAN country data [2]" ) ; final DateTimeFormatter aDTPattern = DateTimeFormatter . ISO_DATE ; for ( final IMicroElement eCountry : aDoc . getDocumentElement ( ) . getAllChildElements ( ELEMENT_COUNTRY ) ) { // get descriptive string
final String sDesc = eCountry . getTextContent ( ) ; final String sCountryCode = sDesc . substring ( 0 , 2 ) ; if ( CountryCache . getInstance ( ) . getCountry ( sCountryCode ) == null ) if ( LOGGER . isWarnEnabled ( ) ) LOGGER . warn ( "IBAN country data: no such country code '" + sCountryCode + "' - be careful" ) ; LocalDate aValidFrom = null ; if ( eCountry . hasAttribute ( ATTR_VALIDFROM ) ) { // Constant format , conforming to XML date
aValidFrom = PDTFromString . getLocalDateFromString ( eCountry . getAttributeValue ( ATTR_VALIDFROM ) , aDTPattern ) ; } LocalDate aValidTo = null ; if ( eCountry . hasAttribute ( ATTR_VALIDUNTIL ) ) { // Constant format , conforming to XML date
aValidTo = PDTFromString . getLocalDateFromString ( eCountry . getAttributeValue ( ATTR_VALIDUNTIL ) , aDTPattern ) ; } final String sLayout = eCountry . getAttributeValue ( ATTR_LAYOUT ) ; final String sCheckDigits = eCountry . getAttributeValue ( ATTR_CHECKDIGITS ) ; // get expected length
final String sLen = eCountry . getAttributeValue ( ATTR_LEN ) ; final int nExpectedLength = StringParser . parseInt ( sLen , CGlobal . ILLEGAL_UINT ) ; if ( nExpectedLength == CGlobal . ILLEGAL_UINT ) throw new InitializationException ( "Failed to convert length '" + sLen + "' to int!" ) ; if ( s_aIBANData . containsKey ( sCountryCode ) ) throw new IllegalArgumentException ( "Country " + sCountryCode + " is already contained!" ) ; s_aIBANData . put ( sCountryCode , IBANCountryData . createFromString ( sCountryCode , nExpectedLength , sLayout , sCheckDigits , aValidFrom , aValidTo , sDesc ) ) ; } |
public class auditnslogpolicy_systemglobal_binding { /** * Use this API to fetch auditnslogpolicy _ systemglobal _ binding resources of given name . */
public static auditnslogpolicy_systemglobal_binding [ ] get ( nitro_service service , String name ) throws Exception { } } | auditnslogpolicy_systemglobal_binding obj = new auditnslogpolicy_systemglobal_binding ( ) ; obj . set_name ( name ) ; auditnslogpolicy_systemglobal_binding response [ ] = ( auditnslogpolicy_systemglobal_binding [ ] ) obj . get_resources ( service ) ; return response ; |
public class SeSellerFilter { /** * < p > Evaluates S . E . Seller filter
* or throws " SOMETHING _ WRONG " if not found .
* For se entities it return NULL . < / p >
* @ param pReqVars additional request scoped parameters
* @ param pData data
* @ return S . E . Seller filter
* @ throws Exception - if not S . E . Seller */
public final String evaluate ( final Map < String , Object > pReqVars , final IRequestData pData ) throws Exception { } } | SeSeller seSeller = this . findSeSeller . find ( pReqVars , pData . getUserName ( ) ) ; if ( seSeller == null ) { throw new ExceptionWithCode ( ExceptionWithCode . SOMETHING_WRONG , "It's not S.E.Seller - " + pData . getUserName ( ) ) ; } String nmEnt = pData . getParameter ( "nmEnt" ) ; boolean isSe = false ; for ( Class < ? > cl : this . seEntities ) { if ( cl . getSimpleName ( ) . equals ( nmEnt ) ) { isSe = true ; break ; } } if ( isSe ) { // simple - hummer implementation :
String wheSe ; if ( nmEnt . startsWith ( "I18n" ) ) { wheSe = "HASNAME.SELLER=" ; } else if ( nmEnt . endsWith ( "Price" ) || nmEnt . endsWith ( "Place" ) || nmEnt . endsWith ( "Specifics" ) ) { wheSe = "ITEM.SELLER=" ; } else if ( nmEnt . equals ( "CuOrSe" ) ) { wheSe = "CUORSE.SEL=" ; } else { // good / service / paymd
wheSe = nmEnt . toUpperCase ( ) + ".SELLER=" ; } return wheSe + seSeller . getItsId ( ) . getItsId ( ) ; } else { return null ; } |
public class ServerContext { /** * Registers a state change listener .
* @ param listener The state change listener .
* @ return The listener context . */
public Listener < CopycatServer . State > onStateChange ( Consumer < CopycatServer . State > listener ) { } } | return stateChangeListeners . add ( listener ) ; |
public class AWSIotClient { /** * Creates a new thing type .
* @ param createThingTypeRequest
* The input for the CreateThingType operation .
* @ return Result of the CreateThingType operation returned by the service .
* @ throws InvalidRequestException
* The request is not valid .
* @ throws ThrottlingException
* The rate exceeds the limit .
* @ throws UnauthorizedException
* You are not authorized to perform this operation .
* @ throws ServiceUnavailableException
* The service is temporarily unavailable .
* @ throws InternalFailureException
* An unexpected error has occurred .
* @ throws ResourceAlreadyExistsException
* The resource already exists .
* @ sample AWSIot . CreateThingType */
@ Override public CreateThingTypeResult createThingType ( CreateThingTypeRequest request ) { } } | request = beforeClientExecution ( request ) ; return executeCreateThingType ( request ) ; |
public class WritableShimSerialization { /** * Helper method to add this serializer to an existing Hadoop config . */
public static void addToHadoopConfiguration ( Configuration conf ) { } } | final String SERIALIZATION_KEY = "io.serializations" ; String existingSerializers = conf . get ( SERIALIZATION_KEY ) ; if ( existingSerializers != null ) { conf . set ( SERIALIZATION_KEY , existingSerializers + "," + WritableShimSerialization . class . getName ( ) ) ; } else { conf . set ( SERIALIZATION_KEY , "org.apache.hadoop.io.serializer.WritableSerialization," + WritableShimSerialization . class . getName ( ) ) ; } |
public class Query { /** * Evaluators */
@ Override public boolean evaluate ( final GraphRewrite event , final EvaluationContext context ) { } } | final String queryStr = toString ( ) ; return ExecutionStatistics . performBenchmarked ( queryStr , new Task < Boolean > ( ) { public Boolean execute ( ) { Query . this . setInitialFramesSelector ( createInitialFramesSelector ( Query . this ) ) ; Iterable < ? extends WindupVertexFrame > result = framesSelector . getFrames ( event , context ) ; if ( resultFilter != null ) { com . google . common . base . Predicate < WindupVertexFrame > guavaPred = new com . google . common . base . Predicate < WindupVertexFrame > ( ) { @ Override public boolean apply ( WindupVertexFrame input ) { return resultFilter . accept ( input ) ; } } ; result = Iterables . filter ( result , guavaPred ) ; } setResults ( event , getOutputVariablesName ( ) , result ) ; return result . iterator ( ) . hasNext ( ) ; } } ) ; |
public class GrokToJsonConverter { /** * Converts Text ( String ) to JSON based on a Grok regexp expression .
* By default , fields between Text and JSON are mapped by Grok SEMANTIC which is the identifier you give to the piece of text being matched in your Grok expression .
* e . g :
* { @ inheritDoc }
* @ see Converter # convertRecord ( Object , Object , WorkUnitState ) */
@ Override public Iterable < JsonObject > convertRecord ( JsonArray outputSchema , String inputRecord , WorkUnitState workUnit ) throws DataConversionException { } } | JsonObject outputRecord = createOutput ( outputSchema , inputRecord ) ; LOG . debug ( "Converted into " + outputRecord ) ; return new SingleRecordIterable < JsonObject > ( outputRecord ) ; |
public class ExperimentsInner { /** * Gets a list of Experiments within the specified Workspace .
* @ param nextPageLink The NextLink from the previous successful call to List operation .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; ExperimentInner & gt ; object */
public Observable < Page < ExperimentInner > > listByWorkspaceNextAsync ( final String nextPageLink ) { } } | return listByWorkspaceNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < ExperimentInner > > , Page < ExperimentInner > > ( ) { @ Override public Page < ExperimentInner > call ( ServiceResponse < Page < ExperimentInner > > response ) { return response . body ( ) ; } } ) ; |
public class FileUtils { /** * Create parent directory structure of a given file , if it doesn ' t already
* exist .
* @ param file File to create directories for
* @ throws IOException if an I / O error occurs */
public static void mkParentDirs ( final File file ) throws IOException { } } | // Create parent directory structure if necessary
File parentFile = file . getParentFile ( ) ; if ( parentFile == null ) { // File was created with a relative path
parentFile = file . getAbsoluteFile ( ) . getParentFile ( ) ; } if ( parentFile != null && ! parentFile . exists ( ) ) { if ( ! parentFile . mkdirs ( ) ) { throw new IOException ( "Unable to create parent directory " + "structure for file " + file . getAbsolutePath ( ) ) ; } } |
public class SplitIndexWriter { /** * Generate separate index files , for each Unicode character , listing all
* the members starting with the particular unicode character .
* @ param indexbuilder IndexBuilder built by { @ link IndexBuilder }
* @ throws DocletAbortException */
public static void generate ( ConfigurationImpl configuration , IndexBuilder indexbuilder ) { } } | SplitIndexWriter indexgen ; DocPath filename = DocPath . empty ; DocPath path = DocPaths . INDEX_FILES ; try { for ( int i = 0 ; i < indexbuilder . elements ( ) . length ; i ++ ) { int j = i + 1 ; int prev = ( j == 1 ) ? - 1 : i ; int next = ( j == indexbuilder . elements ( ) . length ) ? - 1 : j + 1 ; filename = DocPaths . indexN ( j ) ; indexgen = new SplitIndexWriter ( configuration , path . resolve ( filename ) , indexbuilder , prev , next ) ; indexgen . generateIndexFile ( ( Character ) indexbuilder . elements ( ) [ i ] ) ; indexgen . close ( ) ; } } catch ( IOException exc ) { configuration . standardmessage . error ( "doclet.exception_encountered" , exc . toString ( ) , filename . getPath ( ) ) ; throw new DocletAbortException ( exc ) ; } |
public class InconsistentProperty { /** * < code > map & lt ; string , . alluxio . grpc . meta . InconsistentPropertyValues & gt ; values = 2 ; < / code > */
public boolean containsValues ( java . lang . String key ) { } } | if ( key == null ) { throw new java . lang . NullPointerException ( ) ; } return internalGetValues ( ) . getMap ( ) . containsKey ( key ) ; |
public class UniversalIdStrSubscriber { /** * { @ inheritDoc } */
@ Override public boolean onMessage ( String channel , IMessage < String , byte [ ] > msg ) { } } | if ( msg instanceof UniversalIdStrMessage ) { return onMessage ( channel , ( UniversalIdStrMessage ) msg ) ; } throw new IllegalArgumentException ( "This subscriber expects message of type [" + UniversalIdStrMessage . class . getName ( ) + "]!" ) ; |
public class TypeMapStore { /** * Creates a TypeMap . If { @ code converter } is null , the TypeMap is configured with implicit
* mappings , else the { @ code converter } is set against the TypeMap . */
public < S , D > TypeMap < S , D > create ( S source , Class < S > sourceType , Class < D > destinationType , String typeMapName , InheritingConfiguration configuration , MappingEngineImpl engine ) { } } | synchronized ( lock ) { TypeMapImpl < S , D > typeMap = new TypeMapImpl < S , D > ( sourceType , destinationType , typeMapName , configuration , engine ) ; if ( configuration . isImplicitMappingEnabled ( ) && Types . mightContainsProperties ( typeMap . getSourceType ( ) ) && Types . mightContainsProperties ( typeMap . getDestinationType ( ) ) ) ImplicitMappingBuilder . build ( source , typeMap , config . typeMapStore , config . converterStore ) ; typeMaps . put ( TypePair . of ( sourceType , destinationType , typeMapName ) , typeMap ) ; return typeMap ; } |
public class Hash { /** * a minimal perfect hash function for a 32 byte input
* @ param input
* @ return */
public static byte [ ] shuffle ( byte [ ] input ) { } } | for ( int i = 0 ; i < input . length ; i ++ ) { int i2 = input [ i ] ; if ( i2 < 0 ) i2 = 127 + Math . abs ( i2 ) ; input [ i ] = shuffle [ i2 ] ; // result of more shuffles could just be mapped to the first
// i2 = input [ i ] ;
// if ( i2 < 0)
// i2 = 127 + Math . abs ( i2 ) ;
// input [ i ] = ( byte ) shuffle2 [ i2 ] ;
// i2 = input [ i ] ;
// if ( i2 < 0)
// i2 = 127 + Math . abs ( i2 ) ;
// input [ i ] = ( byte ) shuffle3 [ i2 ] ;
} return input ; |
public class FileTransferManager { /** * When the file transfer request is acceptable , this method should be
* invoked . It will create an IncomingFileTransfer which allows the
* transmission of the file to proceed .
* @ param request
* The remote request that is being accepted .
* @ return The IncomingFileTransfer which manages the download of the file
* from the transfer initiator . */
protected IncomingFileTransfer createIncomingFileTransfer ( FileTransferRequest request ) { } } | if ( request == null ) { throw new NullPointerException ( "ReceiveRequest cannot be null" ) ; } IncomingFileTransfer transfer = new IncomingFileTransfer ( request , fileTransferNegotiator ) ; transfer . setFileInfo ( request . getFileName ( ) , request . getFileSize ( ) ) ; return transfer ; |
public class CssScanner { /** * SUFFIXMATCH $ = */
private void _suffixmatch ( ) throws IOException { } } | if ( debug ) { checkState ( reader . curChar == '$' ) ; } builder . type = Type . SUFFIXMATCH ; builder . append ( "$=" ) ; reader . next ( ) ; if ( debug ) { checkState ( reader . curChar == '=' ) ; } |
public class MurmurHash { /** * Process an { @ code integer } value .
* @ param input 32 - bit input value
* @ return this */
public MurmurHash hash ( int input ) { } } | count ++ ; input *= 0xcc9e2d51 ; input = Integer . rotateLeft ( input , 15 ) ; input *= 0x1b873593 ; hash ^= input ; hash = Integer . rotateLeft ( hash , 13 ) ; hash = hash * 5 + 0xe6546b64 ; return this ; |
public class VirtualNetworkGatewaysInner { /** * Gets all virtual network gateways by resource group .
* @ param resourceGroupName The name of the resource group .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the PagedList & lt ; VirtualNetworkGatewayInner & gt ; object */
public Observable < Page < VirtualNetworkGatewayInner > > listByResourceGroupAsync ( final String resourceGroupName ) { } } | return listByResourceGroupWithServiceResponseAsync ( resourceGroupName ) . map ( new Func1 < ServiceResponse < Page < VirtualNetworkGatewayInner > > , Page < VirtualNetworkGatewayInner > > ( ) { @ Override public Page < VirtualNetworkGatewayInner > call ( ServiceResponse < Page < VirtualNetworkGatewayInner > > response ) { return response . body ( ) ; } } ) ; |
public class ObjectSpace { /** * Returns a proxy object that implements the specified interfaces . Methods invoked on the proxy object will be invoked
* remotely on the object with the specified ID in the ObjectSpace for the specified connection . If the remote end of the
* connection has not { @ link # addConnection ( Connection ) added } the connection to the ObjectSpace , the remote method invocations
* will be ignored .
* Methods that return a value will throw { @ link TimeoutException } if the response is not received with the
* { @ link RemoteObject # setResponseTimeout ( int ) response timeout } .
* If { @ link RemoteObject # setNonBlocking ( boolean ) non - blocking } is false ( the default ) , then methods that return a value must
* not be called from the update thread for the connection . An exception will be thrown if this occurs . Methods with a void
* return value can be called on the update thread .
* If a proxy returned from this method is part of an object graph sent over the network , the object graph on the receiving
* side will have the proxy object replaced with the registered object .
* @ see RemoteObject */
static public RemoteObject getRemoteObject ( Connection connection , int objectID , Class ... ifaces ) { } } | if ( connection == null ) throw new IllegalArgumentException ( "connection cannot be null." ) ; if ( ifaces == null ) throw new IllegalArgumentException ( "ifaces cannot be null." ) ; Class [ ] temp = new Class [ ifaces . length + 1 ] ; temp [ 0 ] = RemoteObject . class ; System . arraycopy ( ifaces , 0 , temp , 1 , ifaces . length ) ; return ( RemoteObject ) Proxy . newProxyInstance ( ObjectSpace . class . getClassLoader ( ) , temp , new RemoteInvocationHandler ( connection , objectID ) ) ; |
public class DoubleIntIndex { /** * Returns the index of the lowest element > the given search target
* @ return the index */
private int binaryGreaterSearch ( ) { } } | int low = 0 ; int high = count ; int mid = 0 ; int compare = 0 ; while ( low < high ) { mid = ( low + high ) / 2 ; compare = compare ( mid ) ; if ( compare < 0 ) { high = mid ; } else { low = mid + 1 ; } } return low == count ? - 1 : low ; |
public class SDBaseOps {
    /**
     * Element-wise scalar maximum operation: out = max(in, value).
     * Delegates to the named overload with a null name (an auto-generated variable name).
     *
     * @param in    input variable
     * @param value scalar value to compare against each element
     * @return output variable
     */
    public SDVariable scalarMax(SDVariable in, Number value) {
        return scalarMax(null, in, value);
    }
}
public class CommonConfigUtils { /** * Returns the value for the configuration attribute matching the key provided . If the value does not exist or is empty , the
* provided default value will be returned . */
public boolean getBooleanConfigAttribute ( Map < String , Object > props , String key , boolean defaultValue ) { } } | if ( props . containsKey ( key ) ) { return ( Boolean ) props . get ( key ) ; } return defaultValue ; |
public class APIClient {
    /**
     * Update an api key.
     *
     * @param key the key to update
     * @param acls the list of ACL for this key. Defined by an array of strings that
     *        can contain the following values:
     *        - search: allow to search (https and http)
     *        - addObject: allows to add/update an object in the index (https only)
     *        - deleteObject: allows to delete an existing object (https only)
     *        - deleteIndex: allows to delete index content (https only)
     *        - settings: allows to get index settings (https only)
     *        - editSettings: allows to change index settings (https only)
     * @param validity the number of seconds after which the key will be automatically removed (0 means no time limit for this key)
     * @param maxQueriesPerIPPerHour the maximum number of API calls allowed from an IP address per hour. Defaults to 0 (no rate limit)
     * @param maxHitsPerQuery the maximum number of hits this API key can retrieve in one call. Defaults to 0 (unlimited)
     * @param indexes the list of targeted indexes
     * @return the server's JSON response
     * @throws AlgoliaException propagated from the underlying REST call
     */
    public JSONObject updateApiKey(String key, List<String> acls, int validity, int maxQueriesPerIPPerHour, int maxHitsPerQuery, List<String> indexes) throws AlgoliaException {
        // Build the JSON payload, then delegate to the (key, json) overload.
        JSONObject jsonObject = generateUserKeyJson(acls, validity, maxQueriesPerIPPerHour, maxHitsPerQuery, indexes);
        return updateApiKey(key, jsonObject);
    }
}
public class SerializerIntrinsics {
    /**
     * Emits an FADD instruction for the two x87 registers.
     * NOTE: one of dst or src must be st(0) — enforced by assertion only
     * (active with -ea), since the encoding requires the stack top.
     *
     * @param dst destination register
     * @param src source register
     */
    public final void fadd(X87Register dst, X87Register src) {
        assert dst.index() == 0 || src.index() == 0;
        emitX86(INST_FADD, dst, src);
    }
}
public class StoreAsBinaryConfigurationBuilder {
    /**
     * Enables storing both keys and values as binary.
     *
     * @return this builder, for chaining
     */
    public StoreAsBinaryConfigurationBuilder enable() {
        attributes.attribute(ENABLED).set(true);
        // Keep the memory subsystem's storage type consistent with this flag.
        getBuilder().memory().storageType(StorageType.BINARY);
        return this;
    }
}
public class ExpirationManagerImpl {
    /**
     * Used only for testing: injects the scheduler, cache name and configuration
     * directly, bypassing normal component wiring.
     *
     * @param executor  scheduled executor used by this manager
     * @param cacheName name of the cache this manager serves
     * @param cfg       cache configuration
     */
    void initialize(ScheduledExecutorService executor, String cacheName, Configuration cfg) {
        this.executor = executor;
        this.configuration = cfg;
        this.cacheName = cacheName;
    }
}
public class ArdorMeshMapper { /** * For now very primitive !
* Assumes a single texture and that the triangles form a xy - monotone surface
* A continuous surface S in R3 is called xy - monotone , if every line parallel
* to the z - axis intersects it at a single point at most .
* @ param mesh
* @ param scale */
public static void updateTextureCoordinates ( Mesh mesh , List < DelaunayTriangle > triangles , double scale , double angle ) { } } | TriangulationPoint vertex ; FloatBuffer tcBuf ; float width , maxX , minX , maxY , minY , x , y , xn , yn ; maxX = Float . NEGATIVE_INFINITY ; minX = Float . POSITIVE_INFINITY ; maxY = Float . NEGATIVE_INFINITY ; minY = Float . POSITIVE_INFINITY ; for ( DelaunayTriangle t : triangles ) { for ( int i = 0 ; i < 3 ; i ++ ) { vertex = t . points [ i ] ; x = vertex . getXf ( ) ; y = vertex . getYf ( ) ; maxX = x > maxX ? x : maxX ; minX = x < minX ? x : minX ; maxY = y > maxY ? y : maxY ; minY = y < minY ? x : minY ; } } width = maxX - minX > maxY - minY ? maxX - minX : maxY - minY ; width *= scale ; tcBuf = prepareTextureCoordinateBuffer ( mesh , 0 , 2 * 3 * triangles . size ( ) ) ; tcBuf . rewind ( ) ; for ( DelaunayTriangle t : triangles ) { for ( int i = 0 ; i < 3 ; i ++ ) { vertex = t . points [ i ] ; x = vertex . getXf ( ) - ( maxX - minX ) / 2 ; y = vertex . getYf ( ) - ( maxY - minY ) / 2 ; xn = ( float ) ( x * Math . cos ( angle ) - y * Math . sin ( angle ) ) ; yn = ( float ) ( y * Math . cos ( angle ) + x * Math . sin ( angle ) ) ; tcBuf . put ( xn / width ) ; tcBuf . put ( yn / width ) ; } } |
public class CPadawan { /** * return the filters of type ' type '
* @ param type type of filters to get
* @ return the filters of type ' type ' */
private final List getFilter ( String type ) { } } | try { mutex . acquire ( ) ; return ( List ) mapFilter . get ( type + ".filter" ) ; } finally { try { mutex . release ( ) ; } catch ( Throwable ignore ) { } } |
public class MtasRBTree { /** * Checks if is red .
* @ param n the n
* @ return true , if is red */
private boolean isRed ( MtasRBTreeNode n ) { } } | if ( n == null ) { return false ; } return n . color == MtasRBTreeNode . RED ; |
public class AuxiliaryStorageClientRest {
    /**
     * This method is supposed to be used as an exception mapper from
     * {@code WebApplicationException}, sent in a REST response, to
     * {@code AuxiliaryStorageException} (or a more specific subtype).
     * This method never returns normally: it always throws.
     *
     * @param exception exception to convert from
     */
    private void handleWebException(WebApplicationException exception) {
        Response response = exception.getResponse();
        if (response == null) {
            throw new AuxiliaryStorageException("Mapping exception error: response is null");
        }
        int responseStatus = response.getStatus();
        // Map well-known HTTP statuses onto the client exception hierarchy.
        if (Status.BAD_REQUEST.getStatusCode() == responseStatus) {
            throw new IllegalParameterException("Bad request server error");
        } else if (Status.NOT_FOUND.getStatusCode() == responseStatus) {
            throw new ObjectNotFoundException("Object not found in auxiliary storage");
        } else if (Status.CONFLICT.getStatusCode() == responseStatus) {
            throw new ObjectAlreadyExistsException("Object already exists in auxiliary storage");
        } else if (Status.INTERNAL_SERVER_ERROR.getStatusCode() == responseStatus) {
            throw new AuxiliaryStorageException("Internal server error");
        } else {
            // Any unrecognized status collapses to the generic storage exception.
            throw new AuxiliaryStorageException("Unknown server error");
        }
    }
}
public class GetDedicatedIpsResult { /** * A list of dedicated IP addresses that are reserved for use by your Amazon Pinpoint account .
* < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use
* { @ link # setDedicatedIps ( java . util . Collection ) } or { @ link # withDedicatedIps ( java . util . Collection ) } if you want to
* override the existing values .
* @ param dedicatedIps
* A list of dedicated IP addresses that are reserved for use by your Amazon Pinpoint account .
* @ return Returns a reference to this object so that method calls can be chained together . */
public GetDedicatedIpsResult withDedicatedIps ( DedicatedIp ... dedicatedIps ) { } } | if ( this . dedicatedIps == null ) { setDedicatedIps ( new java . util . ArrayList < DedicatedIp > ( dedicatedIps . length ) ) ; } for ( DedicatedIp ele : dedicatedIps ) { this . dedicatedIps . add ( ele ) ; } return this ; |
public class StreamUtils { /** * Copy the contents of the given String to the given output OutputStream .
* Leaves the stream open when done .
* @ param in the String to copy from
* @ param charset the Charset
* @ param out the OutputStream to copy to
* @ throws IOException in case of I / O errors */
public static void copy ( String in , Charset charset , OutputStream out ) throws IOException { } } | Writer writer = new OutputStreamWriter ( out , charset ) ; writer . write ( in ) ; writer . flush ( ) ; |
public class Enhancements {
    /**
     * Returns the {@link Collection} of extracted categories (topics) for the analyzed content.
     *
     * @return the topic annotations stored in the enhancements map, possibly null/empty
     */
    @SuppressWarnings("unchecked")
    public Collection<TopicAnnotation> getCategories() {
        Collection<? extends Enhancement> result = enhancements.get(TopicAnnotation.class);
        // Unchecked downcast: the map is keyed by annotation class, so the entry under
        // TopicAnnotation.class is expected to hold only TopicAnnotation instances.
        return (Collection<TopicAnnotation>) result; // Should be safe. Needs to be tested
    }
}
public class CreateApplicationVersionResult { /** * An array of parameter types supported by the application .
* @ param parameterDefinitions
* An array of parameter types supported by the application . */
public void setParameterDefinitions ( java . util . Collection < ParameterDefinition > parameterDefinitions ) { } } | if ( parameterDefinitions == null ) { this . parameterDefinitions = null ; return ; } this . parameterDefinitions = new java . util . ArrayList < ParameterDefinition > ( parameterDefinitions ) ; |
public class SocketInputStream {
    /**
     * Reads up to len bytes into b. This method will not return until len bytes have
     * been read or the stream has been closed; it blocks across session-service
     * framing, skipping keep-alive packets between messages.
     * NOTE(review): tot, bip, n and header appear to be instance fields shared under
     * this method's synchronization — confirm against the rest of the class.
     *
     * @param b   destination buffer
     * @param off offset into b at which to start storing bytes
     * @param len maximum number of bytes to read
     * @return number of bytes read, or -1 on end of stream
     * @throws IOException on I/O error
     */
    public synchronized int read(byte[] b, int off, int len) throws IOException {
        if (len == 0) {
            return 0;
        }
        tot = 0;
        while (true) {
            // bip = bytes remaining in the current session-service message payload.
            while (bip > 0) {
                n = in.read(b, off, Math.min(len, bip));
                if (n == -1) {
                    // Underlying stream ended mid-message.
                    return tot > 0 ? tot : -1;
                }
                tot += n;
                off += n;
                len -= n;
                bip -= n;
                if (len == 0) {
                    return tot;
                }
            }
            // Current message exhausted: read the next packet header.
            switch (SessionServicePacket.readPacketType(in, header, 0)) {
                case SessionServicePacket.SESSION_KEEP_ALIVE:
                    // Keep-alives carry no payload; loop for the next packet.
                    break;
                case SessionServicePacket.SESSION_MESSAGE:
                    bip = SessionServicePacket.readLength(header, 0);
                    break;
                case -1:
                    // Stream closed while waiting for a header.
                    if (tot > 0) {
                        return tot;
                    }
                    return -1;
            }
        }
    }
}
public class SAMLConfigurerBean { /** * Returns a request { @ link RequestMatcher } that matches all the SAML endpoints configured by the user :
* defaultFailureURL , ssoProcessingURL , ssoHoKProcessingURL , discoveryProcessingURL , idpSelectionPageURL ,
* ssoLoginURL , metadataURL , defaultTargetURL , logoutURL and singleLogoutURL .
* To be used with { @ link HttpSecurity # authorizeRequests ( ) } in this fashion :
* < pre >
* http
* . authorizeRequests ( )
* . requestMatchers ( samlConfigurerBean . endpointsMatcher ( ) )
* . permitAll ( )
* < / pre >
* So that all the configured URLs can bypass security .
* @ return the { @ link RequestMatcher } */
public RequestMatcher endpointsMatcher ( ) { } } | ServiceProviderEndpoints endpoints = Optional . of ( serviceProviderBuilder ) . map ( builder -> builder . getSharedObject ( ServiceProviderEndpoints . class ) ) . orElseThrow ( ( ) -> new IllegalStateException ( "Can't find SAML Endpoints" ) ) ; return new LazyEndpointsRequestMatcher ( endpoints ) ; |
public class StepExecutionSupport { /** * ( non - Javadoc )
* @ see org . springframework . batch . core . StepExecutionListener # afterStep ( org .
* springframework . batch . core . StepExecution ) */
@ Override public ExitStatus afterStep ( StepExecution stepExecution ) { } } | ExitStatus status = stepExecution . getExitStatus ( ) ; return status ; |
public class ScriptController {
    /**
     * Deletes the script identified by the path variable, then returns the remaining
     * scripts so the client can refresh its listing.
     *
     * @param model            MVC model, forwarded to getScripts
     * @param scriptIdentifier numeric script id, as a path string
     * @return the current scripts map, as produced by getScripts
     * @throws Exception propagated from parsing, removal or listing
     */
    @RequestMapping(value = "/api/scripts/{scriptIdentifier}", method = RequestMethod.DELETE)
    public @ResponseBody HashMap<String, Object> deleteScript(Model model, @PathVariable String scriptIdentifier) throws Exception {
        // A non-numeric identifier makes parseInt throw; that propagates to the caller.
        int scriptId = Integer.parseInt(scriptIdentifier);
        ScriptService.getInstance().removeScript(scriptId);
        return this.getScripts(model, null);
    }
}
public class MetricsUtils {
    /**
     * Send the given document to the given Elasticsearch URL/Index via HTTP PUT.
     * Authentication can be provided via the configured {@link HttpClient} instance.
     *
     * @param json       the json-string to store as document
     * @param httpClient the HTTP Client that can be used to send metrics;
     *                   this can also contain credentials for basic authentication if necessary
     * @param url        the base URL where Elasticsearch is available
     * @throws IOException if the HTTP call fails with an HTTP status code
     */
    public static void sendDocument(String json, CloseableHttpClient httpClient, String url) throws IOException {
        final HttpPut httpPut = new HttpPut(url);
        httpPut.addHeader("Content-Type", NanoHTTPD.MIME_JSON);
        httpPut.setEntity(new StringEntity(json, ContentType.APPLICATION_JSON));
        // try-with-resources closes the response even when checkAndFetch throws.
        try (CloseableHttpResponse response = httpClient.execute(httpPut)) {
            HttpEntity entity = HttpClientWrapper.checkAndFetch(response, url);
            try {
                log.info("Had result when sending document to Elasticsearch at " + url + ": "
                        + IOUtils.toString(entity.getContent(), "UTF-8"));
            } finally {
                // ensure all content is taken out to free resources
                EntityUtils.consume(entity);
            }
        }
    }
}
public class MotorcycleFlagEncoder {
    /**
     * Define the place of the speedBits in the edge flags for this encoder and
     * register the additional motorcycle-specific encoded values.
     */
    @Override
    public void createEncodedValues(List<EncodedValue> registerNewEncodedValue, String prefix, int index) {
        // first two bits are reserved for route handling in superclass
        super.createEncodedValues(registerNewEncodedValue, prefix, index);
        // 3-bit priority value, scaled by the factor of PriorityCode 1.
        registerNewEncodedValue.add(priorityWayEncoder = new FactorizedDecimalEncodedValue(prefix + "priority", 3, PriorityCode.getFactor(1), false));
        // 4-bit curvature value at 0.1 resolution.
        registerNewEncodedValue.add(curvatureEncoder = new FactorizedDecimalEncodedValue(prefix + "curvature", 4, 0.1, false));
    }
}
public class Util { /** * Gets the password from an user info string obtained from the starting URL .
* @ param userInfo
* userInfo , taken from the URL . If no user info specified returning
* null . If user info not null but no password after " : " then
* returning empty , ( we have a user so the default pass for him is
* empty string ) . See bug 6086.
* @ return The password . */
private static String extractPassword ( String userInfo ) { } } | if ( userInfo == null ) { return null ; } String password = "" ; int index = userInfo . lastIndexOf ( ':' ) ; if ( index != - 1 && index < userInfo . length ( ) - 1 ) { // Extract password from the URL .
password = userInfo . substring ( index + 1 ) ; } return password ; |
public class ViewUtils {
    /**
     * Hides the system bars (status and navigation) using immersive mode.
     * Requires KitKat (API 19) for SYSTEM_UI_FLAG_IMMERSIVE.
     *
     * @param activity activity whose window decor view is adjusted
     */
    @TargetApi(VERSION_CODES.KITKAT)
    public static void hideSystemUI(Activity activity) {
        // Set the IMMERSIVE flag.
        // Set the content to appear under the system bars so that the content
        // doesn't resize when the system bars hide and show.
        View decorView = activity.getWindow().getDecorView();
        decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_STABLE
                | View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
                | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
                | View.SYSTEM_UI_FLAG_HIDE_NAVIGATION // hide nav bar
                | View.SYSTEM_UI_FLAG_FULLSCREEN // hide status bar
                | View.SYSTEM_UI_FLAG_IMMERSIVE);
    }
}
public class JEPLResultSetDAOImpl {
    /**
     * From here on: implementation of the List interface methods.
     * Returns the row count, fetching the ResultSet to the end first (unless a
     * debugger is detected calling size() prematurely — see below).
     */
    @Override
    public int size() {
        if (!constructedObject) {
            // size() was called before the object finished constructing; this happens in
            // NetBeans' debug mode, which shows a List's size() in the "Value of Variable"
            // field. Note we can get here before the constructor runs and before the field
            // initial values are assigned (fields must have no initializers, so as not to
            // overwrite detectedDebugMode).
            this.detectedDebugMode = true;
        }
        boolean closed = isClosed();
        if (detectedDebugMode && !closed) {
            JEPLException ex = new JEPLException("size() method cannot be called in debug mode and ResultSet not closed");
            ex.printStackTrace();
            throw ex;
        }
        fetchToTheEndIfNotClosed();
        return internalList.size();
    }
}
public class WSClientConfig {
    /**
     * Set the {@link SSLSocketFactory} to be used by this client to one that trusts
     * all servers.
     * SECURITY NOTE: trusting every certificate disables protection against
     * man-in-the-middle attacks — intended for debugging/testing, not production use
     * against untrusted networks.
     *
     * @param bDebugMode <code>true</code> for extended debug logging, <code>false</code> for production
     * @throws KeyManagementException if initializing the SSL context failed
     * @return this for chaining
     * @since 9.1.5
     */
    @Nonnull
    public final WSClientConfig setSSLSocketFactoryTrustAll(final boolean bDebugMode) throws KeyManagementException {
        try {
            final SSLContext aSSLContext = SSLContext.getInstance("TLSv1.2");
            // No key managers, a single trust-all trust manager, and a secure RNG.
            aSSLContext.init(null, new TrustManager[] { new TrustManagerTrustAll(bDebugMode) }, RandomHelper.getSecureRandom());
            final SSLSocketFactory aSF = aSSLContext.getSocketFactory();
            return setSSLSocketFactory(aSF);
        } catch (final NoSuchAlgorithmException ex) {
            // TLS 1.2 is mandatory on supported JREs; absence indicates a broken platform.
            throw new IllegalStateException("TLS 1.2 is not supported", ex);
        }
    }
}
public class DescribeReservedInstancesListingsRequest {
    /**
     * This method is intended for internal use only. Returns the marshaled request
     * configured with additional parameters to enable operation dry-run.
     *
     * @return the marshaled request with the DryRun parameter set to "true"
     */
    @Override
    public Request<DescribeReservedInstancesListingsRequest> getDryRunRequest() {
        Request<DescribeReservedInstancesListingsRequest> request = new DescribeReservedInstancesListingsRequestMarshaller().marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.