signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class WebTarget {
    /**
     * Resolves the URI template parameters of the current target instance using the supplied
     * name-value pairs and starts building a request to the targeted web resource.
     *
     * @param templateValues a map of URI template names and their values.
     * @return builder for a request targeted at the URI referenced by the resolved target instance.
     * @throws IllegalArgumentException if the URI cannot be built from the supplied template values.
     */
    public RequestInvoker resolve(Map<String, ?> templateValues) {
        final Uri resolvedUri;
        try {
            resolvedUri = uriBuilder.build(templateValues);
        } catch (Exception e) {
            // Wrap any builder failure (e.g. missing/invalid template value) as an argument error.
            throw new IllegalArgumentException("Could not build the URI with the supplied template values.", e);
        }
        return createRequest(resolvedUri.toString());
    }
}
public class Request {
    /**
     * Initializes the per-thread Request state from the incoming servlet request.
     *
     * @param req the incoming servlet request
     */
    protected final static void init(HttpServletRequest req) {
        // -- Character-set filtering: apply the configured charset to the request.
        String charset = HuluSetting.charset;
        try {
            req.setCharacterEncoding(charset);
        } catch (Exception e) {
            // Unsupported charset is logged but not fatal.
            log.warn("Charset [{}] not support!", charset);
        }
        servletRequestLocal.set(req);
        if (isMultipart()) {
            try {
                // Parse multipart form data eagerly and stash it in the thread-local.
                multipartFormDataLocal.set(parseMultipart());
            } catch (IOException e) {
                throw new ActionRuntimeException(e);
            }
        }
    }
}
public class ApiOvhDomain {
    /**
     * Add whois obfuscators.
     *
     * REST: POST /domain/{serviceName}/owo
     *
     * @param fields [required] Fields to obfuscate
     * @param serviceName [required] The internal name of your domain
     * @throws IOException if the HTTP call fails
     */
    public ArrayList<OvhWhoisObfuscatorFieldsEnum> serviceName_owo_POST(String serviceName, OvhWhoisObfuscatorFieldsEnum[] fields) throws IOException {
        String qPath = "/domain/{serviceName}/owo";
        // Substitute the {serviceName} template segment into the request path.
        StringBuilder sb = path(qPath, serviceName);
        HashMap<String, Object> o = new HashMap<String, Object>();
        addBody(o, "fields", fields);
        String resp = exec(qPath, "POST", sb.toString(), o);
        // t7 is presumably the type token matching this endpoint's JSON response shape.
        return convertTo(resp, t7);
    }
}
public class OpenShiftManagedClustersInner {
    /**
     * Updates tags on an OpenShift managed cluster.
     * Updates an OpenShift managed cluster with the specified tags.
     *
     * @param resourceGroupName The name of the resource group.
     * @param resourceName The name of the OpenShift managed cluster resource.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<OpenShiftManagedClusterInner> updateTagsAsync(String resourceGroupName, String resourceName) {
        // Delegate to the ServiceResponse variant and unwrap the body for callers.
        return updateTagsWithServiceResponseAsync(resourceGroupName, resourceName)
                .map(new Func1<ServiceResponse<OpenShiftManagedClusterInner>, OpenShiftManagedClusterInner>() {
                    @Override
                    public OpenShiftManagedClusterInner call(ServiceResponse<OpenShiftManagedClusterInner> response) {
                        return response.body();
                    }
                });
    }
}
public class StringGrabber { /** * Insert string into the first
* @ param str
* @ return */
public StringGrabber insertIntoHead ( String str ) { } } | if ( str != null ) { try { sb . insert ( 0 , str ) ; } catch ( Exception e ) { e . printStackTrace ( ) ; } } return StringGrabber . this ; |
public class Options { /** * Gets the Javadoc version .
* @ return The Javadoc version . */
public JavadocQuirks getJavadocVersion ( ) { } } | if ( javadocVersion == null ) { String javaVersion = System . getProperty ( "java.version" ) ; if ( javaVersion != null && javaVersion . compareTo ( "1.8" ) >= 0 ) { return JavadocQuirks . V8 ; } else { return JavadocQuirks . V7 ; } } return javadocVersion ; |
public class AbstractFilterRegistrationBean {
    /**
     * Set the URL patterns that the filter will be registered against. This will replace
     * any previously specified URL patterns.
     *
     * @param urlPatterns the URL patterns
     * @see #setServletRegistrationBeans
     * @see #setServletNames
     */
    public void setUrlPatterns(Collection<String> urlPatterns) {
        Assert.notNull(urlPatterns, "UrlPatterns must not be null");
        // Copy into a LinkedHashSet to de-duplicate while preserving insertion order.
        this.urlPatterns = new LinkedHashSet<>(urlPatterns);
    }
}
public class WeakValueHashMap {
    /**
     * Maps the given key to the given value, wrapping the value in a {@link WeakEntry}
     * so it can be garbage-collected once no longer strongly referenced elsewhere.
     * Entries whose referents were already collected are purged first.
     * (The original javadoc said "get()." — that was a copy-paste error.)
     *
     * @param key the map key
     * @param value the value to store (weakly referenced)
     * @return the previous value for the key, or null if there was none or it was collected
     */
    public final Object put(Object key, Object value) {
        // Drop entries whose referents have already been collected.
        clearUnreferencedEntries();
        WeakEntry weakEntry = (WeakEntry) super.put(key, new WeakEntry(key, value));
        Object existingValue = null;
        if (weakEntry != null) {
            existingValue = weakEntry.get();
            // Clear the displaced reference so it is not enqueued later.
            weakEntry.clear();
        }
        return existingValue;
    }
}
public class ElemLiteralResult {
    /**
     * Set the "xml:space" attribute.
     * A text node is preserved if an ancestor element of the text node
     * has an xml:space attribute with a value of preserve, and
     * no closer ancestor element has xml:space with a value of default.
     *
     * @see <a href="http://www.w3.org/TR/xslt#strip">strip in XSLT Specification</a>
     * @see <a href="http://www.w3.org/TR/xslt#section-Creating-Text">section-Creating-Text in XSLT Specification</a>
     * @param avt Enumerated value, either Constants.ATTRVAL_PRESERVE
     *            or Constants.ATTRVAL_STRIP.
     */
    public void setXmlSpace(AVT avt) {
        // This function is a bit-o-hack, I guess...
        // Record the attribute on the literal result element, then try to resolve it
        // immediately if it is a plain string.
        addLiteralResultAttribute(avt);
        String val = avt.getSimpleString();
        if (val.equals("default")) {
            super.setXmlSpace(Constants.ATTRVAL_STRIP);
        } else if (val.equals("preserve")) {
            super.setXmlSpace(Constants.ATTRVAL_PRESERVE);
        }
        // else maybe it's a real AVT, so we can't resolve it at this time.
    }
}
public class FastCornerDetector {
    /**
     * Computes fast corner features and their intensity. The intensity is needed if non-max
     * suppression is used.
     *
     * @param image the input image
     * @param intensity output image of per-pixel corner scores
     */
    public void process(T image, GrayF32 intensity) {
        // Upper bound on detected features, as a fraction of the pixel count.
        int maxFeatures = (int) (maxFeaturesFraction * image.width * image.height);
        candidatesLow.reset();
        candidatesHigh.reset();
        this.image = image;
        // Precomputed circle offsets depend on the image stride; rebuild only when it changes.
        if (stride != image.stride) {
            stride = image.stride;
            offsets = DiscretizedCircle.imageOffsets(radius, image.stride);
        }
        helper.setImage(image, offsets);
        // Skip a border of 'radius' pixels so the sampling circle stays inside the image.
        for (int y = radius; y < image.height - radius; y++) {
            int indexIntensity = intensity.startIndex + y * intensity.stride + radius;
            int index = image.startIndex + y * image.stride + radius;
            for (int x = radius; x < image.width - radius; x++, index++, indexIntensity++) {
                // result < 0: "lower" (darker) corner candidate; > 0: "upper" (brighter); 0: none.
                int result = helper.checkPixel(index);
                if (result < 0) {
                    intensity.data[indexIntensity] = helper.scoreLower(index);
                    candidatesLow.add(x, y);
                } else if (result > 0) {
                    intensity.data[indexIntensity] = helper.scoreUpper(index);
                    candidatesHigh.add(x, y);
                } else {
                    intensity.data[indexIntensity] = 0;
                }
            }
            // check on a per row basis to reduce impact on performance
            if (candidatesLow.size + candidatesHigh.size >= maxFeatures) break;
        }
    }
}
public class ObjectFactory {
    /**
     * Create an instance of {@link JAXBElement}{@code <}{@link Boolean}{@code >}
     * for the "includeACL" element scoped to {@link GetContentChanges}.
     */
    @XmlElementDecl(namespace = "http://docs.oasis-open.org/ns/cmis/messaging/200908/", name = "includeACL", scope = GetContentChanges.class)
    public JAXBElement<Boolean> createGetContentChangesIncludeACL(Boolean value) {
        // NOTE(review): reuses _GetObjectOfLatestVersionIncludeACL_QNAME — presumably the
        // QName {namespace}includeACL is identical across scopes in this generated code;
        // verify it matches the element declared in the annotation above.
        return new JAXBElement<Boolean>(_GetObjectOfLatestVersionIncludeACL_QNAME, Boolean.class, GetContentChanges.class, value);
    }
}
public class UfsJournalSnapshot {
    /**
     * Gets the current log (the incomplete log) that is being written to.
     *
     * @param journal the journal
     * @return the current log, or null if no incomplete log exists
     * @throws IOException if listing the journal's log directory fails
     */
    @VisibleForTesting
    public static UfsJournalFile getCurrentLog(UfsJournal journal) throws IOException {
        List<UfsJournalFile> logs = new ArrayList<>();
        UfsStatus[] statuses = journal.getUfs().listStatus(journal.getLogDir().toString());
        if (statuses != null) {
            for (UfsStatus status : statuses) {
                // Skip files not recognized as journal log files (decode returns null).
                UfsJournalFile file = UfsJournalFile.decodeLogFile(journal, status.getName());
                if (file != null) {
                    logs.add(file);
                }
            }
            if (!logs.isEmpty()) {
                // The "current" log is the greatest by the file ordering, and only counts
                // if it is still incomplete.
                UfsJournalFile file = Collections.max(logs);
                if (file.isIncompleteLog()) {
                    return file;
                }
            }
        }
        return null;
    }
}
public class DeviceProxy {
    /**
     * Retrieves the reply of a previously issued asynchronous read_attribute call.
     *
     * @param id the asynchronous call identifier
     * @param timeout maximum time to wait for the reply
     * @throws DevFailed if the device call failed
     * @throws AsynReplyNotArrived if no reply arrived within the timeout
     */
    public DeviceAttribute[] read_attribute_reply(int id, int timeout) throws DevFailed, AsynReplyNotArrived {
        // Thin delegation to the DAO implementation.
        return deviceProxyDAO.read_attribute_reply(this, id, timeout);
    }
}
public class DatabasesInner {
    /**
     * Imports a bacpac into a new database.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can
     *        obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param parameters The required parameters for importing a Bacpac into a database.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the ImportExportResponseInner object if successful.
     */
    public ImportExportResponseInner beginImportMethod(String resourceGroupName, String serverName, ImportRequest parameters) {
        // Blocking wrapper around the async variant; unwraps the single response body.
        return beginImportMethodWithServiceResponseAsync(resourceGroupName, serverName, parameters).toBlocking().single().body();
    }
}
public class LauncherUtils {
    /**
     * Delete a directory and all subdirectories.
     *
     * @param customProfileDir the directory to delete; null or a non-existent directory is a no-op
     */
    public static void recursivelyDeleteDir(File customProfileDir) {
        if (customProfileDir == null || !customProfileDir.exists()) {
            return;
        }
        // Use the Ant Delete task for recursive deletion; failOnError surfaces problems
        // as a build exception instead of silently continuing.
        Delete delete = new Delete();
        delete.setProject(new Project());
        delete.setDir(customProfileDir);
        delete.setFailOnError(true);
        delete.execute();
    }
}
public class ListeFilme {
    /**
     * Builds the string arrays of topics ("Themen") per sender; for the empty sender ""
     * the array covers all senders. Used for the filter fields in GuiFilme.
     */
    @SuppressWarnings("unchecked")
    public synchronized void themenLaden() {
        Duration.counterStart(THEME_SEARCH_TEXT);
        LinkedHashSet<String> senderSet = new LinkedHashSet<>(21);
        // the first sender entry is "" (meaning "all senders")
        senderSet.add("");
        for (DatenFilm film : this) {
            senderSet.add(film.arr[DatenFilm.FILM_SENDER]);
        }
        sender = senderSet.toArray(new String[senderSet.size()]);
        senderSet.clear();
        // for sender "" all topics end up in themenPerSender[0]
        final int senderLength = sender.length;
        themenPerSender = new String[senderLength][];
        TreeSet<String>[] tree = (TreeSet<String>[]) new TreeSet<?>[senderLength];
        HashSet<String>[] hashSet = (HashSet<String>[]) new HashSet<?>[senderLength];
        for (int i = 0; i < tree.length; ++i) {
            // TreeSet gives sorted output (German collation); HashSet is a fast membership check.
            tree[i] = new TreeSet<>(de.mediathekview.mlib.tool.GermanStringSorter.getInstance());
            tree[i].add("");
            hashSet[i] = new HashSet<>();
        }
        // collect all topics
        String filmThema, filmSender;
        for (DatenFilm film : this) {
            filmSender = film.arr[DatenFilm.FILM_SENDER];
            filmThema = film.arr[DatenFilm.FILM_THEMA];
            // add topic to the global bucket [0], then to the matching sender's bucket
            if (!hashSet[0].contains(filmThema)) {
                hashSet[0].add(filmThema);
                tree[0].add(filmThema);
            }
            for (int i = 1; i < senderLength; ++i) {
                if (filmSender.equals(sender[i])) {
                    if (!hashSet[i].contains(filmThema)) {
                        hashSet[i].add(filmThema);
                        tree[i].add(filmThema);
                    }
                }
            }
        }
        for (int i = 0; i < themenPerSender.length; ++i) {
            themenPerSender[i] = tree[i].toArray(new String[tree[i].size()]);
            tree[i].clear();
            hashSet[i].clear();
        }
        Duration.counterStop(THEME_SEARCH_TEXT);
    }
}
public class KeyRange {
    /**
     * Create a {@link KeyRangeType#BACKWARD_CLOSED_OPEN} range.
     *
     * @param <T> buffer type
     * @param start start key (required)
     * @param stop stop key (required)
     * @return a key range (never null)
     */
    public static <T> KeyRange<T> closedOpenBackward(final T start, final T stop) {
        return new KeyRange<>(KeyRangeType.BACKWARD_CLOSED_OPEN, start, stop);
    }
}
public class LifecycleApproveChaincodeDefinitionForMyOrgRequest {
    /**
     * The name of the chaincode to approve.
     *
     * @param chaincodeName the chaincode name; must be non-null and non-empty
     * @throws InvalidArgumentException if the name is null or empty
     */
    public void setChaincodeName(String chaincodeName) throws InvalidArgumentException {
        if (Utils.isNullOrEmpty(chaincodeName)) {
            throw new InvalidArgumentException("The chaincodeName parameter can not be null or empty.");
        }
        this.chaincodeName = chaincodeName;
    }
}
public class MultiDimensionalMap {
    /**
     * Associates the specified value with the specified key in this map
     * (optional operation). If the map previously contained a mapping for
     * the key, the old value is replaced by the specified value. (A map
     * <tt>m</tt> is said to contain a mapping for a key <tt>k</tt> if and only
     * if {@link #containsKey(Object) m.containsKey(k)} would return <tt>true</tt>.)
     *
     * @param key key with which the specified value is to be associated
     * @param value value to be associated with the specified key
     * @return the previous value associated with <tt>key</tt>, or <tt>null</tt>
     *         if there was no mapping for <tt>key</tt>. (A <tt>null</tt> return
     *         can also indicate that the map previously associated <tt>null</tt>
     *         with <tt>key</tt>, if the implementation supports <tt>null</tt> values.)
     * @throws UnsupportedOperationException if the <tt>put</tt> operation
     *         is not supported by this map
     * @throws ClassCastException if the class of the specified key or value
     *         prevents it from being stored in this map
     * @throws NullPointerException if the specified key or value is null
     *         and this map does not permit null keys or values
     * @throws IllegalArgumentException if some property of the specified key
     *         or value prevents it from being stored in this map
     */
    public V put(Pair<K, T> key, V value) {
        // Straight delegation to the backing map; contract is the backing map's.
        return backedMap.put(key, value);
    }
}
public class JsonRpcClientHandler {
    /**
     * Returns a new provisional response suited to receive results of a given
     * protobuf message type.
     *
     * @param method the method invocation for which a response should be created
     * @param <O> the type of the message this response will handle
     */
    <O extends Message> JsonResponseFuture<O> newProvisionalResponse(ClientMethod<O> method) {
        // Random request id used to correlate the eventual response with this future.
        // NOTE(review): random longs are not guaranteed unique — a collision would clash in
        // inFlightRequests; confirm this is acceptable for the expected request volume.
        long requestId = RANDOM.nextLong();
        JsonResponseFuture<O> outputFuture = new JsonResponseFuture<>(requestId, method);
        inFlightRequests.put(requestId, outputFuture);
        return outputFuture;
    }
}
public class AbstractResultSetWrapper {
    /**
     * {@inheritDoc}
     *
     * @see java.sql.ResultSet#updateRef(int, java.sql.Ref)
     */
    @Override
    public void updateRef(final int columnIndex, final Ref x) throws SQLException {
        // Delegate to the wrapped ResultSet.
        wrapped.updateRef(columnIndex, x);
    }
}
public class Manager {
    /**
     * Asynchronously dispatches a callback to run on a background thread. The callback will be
     * passed the Database instance. There is not currently a known reason to use it, it may not
     * make sense on the Android API, but it was added for the purpose of having a consistent API
     * with iOS.
     *
     * @exclude
     */
    @InterfaceAudience.Private
    public Future runAsync(String databaseName, final AsyncTask function) throws CouchbaseLiteException {
        // Resolve the database up front so lookup failures surface synchronously,
        // before the background task is scheduled.
        final Database database = getDatabase(databaseName);
        return runAsync(new Runnable() {
            @Override
            public void run() {
                function.run(database);
            }
        });
    }
}
public class XClass { /** * Checks if all the passed members belong to the given target class . */
private void checkMembers ( XMember < ? > [ ] members ) { } } | for ( int index = 0 ; index < members . length ; index ++ ) { final XMember < ? > member = members [ index ] ; if ( ! member . getMember ( ) . getDeclaringClass ( ) . isAssignableFrom ( getTargetClass ( ) ) ) { throw new IllegalArgumentException ( "Member [" + member + "] does not belong to the target class [" + targetClass + "]." ) ; } } |
public class CommerceWarehouseItemPersistenceImpl {
    /**
     * Returns the commerce warehouse item where commerceWarehouseId = &#63; and CPInstanceUuid = &#63;
     * or throws a {@link NoSuchWarehouseItemException} if it could not be found.
     *
     * @param commerceWarehouseId the commerce warehouse ID
     * @param CPInstanceUuid the cp instance uuid
     * @return the matching commerce warehouse item
     * @throws NoSuchWarehouseItemException if a matching commerce warehouse item could not be found
     */
    @Override
    public CommerceWarehouseItem findByCWI_CPIU(long commerceWarehouseId, String CPInstanceUuid)
        throws NoSuchWarehouseItemException {
        CommerceWarehouseItem commerceWarehouseItem = fetchByCWI_CPIU(commerceWarehouseId, CPInstanceUuid);
        if (commerceWarehouseItem == null) {
            // Build a descriptive "no such entity" message for both the debug log
            // and the thrown exception.
            StringBundler msg = new StringBundler(6);
            msg.append(_NO_SUCH_ENTITY_WITH_KEY);
            msg.append("commerceWarehouseId=");
            msg.append(commerceWarehouseId);
            msg.append(", CPInstanceUuid=");
            msg.append(CPInstanceUuid);
            msg.append("}");
            if (_log.isDebugEnabled()) {
                _log.debug(msg.toString());
            }
            throw new NoSuchWarehouseItemException(msg.toString());
        }
        return commerceWarehouseItem;
    }
}
public class ArabicShaping {
    /**
     * Name: getLink
     * Function: Resolves the link between the characters, as
     * Arabic characters have four forms:
     * Isolated, Initial, Middle and Final Form.
     *
     * @param ch the character to look up
     * @return the linking attributes for the character, or 0 if it has none
     */
    private static int getLink(char ch) {
        if (ch >= '\u0622' && ch <= '\u06D3') {
            // Arabic block: table lookup.
            return araLink[ch - '\u0622'];
        } else if (ch == '\u200D') {
            // Zero-width joiner.
            return 3;
        } else if (ch >= '\u206D' && ch <= '\u206F') {
            // Deprecated Arabic format-control characters.
            return 4;
        } else if (ch >= '\uFE70' && ch <= '\uFEFC') {
            // Arabic presentation forms-B: table lookup.
            return presLink[ch - '\uFE70'];
        } else {
            return 0;
        }
    }
}
public class DescribeSubscribedWorkteamRequestMarshaller {
    /**
     * Marshall the given parameter object.
     *
     * @param describeSubscribedWorkteamRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller to write the request into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall(DescribeSubscribedWorkteamRequest describeSubscribedWorkteamRequest, ProtocolMarshaller protocolMarshaller) {
        if (describeSubscribedWorkteamRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(describeSubscribedWorkteamRequest.getWorkteamArn(), WORKTEAMARN_BINDING);
        } catch (Exception e) {
            // Preserve the cause while normalizing to the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class ImgUtil {
    /**
     * Converts a color image to a binarized (black-and-white) image.<br>
     * This method does not close the stream; the result is written in the given format.
     *
     * @param srcImage source image
     * @param destImageStream destination image stream
     * @param imageType image format (file extension)
     * @since 4.0.5
     * @throws IORuntimeException on I/O error
     */
    public static void binary(Image srcImage, ImageOutputStream destImageStream, String imageType) throws IORuntimeException {
        // Binarize, then encode to the destination stream in the requested format.
        write(binary(srcImage), imageType, destImageStream);
    }
}
public class TriplestoreResource {
    /**
     * Try to load a Trellis resource.
     *
     * @implSpec This method will load a {@link Resource}, initializing the object with all
     * resource metadata used with {@link #getModified}, {@link #getInteractionModel} and other
     * data fetched by the accessors. The resource content is fetched on demand via the
     * {@link #stream} method.
     * @param rdfConnection the triplestore connector
     * @param identifier the identifier
     * @return a new completion stage with a {@link Resource}, if one exists
     */
    public static CompletableFuture<Resource> findResource(final RDFConnection rdfConnection, final IRI identifier) {
        return supplyAsync(() -> {
            final TriplestoreResource res = new TriplestoreResource(rdfConnection, identifier);
            res.fetchData();
            // Map non-existent and deleted states to the corresponding sentinel resources.
            if (!res.exists()) {
                return MISSING_RESOURCE;
            } else if (res.isDeleted()) {
                return DELETED_RESOURCE;
            }
            return res;
        });
    }
}
public class ObjectUpdater {
    /**
     * Add the given DBObject to the database as a new object. No check is made to see if
     * an object with the same ID already exists. If the update is successful, updates are
     * merged to the given parent SpiderTransaction.
     *
     * @param parentTran Parent {@link SpiderTransaction} to which updates are applied
     *        if the add is successful.
     * @param dbObj DBObject to be added to the database.
     * @return {@link ObjectResult} representing the results of the update.
     */
    public ObjectResult addNewObject(SpiderTransaction parentTran, DBObject dbObj) {
        ObjectResult result = new ObjectResult();
        try {
            addBrandNewObject(dbObj);
            result.setObjectID(dbObj.getObjectID());
            result.setUpdated(true);
            parentTran.mergeSubTransaction(m_dbTran);
            m_logger.trace("addNewObject(): Object added/updated for ID={}", dbObj.getObjectID());
        } catch (Throwable ex) {
            // Deliberate catch-all: failures are reported via the result's error status
            // rather than propagated to the caller.
            buildErrorStatus(result, dbObj.getObjectID(), ex);
        }
        return result;
    }
}
public class ClientSocketAdapter {
    /**
     * Send the given object to the server using JSON serialization.
     * If no session is open the call is a no-op; serialization failures are logged and the
     * send is skipped; socket write results are reported asynchronously via the WriteCallback.
     *
     * @param o the object to send to the server
     */
    public final void sendObjectToSocket(Object o) {
        Session sess = this.getSession();
        if (sess != null) {
            String json;
            try {
                json = this.mapper.writeValueAsString(o);
            } catch (JsonProcessingException e) {
                ClientSocketAdapter.LOGGER.error("Failed to serialize object", e);
                return;
            }
            sess.getRemote().sendString(json, new WriteCallback() {
                @Override
                public void writeSuccess() {
                    ClientSocketAdapter.LOGGER.info("Send data to socket");
                }

                @Override
                public void writeFailed(Throwable x) {
                    ClientSocketAdapter.LOGGER.error("Error sending message to socket", x);
                }
            });
        }
    }
}
public class Programs {
    /**
     * Creates a {@link Program} without expanding the location jar. The
     * {@link Program#getClassLoader()} would not function from the program this method returns.
     *
     * @param location the program location
     * @param classLoader the class loader to associate with the program
     * @throws IOException if the program cannot be created
     */
    public static Program create(Location location, ClassLoader classLoader) throws IOException {
        return new DefaultProgram(location, classLoader);
    }
}
public class ZWaveController {
    /**
     * Notify our own event listeners of a Z-Wave event.
     *
     * @param event the event to send.
     */
    public void notifyEventListeners(ZWaveEvent event) {
        logger.debug("Notifying event listeners");
        for (ZWaveEventListener listener : this.zwaveEventListeners) {
            logger.trace("Notifying {}", listener.toString());
            listener.ZWaveIncomingEvent(event);
        }
    }
}
public class ScalableGame { /** * Recalculate the scale of the game
* @ throws SlickException Indicates a failure to reinit the game */
public void recalculateScale ( ) throws SlickException { } } | targetWidth = container . getWidth ( ) ; targetHeight = container . getHeight ( ) ; if ( maintainAspect ) { boolean normalIsWide = ( normalWidth / normalHeight > 1.6 ? true : false ) ; boolean containerIsWide = ( ( float ) targetWidth / ( float ) targetHeight > 1.6 ? true : false ) ; float wScale = targetWidth / normalWidth ; float hScale = targetHeight / normalHeight ; if ( normalIsWide & containerIsWide ) { float scale = ( wScale < hScale ? wScale : hScale ) ; targetWidth = ( int ) ( normalWidth * scale ) ; targetHeight = ( int ) ( normalHeight * scale ) ; } else if ( normalIsWide & ! containerIsWide ) { targetWidth = ( int ) ( normalWidth * wScale ) ; targetHeight = ( int ) ( normalHeight * wScale ) ; } else if ( ! normalIsWide & containerIsWide ) { targetWidth = ( int ) ( normalWidth * hScale ) ; targetHeight = ( int ) ( normalHeight * hScale ) ; } else { float scale = ( wScale < hScale ? wScale : hScale ) ; targetWidth = ( int ) ( normalWidth * scale ) ; targetHeight = ( int ) ( normalHeight * scale ) ; } } if ( held instanceof InputListener ) { container . getInput ( ) . addListener ( ( InputListener ) held ) ; } container . getInput ( ) . setScale ( normalWidth / targetWidth , normalHeight / targetHeight ) ; int yoffset = 0 ; int xoffset = 0 ; if ( targetHeight < container . getHeight ( ) ) { yoffset = ( container . getHeight ( ) - targetHeight ) / 2 ; } if ( targetWidth < container . getWidth ( ) ) { xoffset = ( container . getWidth ( ) - targetWidth ) / 2 ; } container . getInput ( ) . setOffset ( - xoffset / ( targetWidth / normalWidth ) , - yoffset / ( targetHeight / normalHeight ) ) ; |
public class DefaultImportsConfiguration {
    /**
     * We cannot just use importDeclaration.getImportedTypeName since that would return the name
     * from the resolved type rather than the concrete syntax.
     *
     * @return the import's concrete syntax when it uses the legacy '$' nested-class separator,
     *         otherwise null
     */
    @Override
    public String getLegacyImportSyntax(XImportDeclaration importDeclaration) {
        List<INode> list = NodeModelUtils.findNodesForFeature(importDeclaration,
                XtypePackage.Literals.XIMPORT_DECLARATION__IMPORTED_TYPE);
        if (list.isEmpty()) {
            return null;
        }
        INode singleNode = list.get(0);
        // Only '$'-separated (legacy) syntax is of interest.
        if (singleNode.getText().indexOf('$') < 0) {
            return null;
        }
        // Concatenate the visible leaf nodes, stripping the '^' escape character.
        StringBuilder sb = new StringBuilder();
        for (ILeafNode node : singleNode.getLeafNodes()) {
            if (!node.isHidden()) {
                sb.append(node.getText().replace("^", ""));
            }
        }
        return sb.toString();
    }
}
public class DriverTag {
    /**
     * Setter method for the scope of the variable to hold the result.
     *
     * @param scopeName one of "page", "request", "session" or "application"
     */
    public void setScope(String scopeName) {
        // NOTE(review): an unrecognized scope name is silently ignored, leaving the
        // previous scope in effect — confirm this matches the tag's documented behavior.
        if ("page".equals(scopeName)) {
            scope = PageContext.PAGE_SCOPE;
        } else if ("request".equals(scopeName)) {
            scope = PageContext.REQUEST_SCOPE;
        } else if ("session".equals(scopeName)) {
            scope = PageContext.SESSION_SCOPE;
        } else if ("application".equals(scopeName)) {
            scope = PageContext.APPLICATION_SCOPE;
        }
    }
}
public class LongTupleDistanceFunctions {
    /**
     * Returns a new comparator that compares {@link LongTuple} instances by their distance
     * to the given reference, according to the given distance function.
     * A copy of the given reference point will be stored, so that changes in the given
     * point will not affect the returned comparator.
     *
     * @param reference The reference point
     * @param distanceFunction The distance function
     * @return The comparator
     */
    public static Comparator<LongTuple> byDistanceComparator(LongTuple reference,
            final ToDoubleBiFunction<? super LongTuple, ? super LongTuple> distanceFunction) {
        // Defensive copy: the comparator must not observe later mutations of 'reference'.
        final LongTuple fReference = LongTuples.copy(reference);
        return new Comparator<LongTuple>() {
            @Override
            public int compare(LongTuple t0, LongTuple t1) {
                double d0 = distanceFunction.applyAsDouble(fReference, t0);
                double d1 = distanceFunction.applyAsDouble(fReference, t1);
                // Double.compare handles NaN and -0.0 consistently.
                return Double.compare(d0, d1);
            }
        };
    }
}
public class ToPojo {
    /**
     * Returns the string representation of the code that reads a primitive array property.
     *
     * @param ref The reference.
     * @param source The type of the reference.
     * @param property The property to read.
     * @return The code.
     */
    private static String readPrimitiveArrayValue(String ref, TypeDef source, Property property) {
        StringBuilder sb = new StringBuilder();
        Method getter = getterOf(source, property);
        // Emits code that: reads the (possibly defaulted) value from the map via
        // getOrDefault, streams it, and collects into a new array of the getter's
        // return type.
        sb.append(indent(ref))
          .append("Arrays.stream(")
          .append("(" + property.getTypeRef().toString() + ")(" + ref + " instanceof Map ? ((Map)" + ref
                  + ").getOrDefault(\"" + getterOf(source, property).getName() + "\" , "
                  + getDefaultValue(property) + ") : " + getDefaultValue(property) + ")")
          .append(".toArray(size -> new " + getter.getReturnType().toString() + "[size])");
        return sb.toString();
    }
}
public class Optimizer {
    /**
     * Nested OR clauses can be simplified to just a simple set of options. For example,
     * {@code "(a|b|(c|d|(e|f))|g)" => "a|b|c|d|e|f|g"}.
     *
     * @param matcher the matcher to flatten
     * @return a flattened OR matcher, or the input unchanged if it is not an OR
     */
    static Matcher flattenNestedOr(Matcher matcher) {
        if (matcher instanceof OrMatcher) {
            List<Matcher> matchers = matcher.<OrMatcher>as().matchers();
            List<Matcher> ms = new ArrayList<>();
            for (Matcher m : matchers) {
                // Splice a nested OR's children directly into the parent's list.
                if (m instanceof OrMatcher) {
                    ms.addAll(m.<OrMatcher>as().matchers());
                } else {
                    ms.add(m);
                }
            }
            // NOTE(review): only one nesting level is flattened per call; deeper nesting
            // presumably relies on the optimizer applying this repeatedly — confirm.
            return OrMatcher.create(ms);
        }
        return matcher;
    }
}
public class UnmodifiableSortedBag {
    /**
     * This method will take a MutableSortedBag and wrap it directly in an UnmodifiableSortedBag.
     *
     * @param bag the bag to wrap (not copied)
     * @throws IllegalArgumentException if the bag is null
     */
    public static <E, S extends MutableSortedBag<E>> UnmodifiableSortedBag<E> of(S bag) {
        if (bag == null) {
            throw new IllegalArgumentException("cannot create an UnmodifiableSortedBag for null");
        }
        return new UnmodifiableSortedBag<E>(bag);
    }
}
public class DataFrameWriter {
    /**
     * Writes the table to the given file as CSV with default options.
     * Legacy method left for backwards compatibility.
     *
     * @param file the destination file path
     * @throws IOException if writing fails
     */
    public void csv(String file) throws IOException {
        CsvWriteOptions options = CsvWriteOptions.builder(file).build();
        new CsvWriter().write(table, options);
    }
}
public class JsJmsMapMessageImpl {
    /**
     * Set a byte value with the given name, into the Map.
     * Javadoc description supplied by JsJmsMessage interface.
     */
    public void setByte(String name, byte value) throws UnsupportedEncodingException {
        // Entry/exit trace hooks follow the component's standard tracing pattern.
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "setByte", Byte.valueOf(value));
        getBodyMap().put(name, Byte.valueOf(value));
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "setByte");
    }
}
public class CassandraDataHandlerBase {
    /**
     * Deserializes a Cassandra set column of user-defined-type elements into a set of
     * embedded objects and assigns it to the entity's field.
     *
     * @param setType the cql column metadata
     * @param thriftColumnValue the thrift column value
     * @param entity the entity
     * @param field the field
     * @param metaModel the meta model
     * @param embeddedClass the embedded class
     * @param useNativeProtocol2 whether to deserialize using native protocol V2
     * @return the entity with the populated collection
     */
    private Object setElementCollectionSet(SetType setType, ByteBuffer thriftColumnValue, Object entity,
            Field field, MetamodelImpl metaModel, Class embeddedClass, boolean useNativeProtocol2) {
        SetSerializer setSerializer = setType.getSerializer();
        Collection outputCollection = new ArrayList();
        // Deserialize the raw column bytes using the matching protocol variant.
        if (useNativeProtocol2) {
            outputCollection.addAll((Collection) setSerializer.deserializeForNativeProtocol(thriftColumnValue, ProtocolVersion.V2));
        } else {
            outputCollection.addAll((Collection) setSerializer.deserialize(thriftColumnValue));
        }
        UserType usertype = (UserType) setType.getElementsType();
        Collection result = new HashSet();
        Iterator collectionItems = outputCollection.iterator();
        while (collectionItems.hasNext()) {
            // Recursively populate each UDT element into a fresh embedded instance.
            Object embeddedObject = KunderaCoreUtils.createNewInstance(embeddedClass);
            Object value = populateEmbeddedRecursive((ByteBuffer) collectionItems.next(), usertype.allTypes(), usertype.fieldNames(), embeddedObject, metaModel);
            result.add(value);
        }
        PropertyAccessorHelper.set(entity, field, result);
        return entity;
    }
}
public class WindowsZone {
    /**
     * Resolves this name reference to a set of zone IDs for the given country.
     *
     * @param country country reference
     * @return set of ids belonging to this windows zone (empty if the region is unknown)
     * @since 2.2
     */
    public Set<TZID> resolve(Locale country) {
        // Two-level lookup: windows zone name -> region -> zone ids.
        Set<TZID> ids = WinZoneProviderSPI.NAME_BASED_MAP.get(this.name).get(FormatUtils.getRegion(country));
        if (ids == null) {
            return Collections.emptySet();
        } else {
            // Never expose the internal set for mutation.
            return Collections.unmodifiableSet(ids);
        }
    }
}
public class Demo { /** * * * * * * Misc * * * * * */
private void calcNoOfNodes ( Node node ) { } } | if ( node instanceof Parent ) { if ( ( ( Parent ) node ) . getChildrenUnmodifiable ( ) . size ( ) != 0 ) { ObservableList < Node > tempChildren = ( ( Parent ) node ) . getChildrenUnmodifiable ( ) ; noOfNodes += tempChildren . size ( ) ; for ( Node n : tempChildren ) { calcNoOfNodes ( n ) ; } } } |
public class ProbeManagerImpl {
    /**
     * Deactivation callback from the Declarative Services runtime where the
     * component is deactivated. Unregisters the class transformers, clears all
     * internal bookkeeping, and retransforms previously probed classes so the
     * injected instrumentation is removed.
     *
     * @throws Exception if proxy deactivation or class retransformation fails
     */
    synchronized void deactivate() throws Exception {
        this.proxyActivator.deactivate();
        this.instrumentation.removeTransformer(this.classAvailableTransformer);
        this.instrumentation.removeTransformer(this.transformer);
        this.shuttingDown = true;
        // Save the classes that have listeners so we can retransform
        Collection<Class<?>> probedClasses = processRemovedListeners(allRegisteredListeners);
        // Clear all data structures
        listenersLock.writeLock().lock();
        try {
            listenersForMonitor.clear();
            allRegisteredListeners.clear();
        } finally {
            listenersLock.writeLock().unlock();
        }
        activeProbesById.clear();
        listenersByProbe.clear();
        listenersByClass.clear();
        probesByKey.clear();
        probesByListener.clear();
        this.proxyActivator = null;
        this.transformer = null;
        this.componentContext = null;
        // Retransform the probed classes without our transformers in play
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(this, tc, "deactivate: probedClasses.size() = " + probedClasses.size());
        }
        for (Class<?> clazz : probedClasses) {
            instrumentation.retransformClasses(clazz);
        }
        // Drop the instrumentation reference only after the retransform pass above.
        this.instrumentation = null;
    }
}
public class ApplicationDialog { /** * Show the dialog . The dialog will be created if it doesn ' t exist yet .
* Before setting the dialog visible , a hook method onAboutToShow is called
* and the location will be set .
* When showing the dialog several times , it will always be opened on the
* location that has been set , or relative to the parent . ( former location
* will not persist ) */
public void showDialog ( ) { } } | if ( ! isControlCreated ( ) ) { createDialog ( ) ; } if ( ! isShowing ( ) ) { onAboutToShow ( ) ; if ( getLocation ( ) != null ) { dialog . setLocation ( getLocation ( ) ) ; dialog . setPreferredSize ( getPreferredSize ( ) ) ; } else { WindowUtils . centerOnParent ( dialog , getLocationRelativeTo ( ) ) ; } dialog . setVisible ( true ) ; } |
public class SuperActivityToast { /** * Sets the
* { @ link com . github . johnpersano . supertoasts . library . SuperActivityToast . OnButtonClickListener }
* in a TYPE _ BUTTON SuperActivityToast . The listener will be triggered
* when the SuperActivityToast Button is pressed .
* @ param tag A unique tag for this listener
* @ param token A Parcelable token to hold data across orientation changes
* @ param onButtonClickListener The desired OnButtonClickListener
* @ return The current SuperActivityToast instance */
public SuperActivityToast setOnButtonClickListener ( @ NonNull String tag , Parcelable token , @ NonNull OnButtonClickListener onButtonClickListener ) { } } | this . mOnButtonClickListener = onButtonClickListener ; this . mStyle . buttonTag = tag ; this . mStyle . buttonToken = token ; return this ; |
public class EndpointExpander {
    /**
     * Expands a format string that may contain any of the following conversions:
     * <ul>
     * <li>%t which is replaced with the provided table name</li>
     * <li>%p which is replaced with the provided partition id</li>
     * <li>%g which is replaced with the provided generation</li>
     * <li>%d which is replaced with the provided date</li>
     * </ul>
     * A percent sign may be used to escape another.
     * Requires:
     * - template
     * - date (if %d is in the template)
     * - table name
     * - template contains %t and %p
     *
     * @param tmpl format string
     * @param tn   table name
     * @param p    partition id
     * @param gn   generation
     * @param dt   date
     * @return expanded string with the applied parameter substitution conversions
     */
    public static String expand(String tmpl, String tn, int p, long gn, Date dt) {
        // Delegates to the six-argument overload, passing null for the extra
        // parameter (its semantics are defined by that overload, not visible here).
        return expand(tmpl, tn, p, gn, dt, null);
    }
}
public class DescribeLifecycleHooksRequest {
    /**
     * The names of one or more lifecycle hooks. If you omit this parameter, all lifecycle hooks are described.
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setLifecycleHookNames(java.util.Collection)} or {@link #withLifecycleHookNames(java.util.Collection)} if
     * you want to override the existing values.
     *
     * @param lifecycleHookNames
     *        The names of one or more lifecycle hooks. If you omit this parameter, all lifecycle hooks are described.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeLifecycleHooksRequest withLifecycleHookNames(String... lifecycleHookNames) {
        // Lazily create the backing list, pre-sized for the supplied values.
        if (this.lifecycleHookNames == null) {
            setLifecycleHookNames(new com.amazonaws.internal.SdkInternalList<String>(lifecycleHookNames.length));
        }
        // Append (never replace) the supplied names.
        for (String ele : lifecycleHookNames) {
            this.lifecycleHookNames.add(ele);
        }
        return this;
    }
}
public class RandomVariableLowMemory {
    /*
     * (non-Javadoc)
     * @see net.finmath.stochastic.RandomVariableInterface#getHistogram()
     */
    @Override
    public double[] getHistogram(double[] intervalPoints) {
        // One bucket per interval boundary plus one for the open tail interval.
        double[] histogramValues = new double[intervalPoints.length + 1];
        if (isDeterministic()) {
            /*
             * If the random variable is deterministic we will return an array
             * consisting of 0's and one and only one 1.
             */
            // NOTE(review): this branch sets the first bucket whose boundary is
            // exceeded to 1.0 AND unconditionally sets the last bucket to 1.0,
            // so the result can contain two 1.0 entries, contradicting the
            // comment above. Confirm intended deterministic-histogram semantics.
            java.util.Arrays.fill(histogramValues, 0.0);
            for (int intervalIndex = 0; intervalIndex < intervalPoints.length; intervalIndex++) {
                if (valueIfNonStochastic > intervalPoints[intervalIndex]) {
                    histogramValues[intervalIndex] = 1.0;
                    break;
                }
            }
            histogramValues[intervalPoints.length] = 1.0;
        } else {
            /*
             * If the random variable is stochastic we will return an array
             * representing a density, where the sum of the entries is one.
             * There is one exception:
             * If the size of the random variable is 0, all entries will be zero.
             */
            // Sort a copy of the realizations so each bucket can be filled with
            // a single forward sweep of the sample index.
            float[] realizationsSorted = realizations.clone();
            java.util.Arrays.sort(realizationsSorted);
            int sampleIndex = 0;
            for (int intervalIndex = 0; intervalIndex < intervalPoints.length; intervalIndex++) {
                int sampleCount = 0;
                // Count samples <= current boundary that were not yet consumed.
                while (sampleIndex < realizationsSorted.length
                        && realizationsSorted[sampleIndex] <= intervalPoints[intervalIndex]) {
                    sampleIndex++;
                    sampleCount++;
                }
                histogramValues[intervalIndex] = sampleCount;
            }
            // Remaining samples fall into the open tail interval.
            histogramValues[intervalPoints.length] = realizationsSorted.length - sampleIndex;
            // Normalize histogramValues
            if (realizationsSorted.length > 0) {
                for (int i = 0; i < histogramValues.length; i++) {
                    histogramValues[i] /= realizationsSorted.length;
                }
            }
        }
        return histogramValues;
    }
}
public class MDBRuntimeImpl {
    /**
     * Declarative service method for removing an EndpointActivationService.
     * If the removed reference is still the one recorded for its activation
     * spec, all endpoints bound to it are deactivated and the bookkeeping
     * entry is cleaned up; otherwise the service was already replaced and
     * nothing needs to be done.
     *
     * @param reference the OSGi service reference being removed
     */
    protected synchronized void removeEndPointActivationService(ServiceReference<EndpointActivationService> reference) {
        String activationSvcId = (String) reference.getProperty(ACT_SPEC_CFG_ID);
        if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
            Tr.debug(tc, "activationSvcId : " + activationSvcId);
        }
        EndpointActivationServiceInfo easInfo = endpointActivationServices.get(activationSvcId);
        if (easInfo != null) {
            // If the service was being replaced, then the add method would
            // have been called first, and this reference would no longer be
            // set. If it's still set, then the service is being removed and
            // there is no replacement, so just deactivate all endpoints.
            if (easInfo.serviceRef.equals(reference)) {
                deactivateEndpoints(easInfo.endpointFactories);
                easInfo.setReference(null);
                cleanupEndpointActivationServiceInfo(easInfo);
            } else {
                if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
                    Tr.debug(tc, "unset reference already removed");
                }
            }
        }
    }
}
public class FrameworkSerializer { /** * Can be overridden to avoid boxing a long where appropriate */
public void serializePrimitive ( S rec , String fieldName , long value ) { } } | serializePrimitive ( rec , fieldName , Long . valueOf ( value ) ) ; |
public class MongoDBClient {
    /*
     * (non-Javadoc)
     * @see com.impetus.kundera.client.Client#findAll(java.lang.Class, java.lang.Object[])
     */
    @Override
    public <E> List<E> findAll(Class<E> entityClass, String[] columnsToSelect, Object... keys) {
        EntityMetadata entityMetadata = KunderaMetadataManager.getEntityMetadata(kunderaMetadata, entityClass);
        // NOTE(review): concatenating the Object[] prints its identity hash,
        // not its contents; Arrays.toString(keys) would be more informative.
        log.debug("Fetching data from " + entityMetadata.getTableName() + " for Keys " + keys);
        DBCollection dbCollection = mongoDb.getCollection(entityMetadata.getTableName());
        // Single round-trip: match all documents whose _id is in the key set.
        BasicDBObject query = new BasicDBObject();
        query.put("_id", new BasicDBObject("$in", keys));
        DBCursor cursor = dbCollection.find(query);
        KunderaCoreUtils.printQuery("Find collection:" + query, showQuery);
        List entities = new ArrayList<E>();
        while (cursor.hasNext()) {
            DBObject fetchedDocument = cursor.next();
            // Convert each raw document into an entity and append it to the result.
            populateEntity(entityMetadata, entities, fetchedDocument);
        }
        return entities;
    }
}
public class WebServiceRefInfoBuilder {
    /**
     * Builds a WebServiceRefPartialInfo object from a class carrying a
     * {@code @WebServiceClient} annotation, capturing the WSDL location, the
     * service QName, and any {@code @HandlerChain} declaration found on the class.
     *
     * @param serviceInterfaceClass the candidate service interface class
     * @return the partial info, or {@code null} if the class is not annotated
     *         with {@code @WebServiceClient}
     */
    private static WebServiceRefPartialInfo buildPartialInfoFromWebServiceClient(Class<?> serviceInterfaceClass) {
        WebServiceClient webServiceClient = serviceInterfaceClass.getAnnotation(WebServiceClient.class);
        if (webServiceClient == null) {
            return null;
        }
        String className = serviceInterfaceClass.getName();
        String wsdlLocation = webServiceClient.wsdlLocation();
        QName serviceQName = null;
        String localPart = webServiceClient.name();
        // NOTE(review): annotation members cannot return null (name() defaults
        // to ""), so this check is presumably guarding against an empty name —
        // confirm whether an "" name should also skip QName construction.
        if (localPart != null) {
            serviceQName = new QName(webServiceClient.targetNamespace(), localPart);
        }
        String handlerChainDeclaringClassName = null;
        javax.jws.HandlerChain handlerChainAnnotation = serviceInterfaceClass.getAnnotation(javax.jws.HandlerChain.class);
        if (handlerChainAnnotation != null)
            handlerChainDeclaringClassName = serviceInterfaceClass.getName();
        WebServiceRefPartialInfo partialInfo = new WebServiceRefPartialInfo(className, wsdlLocation, serviceQName,
                null, handlerChainDeclaringClassName, handlerChainAnnotation);
        return partialInfo;
    }
}
public class EntityManagerFactory {
    /**
     * Creates and returns an {@link EntityManager} that allows working with the local Datastore
     * (a.k.a Datastore Emulator). The specified project ID will be used.
     *
     * @param serviceURL
     *            Service URL for the Datastore Emulator. (e.g. http://localhost:9999)
     * @param projectId
     *            the project ID. The specified project need not exist in Google Cloud.
     * @return an {@link EntityManager} that allows working with the local Datastore (a.k.a Datastore
     *         Emulator).
     */
    public EntityManager createLocalEntityManager(String serviceURL, String projectId) {
        // Delegates to the three-argument overload; the null third argument is
        // the overload's optional parameter (presumably namespace — confirm there).
        return createLocalEntityManager(serviceURL, projectId, null);
    }
}
public class Metric { /** * Construct and return a message based on the timing and checkpoints . This
* will look like " 123.456ms ( checkpoint1 = 100.228ms , checkpoint2 = 23.228ms ) "
* without the quotes . There will be no spaces or tabs in the output .
* < p > This will automatically call the done ( ) method to stop the timer if
* you haven ' t already done so . < / p >
* @ return a string with timing information , or { @ code " metricsDisabled " }
* if { @ code false } was passed in the constructor .
* @ see # printMessage ( StringBuilder ) */
public String getMessage ( ) { } } | if ( enabled ) { StringBuilder buf = new StringBuilder ( ) ; printMessage ( buf ) ; return buf . toString ( ) ; } return "metricsDisabled" ; |
public class Statistics {
    /**
     * Refresh the values of the metrics (min, max, total and times) using the
     * elapsed time since the timestamp previously queued in {@code currentTime}.
     */
    private void onEnd() {
        // Elapsed wall-clock time; poll() consumes the start timestamp that was
        // presumably enqueued by the matching start call — confirm producer side.
        long result = System.currentTimeMillis() - currentTime.get().poll();
        times.incrementAndGet();
        // NOTE(review): the check-then-set on min/max below is not atomic; a
        // concurrent update between get() and set() can be lost. Confirm this
        // method is never called from multiple threads simultaneously.
        if (result < min.get()) {
            min.set(result);
        }
        if (max.get() < result) {
            max.set(result);
        }
        total.addAndGet(result);
    }
}
public class AccountController {
    /**
     * Gets an account by its ID.
     *
     * @param accountId the path-variable account identifier
     * @return the account resolved by the account service
     */
    @RequestMapping(value = "{accountId}", method = RequestMethod.GET)
    public Account getAccount(@PathVariable ID accountId) {
        // Thin controller: all lookup logic lives in the service layer.
        return accountService.getAccount(accountId);
    }
}
public class WALListener {
    /**
     * This is the main method for registration of later writing. The DAO is
     * scheduled at most once at a time: a second registration for the same
     * DAO/WAL file while a write is pending is a no-op.
     *
     * @param aDAO
     *        The DAO to be written
     * @param sWALFilename
     *        The filename of the WAL file for later deletion (in case the
     *        filename changes over time).
     * @param aWaitingWime
     *        The time to wait, until the file is physically written. May not be
     *        <code>null</code>. (Note: parameter name typo kept for API stability.)
     */
    public void registerForLaterWriting(@Nonnull final AbstractWALDAO<?> aDAO,
            @Nonnull final String sWALFilename, @Nonnull final TimeValue aWaitingWime) {
        // In case many DAOs of the same class exist, the filename is also added
        final String sKey = aDAO.getClass().getName() + "::" + sWALFilename;
        // Check if the passed DAO is already scheduled for writing
        // (Set.add returns false when the key is already present).
        final boolean bDoScheduleForWriting = m_aRWLock.writeLocked(() -> m_aWaitingDAOs.add(sKey));
        if (bDoScheduleForWriting) {
            // We need to schedule it now
            if (LOGGER.isDebugEnabled())
                LOGGER.debug("Now scheduling writing for DAO " + sKey);
            // What should be executed upon writing
            final Runnable r = () -> {
                // Use DAO lock!
                aDAO.internalWriteLocked(() -> {
                    // Main DAO writing
                    aDAO._writeToFileAndResetPendingChanges("ScheduledWriter.run");
                    // Delete the WAL file
                    aDAO._deleteWALFileAfterProcessing(sWALFilename);
                    if (LOGGER.isDebugEnabled())
                        LOGGER.debug("Finished scheduled writing for DAO " + sKey);
                });
                // Remove from the internal set so that another job will be
                // scheduled for the same DAO.
                // Do this after the writing to the file.
                m_aRWLock.writeLocked(() -> {
                    // Remove from the overall set as well as from the scheduled items
                    m_aWaitingDAOs.remove(sKey);
                    m_aScheduledItems.remove(sKey);
                });
            };
            // Schedule exactly once in the specified waiting time
            final ScheduledFuture<?> aFuture = m_aES.schedule(r, aWaitingWime.getDuration(), aWaitingWime.getTimeUnit());
            // Remember the scheduled item and the runnable so that the task can
            // be rescheduled upon shutdown.
            m_aRWLock.writeLocked(() -> m_aScheduledItems.put(sKey, new WALItem(aFuture, r)));
        }
        // else the writing of the passed DAO is already scheduled and no further
        // action is necessary
    }
}
public class FilterContainer { /** * Adds a filter definition to this project file .
* @ param filter filter definition */
public void addFilter ( Filter filter ) { } } | if ( filter . isTaskFilter ( ) ) { m_taskFilters . add ( filter ) ; } if ( filter . isResourceFilter ( ) ) { m_resourceFilters . add ( filter ) ; } m_filtersByName . put ( filter . getName ( ) , filter ) ; m_filtersByID . put ( filter . getID ( ) , filter ) ; |
public class DescriptionStrategy {
    /**
     * Given an int, will return a nominal value. Example: 1 in weeks context may mean
     * "Monday", so the nominal value for 1 would be "Monday".
     * Default will return the field value's string form.
     *
     * @param fieldValue some FieldValue, must not be {@code null}
     * @return the nominal string for integer field values, otherwise the
     *         field value's own {@code toString()}
     */
    protected String nominalValue(final FieldValue<?> fieldValue) {
        Preconditions.checkNotNull(fieldValue, "FieldValue must not be null");
        // Only integer field values have a nominal mapping; everything else
        // falls back to its string representation.
        if (fieldValue instanceof IntegerFieldValue) {
            return nominalValueFunction.apply(((IntegerFieldValue) fieldValue).getValue());
        }
        return fieldValue.toString();
    }
}
public class BondRefinable {
    /**
     * Get the bond partition, based on the element types of the atoms at either end
     * of the bond, and the bond order.
     *
     * @return a partition of the bonds based on the element types and bond order
     */
    public Partition getInitialPartition() {
        int bondCount = atomContainer.getBondCount();
        // Maps a bond 'descriptor' string to the (sorted) indices of bonds matching it.
        Map<String, SortedSet<Integer>> cellMap = new HashMap<String, SortedSet<Integer>>();
        // make mini-'descriptors' for bonds like "C=O" or "C#N" etc
        for (int bondIndex = 0; bondIndex < bondCount; bondIndex++) {
            IBond bond = atomContainer.getBond(bondIndex);
            String el0 = bond.getAtom(0).getSymbol();
            String el1 = bond.getAtom(1).getSymbol();
            String boS;
            if (ignoreBondOrders) {
                // doesn't matter what it is, so long as it's constant
                boS = "1";
            } else {
                // Aromatic bonds get the sentinel order 5, outside the normal 1-4 range.
                boolean isArom = bond.getFlag(CDKConstants.ISAROMATIC);
                int orderNumber = (isArom) ? 5 : bond.getOrder().numeric();
                boS = String.valueOf(orderNumber);
            }
            // Canonicalize the descriptor by putting the lexicographically
            // smaller element symbol first, so "C-O" and "O-C" share a cell.
            String bondString;
            if (el0.compareTo(el1) < 0) {
                bondString = el0 + boS + el1;
            } else {
                bondString = el1 + boS + el0;
            }
            SortedSet<Integer> cell;
            if (cellMap.containsKey(bondString)) {
                cell = cellMap.get(bondString);
            } else {
                cell = new TreeSet<Integer>();
                cellMap.put(bondString, cell);
            }
            cell.add(bondIndex);
        }
        // sorting is necessary to get cells in order
        List<String> bondStrings = new ArrayList<String>(cellMap.keySet());
        Collections.sort(bondStrings);
        // the partition of the bonds by these 'descriptors'
        Partition bondPartition = new Partition();
        for (String key : bondStrings) {
            SortedSet<Integer> cell = cellMap.get(key);
            bondPartition.addCell(cell);
        }
        bondPartition.order();
        return bondPartition;
    }
}
public class DataCleanerHomeUpgrader {
    /**
     * Finds a folder to upgrade from based on the "newFolder" parameter -
     * upgrades are performed only within the same major version. The new
     * folder must be empty, otherwise it is left untouched.
     *
     * @param newFolder
     *        The folder we want to upgrade to (the new version)
     * @return true if upgrade was successful, false otherwise
     */
    public boolean upgrade(final FileObject newFolder) {
        try {
            if (newFolder.getChildren().length != 0) {
                // if the folder is not new then we don't want to touch it
                return false;
            }
            final FileObject upgradeFromFolderCandidate = findUpgradeCandidate(newFolder);
            if (upgradeFromFolderCandidate == null) {
                logger.info("Did not find a suitable upgrade candidate");
                return false;
            }
            logger.info("Upgrading DATACLEANER_HOME from : {}", upgradeFromFolderCandidate);
            // Bulk-copy the entire previous home into the new folder.
            newFolder.copyFrom(upgradeFromFolderCandidate, new AllFileSelector());
            // special handling of userpreferences.dat - we only want to keep
            // the good parts ;-)
            final UserPreferencesUpgrader userPreferencesUpgrader = new UserPreferencesUpgrader(newFolder);
            userPreferencesUpgrader.upgrade();
            // Overwrite example jobs
            final List<String> allFilePaths = DataCleanerHome.getAllInitialFiles();
            for (final String filePath : allFilePaths) {
                overwriteFileWithDefaults(newFolder, filePath);
            }
            return true;
        } catch (final FileSystemException e) {
            // Best-effort: a failed upgrade is reported but never propagated.
            logger.warn("Exception occured during upgrading: {}", e);
            return false;
        }
    }
}
public class InfixParser {
    /**
     * Parse infix string expression.
     *
     * @param infixExpression the infix expression to parse
     * @param values          positional values bound to the variables found in the expression
     * @return the parsed expression list
     * @throws ParseException if the expression cannot be parsed
     */
    public CList parse(String infixExpression, Object... values) throws ParseException {
        // get variable names: map each variable occurring in the expression to
        // its corresponding supplied value, then delegate to the map-based overload.
        LinkedHashMap<String, Num> vNames = mapValues(infixExpression, values);
        return parse(infixExpression, vNames);
    }
}
public class NetworkServiceRecordAgent {
    /**
     * Upgrades a VNFR of a defined VNFD in a running NSR.
     *
     * @param idNsr  the ID of the NetworkServiceRecord
     * @param idVnfr the ID of the VNFR to be upgraded
     * @param idVnfd the VNFD ID to which the VNFR shall be upgraded
     * @throws SDKException if the request fails
     */
    @Help(help = "Upgrades a VNFR to a defined VNFD in a running NSR with specific id")
    public void upgradeVnfr(final String idNsr, final String idVnfr, final String idVnfd) throws SDKException {
        // Request body carries only the target VNFD id.
        HashMap<String, Serializable> jsonBody = new HashMap<>();
        jsonBody.put("vnfdId", idVnfd);
        // POST {nsr}/vnfrecords/{vnfr}/upgrade
        String url = idNsr + "/vnfrecords" + "/" + idVnfr + "/upgrade";
        requestPost(url, jsonBody);
    }
}
public class XORSwap { /** * Swap two elements of a short array at the specified positions
* @ param shortArray array that will have two of its values swapped .
* @ param index1 one of the indexes of the array .
* @ param index2 other index of the array . */
public static void swap ( short [ ] shortArray , int index1 , int index2 ) { } } | XORSwap . swap ( shortArray , index1 , shortArray , index2 ) ; |
public class SimpleDBUtils {
    /**
     * Decodes a long value from the string representation that was created by
     * using the encodeRealNumberRange(..) function.
     *
     * @param value
     *        string representation of the long value
     * @param offsetValue
     *        offset value that was used in the original encoding
     * @return original long value
     */
    public static long decodeRealNumberRangeLong(String value, long offsetValue) {
        // Undo the encoding by subtracting the offset from the base-10 number.
        final long encoded = Long.parseLong(value, 10);
        return encoded - offsetValue;
    }
}
public class CmsEditProjectForm {
    /**
     * Submits the form.<p>
     *
     * Validates first; on success either creates a new project (no project
     * loaded) or saves the existing one, then refreshes the project table and
     * closes the window. Invalid forms are left open unchanged.
     */
    void submit() {
        if (isValid()) {
            // m_project == null means the form is in "create" mode.
            if (m_project == null) {
                createProject();
            } else {
                saveProject();
            }
            m_table.loadProjects();
            m_window.close();
        }
    }
}
public class FastMath {
    /**
     * Convert radians to degrees, with error of less than 0.5 ULP.
     *
     * @param x angle in radians
     * @return x converted into degrees
     */
    public static double toDegrees(double x) {
        if (Double.isInfinite(x) || x == 0.0) { // Matches +/- 0.0; return correct sign
            return x;
        }
        // These are 180 / PI split into high and low order bits
        final double facta = 57.2957763671875;
        final double factb = 3.145894820876798E-6;
        // Split x the same way, then form the product as four partial products
        // summed from smallest to largest to keep the error under 0.5 ULP.
        // NOTE: the summation order below is deliberate; do not "simplify".
        double xa = doubleHighPart(x);
        double xb = x - xa;
        return xb * factb + xb * facta + xa * factb + xa * facta;
    }
}
public class InitialContextFinder {
    /**
     * Finds the existing InitialContext or creates a new one.
     *
     * @param environment JNDI environment passed to the context lookup/creation
     * @return the context found or created
     * @throws NamingException if no usable context can be found or created
     */
    public static InitialContext findOrCreate(Hashtable<?, ?> environment) throws NamingException {
        // Fast path once a context has been verified usable.
        // NOTE(review): this read of 'found' is outside the lock — assumes the
        // field is volatile or that the race is benign; confirm its declaration.
        if (found) {
            return findDefault(environment);
        }
        InitialContext ctx = null;
        NamingException lastE = null;
        synchronized (InitialContextFinder.class) {
            // get and test if the initial context is usable
            try {
                ctx = findDefault(environment);
            } catch (NamingException e) {
                lastE = e;
            }
            if (ctx != null) {
                try {
                    // Probe lookup: only used to verify the context works; the
                    // looked-up name is arbitrary and NameNotFound is fine.
                    @SuppressWarnings("unused")
                    Object o = ctx.lookup(InitialContextFinder.class.getName());
                } catch (NoInitialContextException e) {
                    // Context is unusable: fall through to creation.
                    ctx = null;
                } catch (NamingException e) {
                    // Any other naming failure still counts as "usable".
                    lastE = e;
                }
            }
            // create one if needed
            if (ctx == null) {
                try {
                    ctx = createFSContext();
                } catch (NamingException e) {
                    lastE = e;
                }
            }
            if (ctx != null) {
                found = true;
                return ctx;
            } else {
                // Re-throw the most recent failure, or a generic one if none was captured.
                if (lastE != null) {
                    throw lastE;
                } else {
                    throw new NamingException("Can't find InitialContext.");
                }
            }
        }
    }
}
public class LevelRipConverter {
    /**
     * Run the converter.
     *
     * @param levelrip The file containing the levelrip as an image.
     * @param map      The destination map reference.
     * @param listener The progress listener (may be <code>null</code>).
     * @param canceler The canceler reference (may be <code>null</code>).
     * @return The total number of not found tiles.
     * @throws LionEngineException If media is <code>null</code> or image cannot be read.
     */
    public static int start(Media levelrip, MapTile map, ProgressListener listener, Canceler canceler) {
        final Sprite imageMap = Drawable.loadSprite(levelrip);
        imageMap.load();
        imageMap.prepare();
        // Map dimensions in tiles, derived from the image size and tile size.
        final int imageTilesInX = imageMap.getWidth() / map.getTileWidth();
        final int imageTilesInY = imageMap.getHeight() / map.getTileHeight();
        map.create(map.getTileWidth(), map.getTileHeight(), imageTilesInX, imageTilesInY);
        final double progressMax = imageTilesInX * (double) imageTilesInY;
        long progress = 0L;
        int lastPercent = 0;
        int errors = 0;
        final ImageBuffer tileRef = imageMap.getSurface();
        for (int progressTileY = 0; progressTileY < imageTilesInY; progressTileY++) {
            for (int progressTileX = 0; progressTileX < imageTilesInX; progressTileX++) {
                // Each tile that cannot be matched counts as one error.
                if (!checkPixel(map, tileRef, progressTileX, progressTileY)) {
                    errors++;
                }
                // Only notify the listener when the integer percentage changes.
                final int percent = (int) Math.round(progress / progressMax * 100);
                if (listener != null && percent != lastPercent) {
                    listener.notifyProgress(percent, progressTileX, progressTileY);
                }
                lastPercent = percent;
                progress++;
                // Cooperative cancellation: release the surface and bail out early.
                if (canceler != null && canceler.isCanceled()) {
                    tileRef.dispose();
                    return errors;
                }
            }
        }
        tileRef.dispose();
        return errors;
    }
}
public class MethodBindingAdapterBase {
    /**
     * <p>Walks the <code>cause</code> chain of the argument
     * <code>exception</code> and returns the first cause that is an instance
     * of <code>expectedExceptionClass</code>, or <code>null</code> if the
     * chain is exhausted without a match.</p>
     *
     * @param expectedExceptionClass the exception type searched for
     * @param exception the exception whose cause chain is interrogated
     * @return the first matching cause, or {@code null} when none exists
     */
    Throwable getExpectedCause(Class expectedExceptionClass, Throwable exception) {
        // Iterative form of the recursive walk: identical chain order and
        // identical null / match termination behavior.
        Throwable cause = exception.getCause();
        while (cause != null && !expectedExceptionClass.isAssignableFrom(cause.getClass())) {
            cause = cause.getCause();
        }
        return cause;
    }
}
public class ExecutionLock { /** * Causes the current thread to wait indefinitely . This method does not return . */
public void lock ( ) { } } | log . info ( "Elasticsearch has started and the maven process has been blocked. Press CTRL+C to stop the process." ) ; synchronized ( lock ) { try { lock . wait ( ) ; } catch ( InterruptedException exception ) { log . warn ( "RunElasticsearchNodeMojo interrupted" ) ; } } |
public class ClientDObjectMgr {
    /**
     * Unsubscribes the given subscriber from the distributed object with the
     * supplied oid (inherit documentation from the interface).
     *
     * @param oid    the id of the distributed object
     * @param target the subscriber to remove
     */
    public <T extends DObject> void unsubscribeFromObject(int oid, Subscriber<T> target) {
        // Deferred: the actual unsubscribe happens when the queued action runs.
        // The boolean presumably distinguishes subscribe (true) from
        // unsubscribe (false) — confirm against queueAction.
        queueAction(oid, target, false);
    }
}
public class CmsSitemapHoverbar {
    /**
     * Installs a hover bar for the given item widget.<p>
     *
     * @param controller the controller
     * @param treeItem   the item to hover
     * @param entryId    the entry id
     */
    public static void installOn(CmsSitemapController controller, CmsTreeItem treeItem, CmsUUID entryId) {
        // The two boolean flags and the null argument are constructor options
        // whose meaning is defined by the CmsSitemapHoverbar constructor —
        // confirm their semantics there before changing them.
        CmsSitemapHoverbar hoverbar = new CmsSitemapHoverbar(controller, entryId, true, true, null);
        installHoverbar(hoverbar, treeItem.getListItemWidget());
    }
}
public class Graph {
    /**
     * Creates a graph from a DataSet of Tuple3 objects for edges.
     * <p>The first field of the Tuple3 object will become the source ID,
     * the second field will become the target ID, and the third field will become
     * the edge value.
     * <p>Vertices are created automatically and their values are set to NullValue.
     *
     * @param edges   a DataSet of Tuple3 representing the edges.
     * @param context the flink execution environment.
     * @return the newly created graph.
     */
    public static <K, EV> Graph<K, NullValue, EV> fromTupleDataSet(DataSet<Tuple3<K, K, EV>> edges,
            ExecutionEnvironment context) {
        // Convert each (source, target, value) tuple into an Edge, then build
        // the graph via the Edge-based factory (which derives the vertices).
        DataSet<Edge<K, EV>> edgeDataSet = edges.map(new Tuple3ToEdgeMap<>()).name("Type conversion");
        return fromDataSet(edgeDataSet, context);
    }
}
public class CmsSessionManager { /** * Returns the complete user session info of a user from the session storage ,
* or < code > null < / code > if this session id has no session info attached . < p >
* @ param sessionId the OpenCms session id to return the session info for
* @ return the complete user session info of a user from the session storage */
public CmsSessionInfo getSessionInfo ( CmsUUID sessionId ) { } } | // since this method could be called from another thread
// we have to prevent access before initialization
if ( m_sessionStorageProvider == null ) { return null ; } return m_sessionStorageProvider . get ( sessionId ) ; |
public class OutputConsoleDataProvider {
    /**
     * {@inheritDoc}
     *
     * Logs the failed result (line number and value) at error level using the
     * localized message template.
     */
    @Override
    public void writeFailedResult(int line, String value) {
        logger.error(Messages.getMessage(OUTPUT_CONSOLE_DATA_PROVIDER_FAILED_AT_LINE), line, value);
    }
}
public class HtmlGroupBaseTag {
    /**
     * Return the real value of the <code>optionDataSource</code> attribute,
     * as produced by expression evaluation on the options data source.
     *
     * NOTE(review): despite the original claim that the result is always an
     * <code>Iterator</code>, a <code>Map</code> data source is returned as-is
     * (see below) — callers must handle both.
     *
     * @return the object that represents the options data source.
     * @throws JspException when something bad happens
     */
    protected Object evaluateOptionsDataSource() throws JspException {
        if (_optionsDataSource == null) {
            // optionsDataSource is null, so provide an informational message. This isn't an error because it's
            // possible for tags to list their options inside of their bodies rather than via an optionsDataSource
            logger.info(Bundle.getString("Tags_IteratorError",
                    new Object[]{getTagName(), "optionsDataSource", _optionsDataSource}));
            return IteratorFactory.EMPTY_ITERATOR;
        }
        // Maps are passed through untouched rather than being wrapped in an iterator.
        if (_optionsDataSource instanceof Map)
            return _optionsDataSource;
        Iterator it;
        it = IteratorFactory.createIterator(_optionsDataSource);
        if (it == null)
            it = IteratorFactory.EMPTY_ITERATOR;
        assert (it != null && it instanceof Iterator);
        return it;
    }
}
public class ScopInstallation {
    /*
     * (non-Javadoc)
     * @see org.biojava.nbio.structure.scop.ScopDatabase#getScopDomainsBySunid(java.lang.Integer)
     */
    @Override
    public List<ScopDomain> getScopDomainsBySunid(Integer sunid) {
        try {
            ensureClaInstalled();
        } catch (IOException e) {
            throw new ScopIOException(e);
        }
        List<ScopDomain> domains = new ArrayList<ScopDomain>();
        // Full scan: the sunid may match any level of the SCOP hierarchy
        // (px, species, domain, family, superfamily, fold, class).
        for (String pdbId : domainMap.keySet()) {
            for (ScopDomain d : domainMap.get(pdbId)) {
                try {
                    // NOTE(review): if these getters return boxed Integers, the
                    // == comparisons test reference identity and only work for
                    // cached values (or after unboxing vs. a primitive) —
                    // confirm the getter return types; .equals() would be safer.
                    if (d.getPx() == sunid) {
                        domains.add((ScopDomain) d.clone());
                    } else if (d.getSpeciesId() == sunid) {
                        domains.add((ScopDomain) d.clone());
                    } else if (d.getDomainId() == sunid) {
                        domains.add((ScopDomain) d.clone());
                    } else if (d.getFamilyId() == sunid) {
                        domains.add((ScopDomain) d.clone());
                    } else if (d.getSuperfamilyId() == sunid) {
                        domains.add((ScopDomain) d.clone());
                    } else if (d.getFoldId() == sunid) {
                        domains.add((ScopDomain) d.clone());
                    } else if (d.getClassId() == sunid) {
                        domains.add((ScopDomain) d.clone());
                    } else {
                        throw new RuntimeException("Type " + d + " not recognized"); // only possible if SCOP changes
                    }
                } catch (CloneNotSupportedException e) {
                    throw new RuntimeException(ScopDomain.class + " subclass does not support clone()", e);
                }
            }
        }
        return domains;
    }
}
public class MessageUnpacker {
    /**
     * Reads a float.
     * This method rounds the value to the range of float when the precision of the read value is
     * larger than the range of float. This may happen when {@link #getNextFormat()} returns FLOAT64.
     *
     * @return the read value
     * @throws MessageTypeException when value is not MessagePack Float type
     * @throws IOException when underlying input throws IOException
     */
    public float unpackFloat() throws IOException {
        byte b = readByte();
        switch (b) {
            case Code.FLOAT32: // float
                float fv = readFloat();
                return fv;
            case Code.FLOAT64: // double
                // Narrowing cast: loses precision / rounds to float range by design.
                double dv = readDouble();
                return (float) dv;
        }
        // Any other format byte is not a MessagePack float.
        throw unexpected("Float", b);
    }
}
public class UserImportJobTypeMarshaller {
    /**
     * Marshall the given parameter object, emitting each UserImportJobType
     * property through the protocol marshaller with its field binding.
     *
     * @param userImportJobType  the object to marshall; must not be {@code null}
     * @param protocolMarshaller the marshaller receiving the fields
     * @throws SdkClientException if the argument is {@code null} or marshalling fails
     */
    public void marshall(UserImportJobType userImportJobType, ProtocolMarshaller protocolMarshaller) {
        if (userImportJobType == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(userImportJobType.getJobName(), JOBNAME_BINDING);
            protocolMarshaller.marshall(userImportJobType.getJobId(), JOBID_BINDING);
            protocolMarshaller.marshall(userImportJobType.getUserPoolId(), USERPOOLID_BINDING);
            protocolMarshaller.marshall(userImportJobType.getPreSignedUrl(), PRESIGNEDURL_BINDING);
            protocolMarshaller.marshall(userImportJobType.getCreationDate(), CREATIONDATE_BINDING);
            protocolMarshaller.marshall(userImportJobType.getStartDate(), STARTDATE_BINDING);
            protocolMarshaller.marshall(userImportJobType.getCompletionDate(), COMPLETIONDATE_BINDING);
            protocolMarshaller.marshall(userImportJobType.getStatus(), STATUS_BINDING);
            protocolMarshaller.marshall(userImportJobType.getCloudWatchLogsRoleArn(), CLOUDWATCHLOGSROLEARN_BINDING);
            protocolMarshaller.marshall(userImportJobType.getImportedUsers(), IMPORTEDUSERS_BINDING);
            protocolMarshaller.marshall(userImportJobType.getSkippedUsers(), SKIPPEDUSERS_BINDING);
            protocolMarshaller.marshall(userImportJobType.getFailedUsers(), FAILEDUSERS_BINDING);
            protocolMarshaller.marshall(userImportJobType.getCompletionMessage(), COMPLETIONMESSAGE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class BookKeeperLog {
    /** region AutoCloseable Implementation */
    /**
     * Closes this log exactly once (guarded by the {@code closed} flag).
     * Shutdown order is deliberate: stop metric reporting, close the
     * processors, detach the active write ledger under the lock, fail any
     * queued writes, and only then close the ledger handle itself.
     * Ledger-close failures are logged, not propagated.
     */
    @Override
    public void close() {
        if (!this.closed.getAndSet(true)) {
            // Stop background metric reporting and release metrics first.
            this.metricReporter.cancel(true);
            this.metrics.close();
            this.rolloverProcessor.close();
            this.writeProcessor.close();
            // Close active ledger.
            // Detach the active ledger and metadata under the lock so no new
            // writes can observe them after this point.
            WriteLedger writeLedger;
            synchronized (this.lock) {
                writeLedger = this.writeLedger;
                this.writeLedger = null;
                this.logMetadata = null;
            }
            // Close the write queue and cancel the pending writes.
            this.writes.close().forEach(w -> w.fail(new CancellationException("BookKeeperLog has been closed."), true));
            // Finally close the ledger handle; a failure here is best-effort
            // and must not prevent the rest of the shutdown.
            if (writeLedger != null) {
                try {
                    Ledgers.close(writeLedger.ledger);
                } catch (DurableDataLogException bkEx) {
                    log.error("{}: Unable to close LedgerHandle for Ledger {}.", this.traceObjectId, writeLedger.ledger.getId(), bkEx);
                }
            }
            log.info("{}: Closed.", this.traceObjectId);
        }
    }
}
public class WCApplicationHelper { public static EnterpriseArchive packageJars ( LibertyServer server , String dir , EnterpriseArchive ear , String ... jarFiles ) throws Exception { } } | String baseDir = DIR_PUBLISH + server . getServerName ( ) + "/" + dir + "/" ; for ( String jarFile : jarFiles ) { JavaArchive jar = ShrinkWrap . createFromZipFile ( JavaArchive . class , new File ( baseDir + jarFile ) ) ; ear . addAsLibrary ( jar ) ; } return ear ; |
public class StatementGroupConverter { /** * { @ inheritDoc } */
@ Override public XBELStatementGroup convert ( StatementGroup source ) { } } | if ( source == null ) return null ; XBELStatementGroup xsg = new XBELStatementGroup ( ) ; AnnotationGroup ag = source . getAnnotationGroup ( ) ; AnnotationGroupConverter agConverter = new AnnotationGroupConverter ( ) ; // Defer to AnnotationGroupConverter
XBELAnnotationGroup xag = agConverter . convert ( ag ) ; xsg . setAnnotationGroup ( xag ) ; String name = source . getName ( ) ; xsg . setName ( name ) ; String comment = source . getComment ( ) ; xsg . setComment ( comment ) ; List < Statement > statements = source . getStatements ( ) ; StatementConverter sConverter = new StatementConverter ( ) ; if ( hasItems ( statements ) ) { List < XBELStatement > xstmts = xsg . getStatement ( ) ; for ( final Statement stmt : statements ) { // Defer to StatementConverter
xstmts . add ( sConverter . convert ( stmt ) ) ; } } List < StatementGroup > statementGroups = source . getStatementGroups ( ) ; if ( hasItems ( statementGroups ) ) { List < XBELStatementGroup > xstmtgroup = xsg . getStatementGroup ( ) ; for ( final StatementGroup sg : statementGroups ) { xstmtgroup . add ( convert ( sg ) ) ; } } return xsg ; |
public class ConsumerDispatcherState { /** * time for a durable subscription . */
public void setDurable ( boolean isDurable ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "setDurable" , Boolean . valueOf ( isDurable ) ) ; durable = isDurable ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "setDurable" ) ; |
public class UpdatePodBuilder { /** * name is the name of the pod */
public UpdatePodBuilder name ( String name ) { } } | Objects . requireNonNull ( name ) ; if ( name . indexOf ( '.' ) >= 0 ) { throw new IllegalArgumentException ( name ) ; } _name = name ; return this ; |
public class RtfWriter2 { /** * Adds a fragment of an RTF document to the current RTF document being generated .
* Since this fragment doesn ' t contain font or color tables , all fonts and colors
* are mapped to the default font and color . If the font and color mappings are
* known , they can be specified via the mappings parameter .
* Uses new RtfParser object .
* ( author : Howard Shank )
* @ param documentSource The InputStream to read the RTF fragment from .
* @ param mappings The RtfImportMappings that contain font and color mappings to apply to the fragment .
* @ param events The array of event listeners . May be null
* @ throws IOException On errors reading the RTF fragment .
* @ throws DocumentException On errors adding to this RTF fragment .
* @ see RtfImportMappings
* @ see RtfParser
* @ see RtfParser # importRtfFragment ( InputStream , RtfDocument , RtfImportMappings )
* @ since 2.0.8 */
public void importRtfFragment ( InputStream documentSource , RtfImportMappings mappings , EventListener [ ] events ) throws IOException , DocumentException { } } | if ( ! this . open ) { throw new DocumentException ( "The document must be open to import RTF fragments." ) ; } RtfParser rtfImport = new RtfParser ( this . document ) ; if ( events != null ) { for ( int idx = 0 ; idx < events . length ; idx ++ ) { rtfImport . addListener ( events [ idx ] ) ; } } rtfImport . importRtfFragment ( documentSource , this . rtfDoc , mappings ) ; |
public class MavenJDOMWriter {
    /**
     * Updates the XML element for a POM {@code <contributor>} so that its
     * child elements reflect the given {@link Contributor} value. Child
     * elements are found-and-replaced in the fixed order below, which matches
     * the conventional POM element ordering.
     *
     * @param value the contributor data to write
     * @param xmlTag the XML tag name (unused directly here; kept for the
     *        common updateX(...) signature shared by this writer)
     * @param counter depth counter used to position inserted elements
     * @param element the {@code <contributor>} element to update in place
     */
    protected void updateContributor(Contributor value, String xmlTag, Counter counter, Element element) {
        Element root = element;
        // Children are tracked one level deeper than the contributor element.
        Counter innerCount = new Counter(counter.getDepth() + 1);
        findAndReplaceSimpleElement(innerCount, root, "name", value.getName(), null);
        findAndReplaceSimpleElement(innerCount, root, "email", value.getEmail(), null);
        findAndReplaceSimpleElement(innerCount, root, "url", value.getUrl(), null);
        findAndReplaceSimpleElement(innerCount, root, "organization", value.getOrganization(), null);
        findAndReplaceSimpleElement(innerCount, root, "organizationUrl", value.getOrganizationUrl(), null);
        // Roles are a nested list: <roles><role>...</role></roles>.
        findAndReplaceSimpleLists(innerCount, root, value.getRoles(), "roles", "role");
        findAndReplaceSimpleElement(innerCount, root, "timezone", value.getTimezone(), null);
        findAndReplaceProperties(innerCount, root, "properties", value.getProperties());
    }
}
public class LinearLayoutManager { /** * Compatibility support for { @ link android . widget . AbsListView # setStackFromBottom ( boolean ) } */
public void setStackFromEnd ( boolean stackFromEnd ) { } } | if ( mPendingSavedState != null && mPendingSavedState . mStackFromEnd != stackFromEnd ) { // override pending state
mPendingSavedState . mStackFromEnd = stackFromEnd ; } if ( mStackFromEnd == stackFromEnd ) { return ; } mStackFromEnd = stackFromEnd ; requestLayout ( ) ; |
public class ServiceBroker { /** * Stops the specified { @ link MoleculerComponent } .
* @ param component
* component to stop */
protected void stop ( MoleculerComponent component ) { } } | if ( component == null ) { return ; } try { component . stopped ( ) ; logger . info ( nameOf ( component , true ) + " stopped." ) ; } catch ( Exception cause ) { logger . warn ( "Unable to stop component!" , cause ) ; } |
public class Checkbox { /** * Indicates if the element is considered checked ,
* that is , the value is the same as the one configured with { @ link # setEnabledValueString ( java . lang . String ) } .
* @ return True if checked , false otherwise */
public boolean isChecked ( ) { } } | String valueEnabledString = getEnabledValueString ( ) ; String currentValue = this . getFirstValue ( ) ; return valueEnabledString . equals ( currentValue ) ; |
public class SplitHistory {
    /**
     * Get the common split dimensions from a list of split histories, by
     * intersecting (bitwise AND) the dimension bitsets of every history.
     *
     * NOTE(review): assumes {@code splitHistories} is non-empty — the first
     * {@code it.next()} throws {@code NoSuchElementException} on an empty
     * collection. Confirm callers guarantee this.
     *
     * @param splitHistories the split histories to intersect; must be non-empty
     * @return iterator over the dimensions present in every split history
     */
    public static IntIterator getCommonDimensions(Collection<SplitHistory> splitHistories) {
        Iterator<SplitHistory> it = splitHistories.iterator();
        // Seed with a copy of the first bitset, then AND the rest in place.
        long[] checkSet = BitsUtil.copy(it.next().dimBits);
        while (it.hasNext()) {
            SplitHistory sh = it.next();
            BitsUtil.andI(checkSet, sh.dimBits);
        }
        return new BitsetIterator(checkSet);
    }
}
public class SoundButton { /** * Sets up unmuted UI event .
* Shows chip with " Unmuted " text .
* Changes sound { @ link FloatingActionButton }
* { @ link android . graphics . drawable . Drawable } to denote sound is on .
* Sets private state variable to false ( unmuted )
* @ return false , view is in unmuted state */
private boolean unmute ( ) { } } | isMuted = false ; setSoundChipText ( getContext ( ) . getString ( R . string . unmuted ) ) ; showSoundChip ( ) ; soundFabOn ( ) ; return isMuted ; |
public class VueComponentOptions { /** * Add a watch property to this Component Definition
* @ param javaMethod Function pointer to the method in the { @ link IsVueComponent }
* @ param watchedPropertyName Name of the property name to watch in the data model
* @ param isDeep Is the watcher deep ( will watch child properties )
* @ param isImmediate Is the watcher immediate ( will trigger on initial value ) */
@ JsOverlay public final void addJavaWatch ( Function javaMethod , String watchedPropertyName , boolean isDeep , boolean isImmediate ) { } } | if ( ! isDeep && ! isImmediate ) { addWatch ( watchedPropertyName , javaMethod ) ; return ; } JsPropertyMap < Object > watchDefinition = JsPropertyMap . of ( ) ; watchDefinition . set ( "handler" , javaMethod ) ; watchDefinition . set ( "deep" , isDeep ) ; watchDefinition . set ( "immediate" , isImmediate ) ; addWatch ( watchedPropertyName , watchDefinition ) ; |
public class EmbeddedServerDriver { /** * Determine if the input product extension exists in the input string .
* @ param inputString string to search .
* @ param productExtension product extension to search for .
* @ return true if input product extension is found in the input string . */
private boolean isProductExtensionInstalled ( String inputString , String productExtension ) { } } | if ( ( productExtension == null ) || ( inputString == null ) ) { return false ; } int msgIndex = inputString . indexOf ( "CWWKF0012I: The server installed the following features:" ) ; if ( msgIndex == - 1 ) { return false ; } String msgString = inputString . substring ( msgIndex ) ; int leftBracketIndex = msgString . indexOf ( "[" ) ; int rightBracketIndex = msgString . indexOf ( "]" ) ; if ( ( leftBracketIndex == - 1 ) || ( rightBracketIndex == - 1 ) || ( rightBracketIndex < leftBracketIndex ) ) { return false ; } String features = msgString . substring ( leftBracketIndex , rightBracketIndex ) ; Log . info ( c , "isProductExtensionInstalled" , features ) ; if ( features . indexOf ( productExtension ) == - 1 ) { return false ; } return true ; |
public class HmmerResult { /** * Get the overlap between two HmmerResult objects
* @ param other
* @ return 0 if no overlap , otherwise the length of the overlap */
public int getOverlapLength ( HmmerResult other ) { } } | int overlap = 0 ; for ( HmmerDomain d1 : getDomains ( ) ) { for ( HmmerDomain d2 : other . getDomains ( ) ) { overlap += getOverlap ( d1 , d2 ) ; } } return overlap ; |
public class ThreadContextManager { /** * Declarative Services method for removing a thread context provider .
* @ param ref reference to the service */
protected void unsetThreadContextProvider ( ServiceReference < ThreadContextProvider > ref ) { } } | String threadContextProviderName = ( String ) ref . getProperty ( COMPONENT_NAME ) ; if ( threadContextProviders . removeReference ( threadContextProviderName , ref ) && Boolean . TRUE . equals ( ref . getProperty ( ThreadContextProvider . ALWAYS_CAPTURE_THREAD_CONTEXT ) ) ) alwaysEnabled . remove ( threadContextProviderName ) ; |
public class DJXYAreaChartBuilder { /** * Adds the specified serie column to the dataset with custom label .
* @ param column the serie column */
public DJXYAreaChartBuilder addSerie ( AbstractColumn column , StringExpression labelExpression ) { } } | getDataset ( ) . addSerie ( column , labelExpression ) ; return this ; |
public class PurlIdentifier {
    /**
     * Returns the GAV (group:artifact:version) representation of the Package
     * URL as utilized in gradle builds, using the purl's namespace as the
     * group and its name as the artifact.
     *
     * @return the GAV representation of the Package URL, or {@code null} when
     *         the purl has no namespace or no version (both are required to
     *         form a complete GAV)
     */
    public String toGav() {
        if (purl.getNamespace() != null && purl.getVersion() != null) {
            return String.format("%s:%s:%s", purl.getNamespace(), purl.getName(), purl.getVersion());
        }
        return null;
    }
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.