signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ByteBufferUtils {

    /**
     * Encodes a String into a ByteBuffer using UTF-8.
     *
     * @param s the string to encode; may be {@code null}
     * @return a ByteBuffer wrapping the UTF-8 bytes of {@code s},
     *         or {@code null} if {@code s} is {@code null}
     */
    public static ByteBuffer bytes(String s) {
        if (s == null) {
            return null;
        }
        // Name the charset explicitly so the method never depends on an
        // unseen static import or the platform default encoding.
        return ByteBuffer.wrap(s.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    }
}
public class ApplicationsResponse { /** * List of applications returned in this page . * @ param item * List of applications returned in this page . */ public void setItem ( java . util . Collection < ApplicationResponse > item ) { } }
if ( item == null ) { this . item = null ; return ; } this . item = new java . util . ArrayList < ApplicationResponse > ( item ) ;
public class HttpUtils { /** * Returns the cookie with the given name for the given request * @ param name the name of the cookie * @ param request the request where to extract the request from * @ return the cookie object , or null if not found */ public static Cookie getCookie ( String name , HttpServletRequest request ) { } }
Cookie [ ] cookies = request . getCookies ( ) ; if ( ArrayUtils . isNotEmpty ( cookies ) ) { for ( Cookie cookie : cookies ) { if ( cookie . getName ( ) . equals ( name ) ) { return cookie ; } } } return null ;
public class CommonOps_DDRM { /** * Creates a new matrix which is the specified submatrix of ' src ' * s < sub > i - y0 , j - x0 < / sub > = o < sub > ij < / sub > for all y0 & le ; i & lt ; y1 and x0 & le ; j & lt ; x1 < br > * < br > * where ' s < sub > ij < / sub > ' is an element in the submatrix and ' o < sub > ij < / sub > ' is an element in the * original matrix . * @ param src The original matrix which is to be copied . Not modified . * @ param srcX0 Start column . * @ param srcX1 Stop column + 1. * @ param srcY0 Start row . * @ param srcY1 Stop row + 1. * @ return Extracted submatrix . */ public static DMatrixRMaj extract ( DMatrixRMaj src , int srcY0 , int srcY1 , int srcX0 , int srcX1 ) { } }
if ( srcY1 <= srcY0 || srcY0 < 0 || srcY1 > src . numRows ) throw new MatrixDimensionException ( "srcY1 <= srcY0 || srcY0 < 0 || srcY1 > src.numRows" ) ; if ( srcX1 <= srcX0 || srcX0 < 0 || srcX1 > src . numCols ) throw new MatrixDimensionException ( "srcX1 <= srcX0 || srcX0 < 0 || srcX1 > src.numCols" ) ; int w = srcX1 - srcX0 ; int h = srcY1 - srcY0 ; DMatrixRMaj dst = new DMatrixRMaj ( h , w ) ; ImplCommonOps_DDRM . extract ( src , srcY0 , srcX0 , dst , 0 , 0 , h , w ) ; return dst ;
public class AttributeListImpl { /** * Remove an attribute from the list . * < p > SAX application writers can use this method to filter an * attribute out of an AttributeList . Note that invoking this * method will change the length of the attribute list and * some of the attribute ' s indices . < / p > * < p > If the requested attribute is not in the list , this is * a no - op . < / p > * @ param name The attribute name . * @ see # addAttribute */ public void removeAttribute ( String name ) { } }
int i = names . indexOf ( name ) ; if ( i != - 1 ) { names . remove ( i ) ; types . remove ( i ) ; values . remove ( i ) ; }
public class GeometricCumulativeDoubleBondFactory { /** * Check if all atoms in the bond list have 3D coordinates . There is some * redundant checking but the list will typically be short . * @ param bonds the bonds to check * @ return whether all atoms have 2D coordinates */ private static boolean has3DCoordinates ( List < IBond > bonds ) { } }
for ( IBond bond : bonds ) { if ( bond . getBegin ( ) . getPoint3d ( ) == null || bond . getEnd ( ) . getPoint3d ( ) == null ) return false ; } return true ;
public class ColorImg { /** * Copies this image ' s data to a new { @ link Img } . * The specified { @ link TransferFunction } is used to map this * image ' s channel values to 8bit per channel ARGB values . * @ param transferFunc to transform a pixel value to the required 8bit per channel ARGB value * @ return an Img with this image ' s data copied to it */ public Img toImg ( TransferFunction transferFunc ) { } }
Img img = new Img ( getDimension ( ) ) ; if ( hasAlpha ( ) ) { img . forEach ( px -> px . setValue ( transferFunc . toARGB ( getDataA ( ) [ px . getIndex ( ) ] , getDataR ( ) [ px . getIndex ( ) ] , getDataG ( ) [ px . getIndex ( ) ] , getDataB ( ) [ px . getIndex ( ) ] ) ) ) ; } else { img . forEach ( px -> px . setValue ( transferFunc . toRGB ( getDataR ( ) [ px . getIndex ( ) ] , getDataG ( ) [ px . getIndex ( ) ] , getDataB ( ) [ px . getIndex ( ) ] ) ) ) ; } return img ;
public class SlidingSwipeBack {

    /**
     * Offsets the menu relative to its original position based on the position of the content.
     *
     * @param offsetPixels The number of pixels the content is offset.
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    private void offsetMenu(int offsetPixels) {
        // Nothing to do when menu offsetting is disabled or the menu has no measured size yet.
        if (!mOffsetMenu || mSwipeBackViewSize == 0) {
            return;
        }
        final int width = getWidth();
        final int height = getHeight();
        final int menuSize = mSwipeBackViewSize;
        // Drag direction (+1 or -1).
        // NOTE(review): when mOffsetPixels is 0 this divides 0 by 0 — for a float
        // field that yields NaN and casts to 0; confirm mOffsetPixels != 0 here.
        final int sign = (int) (mOffsetPixels / Math.abs(mOffsetPixels));
        // Fraction of the menu currently revealed (0..1).
        final float openRatio = Math.abs(mOffsetPixels) / menuSize;
        // Parallax offset: the menu trails the content by 25% of its hidden portion.
        final int offset = (int) (-0.25f * ((1.0f - openRatio) * menuSize) * sign);
        switch (getPosition()) {
            case LEFT: {
                if (USE_TRANSLATIONS) {
                    if (offsetPixels > 0) {
                        mSwipeBackContainer.setTranslationX(offset);
                    } else {
                        // Fully closed: park the container off-screen.
                        mSwipeBackContainer.setTranslationX(-menuSize);
                    }
                } else {
                    // Pre-Honeycomb fallback: move the view by layout offsets instead.
                    mSwipeBackContainer.offsetLeftAndRight(offset - mSwipeBackContainer.getLeft());
                    mSwipeBackContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
                }
                break;
            }
            case RIGHT: {
                if (USE_TRANSLATIONS) {
                    if (offsetPixels != 0) {
                        mSwipeBackContainer.setTranslationX(offset);
                    } else {
                        mSwipeBackContainer.setTranslationX(menuSize);
                    }
                } else {
                    final int oldOffset = mSwipeBackContainer.getRight() - width;
                    final int offsetBy = offset - oldOffset;
                    mSwipeBackContainer.offsetLeftAndRight(offsetBy);
                    mSwipeBackContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
                }
                break;
            }
            case TOP: {
                if (USE_TRANSLATIONS) {
                    if (offsetPixels > 0) {
                        mSwipeBackContainer.setTranslationY(offset);
                    } else {
                        mSwipeBackContainer.setTranslationY(-menuSize);
                    }
                } else {
                    mSwipeBackContainer.offsetTopAndBottom(offset - mSwipeBackContainer.getTop());
                    mSwipeBackContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
                }
                break;
            }
            case BOTTOM: {
                if (USE_TRANSLATIONS) {
                    if (offsetPixels != 0) {
                        mSwipeBackContainer.setTranslationY(offset);
                    } else {
                        mSwipeBackContainer.setTranslationY(menuSize);
                    }
                } else {
                    final int oldOffset = mSwipeBackContainer.getBottom() - height;
                    final int offsetBy = offset - oldOffset;
                    mSwipeBackContainer.offsetTopAndBottom(offsetBy);
                    mSwipeBackContainer.setVisibility(offsetPixels == 0 ? INVISIBLE : VISIBLE);
                }
                break;
            }
        }
    }
}
public class Utils { /** * read bytes with a short sign prefix ( mark the size of bytes ) * @ param buffer data buffer * @ return string result ( encoding with UTF - 8) * @ see # writeShortString ( ByteBuffer , String ) */ public static String readShortString ( ByteBuffer buffer ) { } }
short size = buffer . getShort ( ) ; if ( size < 0 ) { return null ; } byte [ ] bytes = new byte [ size ] ; buffer . get ( bytes ) ; return fromBytes ( bytes ) ;
public class Ifc4FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public String convertIfcDoorTypeOperationEnumToString ( EDataType eDataType , Object instanceValue ) { } }
return instanceValue == null ? null : instanceValue . toString ( ) ;
public class RegistrationRequest {

    /**
     * Build a RegistrationRequest from the provided {@link GridNodeConfiguration}, use the provided
     * name and description. This is different than {@code new RegistrationRequest(GridNodeConfiguration,
     * String, String)} because it will first load any specified {@link
     * GridNodeConfiguration#nodeConfigFile} and then merge the provided configuration onto it.
     *
     * @param configuration the {@link GridNodeConfiguration} to use. Internally calls {@code new
     *     GridNodeConfiguration()} if a {@code null} value is provided since a
     *     request without configuration is not valid.
     * @param name the name for the remote
     * @param description the description for the remote host
     */
    public static RegistrationRequest build(GridNodeConfiguration configuration, String name, String description) {
        RegistrationRequest pendingRequest = new RegistrationRequest(configuration, name, description);
        GridNodeConfiguration pendingConfiguration = pendingRequest.configuration;
        // When a node config file is specified, load it as the base configuration...
        if (pendingConfiguration.nodeConfigFile != null) {
            pendingRequest.configuration = GridNodeConfiguration.loadFromJSON(pendingConfiguration.nodeConfigFile);
        }
        // ...then merge the caller-provided configuration on top of it.
        pendingRequest.configuration.merge(pendingConfiguration);
        // update important merge protected values for the pendingRequest we are building.
        if (pendingConfiguration.host != null) {
            pendingRequest.configuration.host = pendingConfiguration.host;
        }
        if (pendingConfiguration.port != null) {
            pendingRequest.configuration.port = pendingConfiguration.port;
        }
        // make sure we have a valid host
        pendingRequest.configuration.fixUpHost();
        // make sure the capabilities are updated with required fields
        pendingRequest.configuration.fixUpCapabilities();
        pendingRequest.configuration.dropCapabilitiesThatDoesNotMatchCurrentPlatform();
        return pendingRequest;
    }
}
public class HttpServletRequestHolder { /** * Associate the request with the current thread . */ public static void bind ( HttpServletRequest request ) { } }
if ( request != null && threadLocal . get ( ) != null ) { throw new IllegalStateException ( "HttpServletRequestHolder.bind() called for a " + "thread that already has a request associated with it. It's likely that the request " + "was not correctly removed from the thread before it is put back into the thread pool." ) ; } threadLocal . set ( request ) ;
public class OsUtil {

    /**
     * Checks whether the given TCP port is already in use on this host.
     * (Original doc was in Chinese: "check whether the port number is occupied".)
     *
     * @param port the TCP port to probe
     * @return {@code true} if the port could not be bound (busy or not permitted),
     *         {@code false} if it is free
     */
    public static boolean isBusyPort(int port) {
        // If a server socket can be bound, the port is free. try-with-resources
        // replaces the original manual finally/close with printStackTrace, and the
        // empty catch now explicitly encodes "bind failed => busy".
        try (ServerSocket probe = new ServerSocket(port)) {
            return false;
        } catch (Exception e) {
            return true;
        }
    }
}
public class VolumeCreateConfig { private VolumeCreateConfig add ( String name , JsonObject value ) { } }
if ( value != null ) { createConfig . add ( name , value ) ; } return this ;
public class GridSearch {

    /**
     * Invokes grid search based on specified hyper space walk strategy.
     * It updates passed grid object in distributed store.
     *
     * @param grid grid object to save results; grid already locked
     */
    private void gridSearch(Grid<MP> grid) {
        Model model = null;
        // Prepare nice model key and override default key by appending model counter
        // String protoModelKey = _hyperSpaceWalker.getParams()._model_id == null
        //     ? grid._key + "_model_"
        //     : _hyperSpaceWalker.getParams()._model_id.toString() + H2O.calcNextUniqueModelId("") + "_";
        String protoModelKey = grid._key + "_model_";
        try {
            // Get iterator to traverse hyper space
            HyperSpaceWalker.HyperSpaceIterator<MP> it = _hyperSpaceWalker.iterator();
            // Number of traversed model parameters
            int counter = grid.getModelCount();
            while (it.hasNext(model)) {
                // Handle end-user cancel request
                if (_job.stop_requested()) throw new Job.JobCancelledException();
                double max_runtime_secs = it.max_runtime_secs();
                double time_remaining_secs = Double.MAX_VALUE;
                if (max_runtime_secs > 0) {
                    time_remaining_secs = it.time_remaining_secs();
                    if (time_remaining_secs < 0) {
                        Log.info("Grid max_runtime_secs of " + max_runtime_secs + " secs has expired; stopping early.");
                        throw new Job.JobCancelledException();
                    }
                }
                MP params;
                try {
                    // Get parameters for next model. Sequential model building: should
                    // never propagate an exception up, just mark the combination of
                    // model parameters as wrong.
                    params = it.nextModelParameters(model);
                    // Do we need to limit the model build time?
                    if (max_runtime_secs > 0) {
                        Log.info("Grid time is limited to: " + max_runtime_secs + " for grid: " + grid._key + ". Remaining time is: " + time_remaining_secs);
                        if (params._max_runtime_secs == 0) { // unlimited
                            params._max_runtime_secs = time_remaining_secs;
                            Log.info("Due to the grid time limit, changing model max runtime to: " + params._max_runtime_secs + " secs.");
                        } else {
                            double was = params._max_runtime_secs;
                            params._max_runtime_secs = Math.min(params._max_runtime_secs, time_remaining_secs);
                            Log.info("Due to the grid time limit, changing model max runtime from: " + was + " secs to: " + params._max_runtime_secs + " secs.");
                        }
                    }
                    try {
                        ScoringInfo scoringInfo = new ScoringInfo();
                        scoringInfo.time_stamp_ms = System.currentTimeMillis();
                        // build the model!
                        model = buildModel(params, grid, ++counter, protoModelKey);
                        if (model != null) {
                            model.fillScoringInfo(scoringInfo);
                            grid.setScoringInfos(ScoringInfo.prependScoringInfo(scoringInfo, grid.getScoringInfos()));
                            // Currently AUTO for Cartesian and user-specified for RandomDiscrete
                            ScoringInfo.sort(grid.getScoringInfos(), _hyperSpaceWalker.search_criteria().stopping_metric());
                        }
                    } catch (RuntimeException e) { // Catch everything
                        if (!Job.isCancelledException(e)) {
                            StringWriter sw = new StringWriter();
                            PrintWriter pw = new PrintWriter(sw);
                            e.printStackTrace(pw);
                            Log.warn("Grid search: model builder for parameters " + params + " failed! Exception: ", e, sw.toString());
                        }
                        grid.appendFailedModelParameters(params, e);
                    }
                } catch (IllegalArgumentException e) {
                    // Model parameters cannot be constructed for some reason
                    Log.warn("Grid search: construction of model parameters failed! Exception: ", e);
                    it.modelFailed(model);
                    Object[] rawParams = it.getCurrentRawParameters();
                    grid.appendFailedModelParameters(rawParams, e);
                } finally {
                    // Update progress by 1 increment
                    _job.update(1);
                    // Always update grid in DKV after model building attempt
                    grid.update(_job);
                } // finally
                // did model build and scoringInfo creation succeed?
                if (model != null && grid.getScoringInfos() != null &&
                        _hyperSpaceWalker.stopEarly(model, grid.getScoringInfos())) {
                    Log.info("Convergence detected based on simple moving average of the loss function. Grid building completed.");
                    break;
                }
            } // while (it.hasNext(model))
            Log.info("For grid: " + grid._key + " built: " + grid.getModelCount() + " models.");
        } finally {
            // Grid arrives locked (see javadoc); always release it.
            grid.unlock(_job);
        }
    }
}
public class LoggingService { /** * - - - - - Command callbacks */ @ Override public Collection < Task > getAppTasks ( ApplicationDefinition appDef ) { } }
checkServiceState ( ) ; List < Task > appTasks = new ArrayList < > ( ) ; appTasks . add ( new LogServiceAgerTask ( appDef ) ) ; appTasks . add ( new LogServiceMergerTask ( appDef ) ) ; return appTasks ;
public class XMLValidator { /** * Validates some piece of XML , by firstly converting it to a string to ensure that it is valid . * @ param method The validation method to use during validation . * @ param doc The XML DOM Document to be validated . * @ param fileName The filename of the DTD / Schema data . * @ param data The DTD / Schema data to be used to validate against . * @ param entities The entity data to be used to validate against . * @ param additionalFiles Any additional files that are needed during the validation . * @ return True if the XML is valid , otherwise false . */ public boolean validate ( final ValidationMethod method , final Document doc , final String fileName , final byte [ ] data , final String entities , final Map < String , byte [ ] > additionalFiles ) { } }
if ( doc == null || doc . getDocumentElement ( ) == null ) { return false ; } else { final String xml ; if ( doc . getXmlEncoding ( ) == null ) { xml = XMLUtilities . convertDocumentToString ( doc , "UTF-8" ) ; } else { xml = XMLUtilities . convertDocumentToString ( doc ) ; } return validate ( method , xml , fileName , data , entities , doc . getDocumentElement ( ) . getNodeName ( ) , additionalFiles ) ; }
public class QrcodeAPI {

    /**
     * Creates a permanent QR code.
     * (Original doc was in Chinese: "create a permanent QR code".)
     *
     * @param access_token access_token
     * @param scene_str scene value ID (string form), length limited to 1 to 64
     * @return QrcodeTicket
     */
    public static QrcodeTicket qrcodeCreateFinal(String access_token, String scene_str) {
        // NOTE(review): scene_str is interpolated into the JSON payload without
        // escaping; a value containing '"' or '\' would corrupt the request body.
        // Confirm upstream validation before accepting untrusted input here.
        String json = String.format("{\"action_name\": \"QR_LIMIT_STR_SCENE\", \"action_info\": {\"scene\": {\"scene_str\": \"%s\"}}}", scene_str);
        return qrcodeCreate(access_token, json);
    }
}
public class MimeType {

    /**
     * Returns a mimetype that matches the given string,
     * e.g. "text/html; charset=utf-8".
     *
     * @param strMimeType the raw mime-type string; null, empty or "*" map to ALL
     * @return the matching MimeType instance
     */
    public static MimeType getInstance(String strMimeType) {
        if (strMimeType == null) return ALL;
        strMimeType = strMimeType.trim();
        if ("*".equals(strMimeType) || strMimeType.length() == 0) return ALL;
        // Split "type/subtype; prop=val; ..." on ';' first.
        String[] arr = ListUtil.listToStringArray(strMimeType, ';');
        if (arr.length == 0) return ALL;
        // First segment holds "type/subtype".
        String[] arrCT = ListUtil.listToStringArray(arr[0].trim(), '/');
        // subtype
        String type = null, subtype = null;
        // type; a wildcard "*" is normalized to null
        if (arrCT.length >= 1) {
            type = arrCT[0].trim();
            if ("*".equals(type)) type = null;
            if (arrCT.length >= 2) {
                subtype = arrCT[1].trim();
                if ("*".equals(subtype)) subtype = null;
            }
        }
        // No parameters present: skip building the properties map.
        if (arr.length == 1) return getInstance(type, subtype, null);
        // Parse remaining segments as "key=value" properties (keys lowercased;
        // a bare key that is not "*" maps to an empty value).
        final Map<String, String> properties = new HashMap<String, String>();
        String entry;
        String[] _arr;
        for (int i = 1; i < arr.length; i++) {
            entry = arr[i].trim();
            _arr = ListUtil.listToStringArray(entry, '=');
            if (_arr.length >= 2) properties.put(_arr[0].trim().toLowerCase(), _arr[1].trim());
            else if (_arr.length == 1 && !_arr[0].trim().toLowerCase().equals("*")) properties.put(_arr[0].trim().toLowerCase(), "");
        }
        return getInstance(type, subtype, properties);
    }
}
public class PersistRedeliveredCount {

    /**
     * Caches a copy of the master persistable the first time this is called;
     * later calls are no-ops.
     *
     * (non-Javadoc)
     * @see com.ibm.ws.sib.msgstore.persistence.Operation#copyDataIfVulnerable()
     */
    public final void copyDataIfVulnerable() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(this, tc, "copyDataIfVulnerable");
        // Lazily snapshot the master persistable; reuse the cached copy afterwards.
        if (_cachedPersistable == null) {
            _cachedPersistable = new CachedPersistable(_masterPersistable);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(this, tc, "copyDataIfVulnerable");
    }
}
public class Descriptor {

    /**
     * getter for redirects - gets List of redirects pointing to a Wikipedia page.
     *
     * @generated
     * @return value of the feature
     */
    public StringArray getRedirects() {
        // Generated UIMA accessor: verify the feature is known to the type system,
        // then resolve the feature-structure reference through the low-level CAS.
        if (Descriptor_Type.featOkTst && ((Descriptor_Type) jcasType).casFeat_redirects == null)
            jcasType.jcas.throwFeatMissing("redirects", "de.julielab.jules.types.wikipedia.Descriptor");
        return (StringArray) (jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefValue(addr, ((Descriptor_Type) jcasType).casFeatCode_redirects)));
    }
}
public class QueuePlugin { /** * remove all queued function from the named queue . */ @ SuppressWarnings ( "unchecked" ) public T clearQueue ( String name ) { } }
for ( Element e : elements ( ) ) { queue ( e , name , null ) . clear ( ) ; } return ( T ) this ;
public class Multiplexing { /** * Flattens an iterable of iterators of E to an iterator of E . E . g : * < code > * chain ( [ 1,2 ] , [ 3,4 ] ) - > [ 1,2,3,4] * < / code > * @ param < E > the iterator element type * @ param < I > the iterator type * @ param iterators the source iterable * @ return the flattened iterator */ public static < E , I extends Iterator < E > > Iterator < E > chain ( Iterable < I > iterators ) { } }
dbc . precondition ( iterators != null , "cannot chain a null iterable" ) ; return new ChainIterator < > ( iterators . iterator ( ) ) ;
public class BrowserOpener { /** * open a URL in the browser that was used to launch SPICE * @ param url URL to be opened * @ return true if this was successfull */ public static boolean showDocument ( URL url ) { } }
if ( url != null ) { boolean success = JNLPProxy . showDocument ( url ) ; if ( ! success ) logger . info ( "could not open URL " + url + " in browser. check your config or browser version." ) ; return success ; } else return false ;
public class CoordinatorAdminCommand { /** * Parses command - line and directs to command groups or non - grouped * sub - commands . * @ param args Command - line input * @ throws Exception */ public static void executeCommand ( String [ ] args ) throws Exception { } }
String subCmd = ( args . length > 0 ) ? args [ 0 ] : "" ; args = CoordinatorAdminUtils . copyArrayCutFirst ( args ) ; if ( subCmd . equals ( "get" ) ) { SubCommandGet . executeCommand ( args ) ; } else if ( subCmd . equals ( "put" ) ) { SubCommandPut . executeCommand ( args ) ; } else if ( subCmd . equals ( "delete" ) ) { SubCommandDelete . executeCommand ( args ) ; } else { args = CoordinatorAdminUtils . copyArrayAddFirst ( args , subCmd ) ; executeHelp ( args , System . out ) ; }
public class PauseableComponentQuiesceListener {

    /**
     * Pauses every registered {@link PauseableComponent} that is not already
     * paused when the server begins shutting down.
     *
     * (non-Javadoc)
     * @see com.ibm.wsspi.kernel.service.utils.ServerQuiesceListener#serverStopping()
     */
    @Override
    public void serverStopping() {
        if (bundleContext != null) {
            try {
                Collection<ServiceReference<PauseableComponent>> refs = bundleContext.getServiceReferences(PauseableComponent.class, null);
                for (ServiceReference<PauseableComponent> ref : refs) {
                    PauseableComponent pc = bundleContext.getService(ref);
                    if (!pc.isPaused()) {
                        try {
                            pc.pause();
                        } catch (PauseableComponentException ex) {
                            // Best effort: warn and keep pausing the remaining components.
                            Tr.warning(tc, "warn.did.not.pause.on.shutdown", ex.getMessage());
                        }
                    }
                }
            } catch (InvalidSyntaxException e) {
                // Should never happen (filter is null), FFDC and return
                return;
            }
        }
    }
}
public class NonVoltDBBackend { /** * Returns true if the < i > columnName < / i > is a VARCHAR column type , of any size , * or equivalents in a comparison , non - VoltDB database ; false otherwise . */ private boolean isVarcharColumn ( String columnName , List < String > tableNames , boolean debugPrint ) { } }
List < String > varcharColumnTypes = Arrays . asList ( "VARCHAR" ) ; return isColumnType ( varcharColumnTypes , columnName , tableNames , debugPrint ) ;
public class Nodes { /** * Produces a { @ link Node . Builder . OfLong } . * @ param exactSizeIfKnown - 1 if a variable size builder is requested , * otherwise the exact capacity desired . A fixed capacity builder will * fail if the wrong number of elements are added to the builder . * @ return a { @ code Node . Builder . OfLong } */ static Node . Builder . OfLong longBuilder ( long exactSizeIfKnown ) { } }
return ( exactSizeIfKnown >= 0 && exactSizeIfKnown < MAX_ARRAY_SIZE ) ? new LongFixedNodeBuilder ( exactSizeIfKnown ) : longBuilder ( ) ;
public class CmsExplorerTypeSettings { /** * Sets the reference of the explorer type setting . < p > * @ param reference the reference of the explorer type setting */ public void setReference ( String reference ) { } }
m_reference = reference ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( Messages . get ( ) . getBundle ( ) . key ( Messages . LOG_SET_REFERENCE_1 , m_reference ) ) ; }
public class YearlyReader {

    /**
     * Make the request to the Twilio API to perform the read.
     *
     * @param client TwilioRestClient with which to make the request
     * @return Yearly ResourceSet
     */
    @Override
    public ResourceSet<Yearly> read(final TwilioRestClient client) {
        // Seed the ResourceSet with the first page; it holds this reader and the
        // client so further pages can be requested through them.
        return new ResourceSet<>(this, client, firstPage(client));
    }
}
public class BoxUser { /** * Gets a collection of all the email aliases for this user . * < p > Note that the user ' s primary login email is not included in the collection of email aliases . < / p > * @ return a collection of all the email aliases for this user . */ public Collection < EmailAlias > getEmailAliases ( ) { } }
URL url = EMAIL_ALIASES_URL_TEMPLATE . build ( this . getAPI ( ) . getBaseURL ( ) , this . getID ( ) ) ; BoxAPIRequest request = new BoxAPIRequest ( this . getAPI ( ) , url , "GET" ) ; BoxJSONResponse response = ( BoxJSONResponse ) request . send ( ) ; JsonObject responseJSON = JsonObject . readFrom ( response . getJSON ( ) ) ; int totalCount = responseJSON . get ( "total_count" ) . asInt ( ) ; Collection < EmailAlias > emailAliases = new ArrayList < EmailAlias > ( totalCount ) ; JsonArray entries = responseJSON . get ( "entries" ) . asArray ( ) ; for ( JsonValue value : entries ) { JsonObject emailAliasJSON = value . asObject ( ) ; emailAliases . add ( new EmailAlias ( emailAliasJSON ) ) ; } return emailAliases ;
public class EbeanQueryChannelService {

    /**
     * Return a ExampleExpression specifying more options.
     *
     * @param example the example bean to match against
     * @param caseInsensitive whether matching should be case insensitive
     * @param likeType the LIKE comparison type to apply
     * @return the created ExampleExpression specifying more options.
     */
    @Override
    public ExampleExpression exampleOf(Object example, boolean caseInsensitive, LikeType likeType) {
        // Thin delegation to Ebean's expression factory.
        return ebeanServer.getExpressionFactory().exampleLike(example, caseInsensitive, likeType);
    }
}
public class DocumentClassifierTrainer { /** * Getting the stream with the right corpus format . * @ param inputData * the input data * @ param clearFeatures whether to reset the features for each document * @ return the stream from the several corpus formats * @ throws IOException * the io exception */ public static ObjectStream < DocSample > getDocumentStream ( final String inputData , String clearFeatures ) throws IOException { } }
final ObjectStream < String > docStream = IOUtils . readFileIntoMarkableStreamFactory ( inputData ) ; ObjectStream < DocSample > sampleStream = new DocSampleStream ( clearFeatures , docStream ) ; return sampleStream ;
public class EngineParamPanel {

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        java.awt.GridBagConstraints gridBagConstraints;

        // Instantiate all form components.
        _populationSizeLabel = new javax.swing.JLabel();
        _populationSizeSpinner = new javax.swing.JSpinner();
        _tournamentSizeLabel = new javax.swing.JLabel();
        _tournamentSizeSpinner = new javax.swing.JSpinner();
        _mutationRateLabel = new javax.swing.JLabel();
        _mutationChangeLabel = new javax.swing.JLabel();
        _polygonLengthLabel = new javax.swing.JLabel();
        _polygonLengthSpinner = new javax.swing.JSpinner();
        _polygonCountLabel = new javax.swing.JLabel();
        _polygonCountSpinner = new javax.swing.JSpinner();
        _referenceImageSizeLabel = new javax.swing.JLabel();
        _mutationRateSpinner = new javax.swing.JSpinner();
        _mutationChangeSpinner = new javax.swing.JSpinner();
        _referenceImageWidthSpinner = new javax.swing.JSpinner();
        _referenceImageHeightSpinner = new javax.swing.JSpinner();

        setLayout(new java.awt.GridBagLayout());

        // Row 0: population size label/spinner and tournament size label/spinner.
        _populationSizeLabel.setText("Population size:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_populationSizeLabel, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_populationSizeSpinner, gridBagConstraints);
        _tournamentSizeLabel.setText("Tournament size");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_tournamentSizeLabel, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 4;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_tournamentSizeSpinner, gridBagConstraints);

        // Row 1: mutation rate and mutation change labels (their spinners are added below).
        _mutationRateLabel.setText("Mutation rate:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_mutationRateLabel, gridBagConstraints);
        _mutationChangeLabel.setText("Mutation change:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_mutationChangeLabel, gridBagConstraints);

        // Row 3: polygon length and polygon count labels/spinners.
        _polygonLengthLabel.setText("Polygon length:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 3;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_polygonLengthLabel, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 3;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_polygonLengthSpinner, gridBagConstraints);
        _polygonCountLabel.setText("Polygon count:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 3;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_polygonCountLabel, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 4;
        gridBagConstraints.gridy = 3;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_polygonCountSpinner, gridBagConstraints);

        // Row 4 label, then the remaining spinners for rows 1 and 4.
        _referenceImageSizeLabel.setText("Reference image size:");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 4;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.LINE_START;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_referenceImageSizeLabel, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_mutationRateSpinner, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 4;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 0.2;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_mutationChangeSpinner, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 4;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 0.5;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_referenceImageWidthSpinner, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 4;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 0.5;
        gridBagConstraints.insets = new java.awt.Insets(2, 2, 2, 2);
        add(_referenceImageHeightSpinner, gridBagConstraints);
    }
}
public class DescribeSpotPriceHistoryRequest { /** * Filters the results by the specified basic product descriptions . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setProductDescriptions ( java . util . Collection ) } or { @ link # withProductDescriptions ( java . util . Collection ) } * if you want to override the existing values . * @ param productDescriptions * Filters the results by the specified basic product descriptions . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeSpotPriceHistoryRequest withProductDescriptions ( String ... productDescriptions ) { } }
if ( this . productDescriptions == null ) { setProductDescriptions ( new com . amazonaws . internal . SdkInternalList < String > ( productDescriptions . length ) ) ; } for ( String ele : productDescriptions ) { this . productDescriptions . add ( ele ) ; } return this ;
public class Swagger2MarkupConfigBuilder { /** * Loads the default properties from the classpath . * @ return the default properties */ private Configuration getDefaultConfiguration ( ) { } }
Configurations configs = new Configurations ( ) ; try { return configs . properties ( PROPERTIES_DEFAULT ) ; } catch ( ConfigurationException e ) { throw new RuntimeException ( String . format ( "Can't load default properties '%s'" , PROPERTIES_DEFAULT ) , e ) ; }
public class LifecycleAnnotationFinder { /**
     * Called once Guice has created our Lifecycle, so we can start registering callbacks.
     * Drains every invocation discovered before the lifecycle existed, then switches to
     * registering further discoveries directly with the lifecycle. Once the lifecycle
     * reaches its configure stage, both {@code lifecycle} and {@code foundInvocations}
     * are null, so later discoveries fail fast.
     *
     * @param lifecycle the newly created lifecycle that queued invocations are drained into
     */
    void lifecycleAvailable(Lifecycle lifecycle) {
        LOG.debug("Lifecycle now available, draining queue");
        // First, make sure we will not let any more listeners be added once the lifecycle starts going.
        lifecycle.addListener(LifecycleStage.CONFIGURE_STAGE, new LifecycleListener() {
            @Override
            public void onStage(LifecycleStage lifecycleStage) {
                LOG.debug("Lifecycle started, further injections disallowed");
                LifecycleAnnotationFinder.this.lifecycle = null;
                // Now both lifecycle and foundInvocations are null, triggering ISE on further discoveries
            }
        });
        // Now direct further visits to the lifecycle directly
        this.lifecycle = lifecycle;
        // Now drain out all the previous ones into the lifecycle and remove the list itself
        for (LifecycleInvocation invocation : foundInvocations) {
            addListener(invocation);
        }
        foundInvocations = null;
    }
}
public class CouchDbRepositorySupport { /** * Wait a short while in order to prevent racing initializations from other repositories . */ private void backOff ( ) { } }
try { Thread . sleep ( new Random ( ) . nextInt ( 400 ) ) ; } catch ( InterruptedException ie ) { Thread . currentThread ( ) . interrupt ( ) ; }
public class PathWrapper { /**
     * Looks up a native path, adding attributes.
     * Pure delegation to the wrapped path; this wrapper adds no behavior of its own here.
     *
     * @param name the path name to look up
     * @param attributes attributes passed through to the wrapped path's lookup
     * @return the path resolved by the wrapped delegate
     */
    public PathImpl lookupNative(String name, Map<String, Object> attributes) {
        return getWrappedPath().lookupNative(name, attributes);
    }
}
public class Subframe_Fixed { /**
     * Get the data from the last encode attempt. Data is returned in an
     * EncodedElement, properly packed at the bit-level to be added directly to
     * a FLAC stream.
     *
     * @param dataEle EncodedElement that the encoded subframe is appended to
     * @return the same EncodedElement, with this subframe's bits appended
     */
    public EncodedElement getData(EncodedElement dataEle) {
        // EncodedElement dataEle = new EncodedElement(_totalBits/8+1, _offset);
        // Remember the size before we start so lastEncodedSize can be computed below.
        int startSize = dataEle.getTotalBits();
        int unencSampleSize = _frameSampleSize;
        // write headers
        // assumes 1<<3|_order encodes the FIXED subframe type with predictor order — TODO confirm against FLAC spec
        int encodedType = 1 << 3 | _order;
        dataEle.addInt(0, 1);
        dataEle.addInt(encodedType, 6);
        dataEle.addInt(0, 1);
        // The first _order warm-up samples are written verbatim at full sample size.
        if (_order > 0) {
            dataEle.packInt(_samples, unencSampleSize, _start, _skip, _order);
        }
        // send best data to rice encoder
        // A Rice parameter above 14 requires the wider 5-bit parameter form.
        int paramSize = (lowOrderBits[_order] > 14) ? 5 : 4;
        boolean fiveBitParam = (paramSize < 5) ? false : true;
        RiceEncoder.beginResidual(fiveBitParam, (byte) 0, dataEle);
        /* for (int i = 0; i < errorCount; i++) { int error = errors[errorOffset + i * errorStep];
           if (error >= 32767 || error <= -32767) System.err.println("Error Bound issue?: " + error); */
        rice.encodeRicePartition(_errors, _errorOffset, _errorStep, _errorCount, dataEle, lowOrderBits[_order], fiveBitParam);
        // Record how many bits this subframe contributed.
        this.lastEncodedSize = dataEle.getTotalBits() - startSize;
        if (DEBUG_LEV > 0) System.err.println("Subframe_Fixed::encodeSamples(...): End");
        return dataEle;
    }
}
public class ProtobufIDLProxy { /** * Checks if is nested type dependency . * @ param type the type * @ param nestedTypes the nested types * @ return true , if is nested type dependency */ private static boolean isNestedTypeDependency ( String type , List < Type > nestedTypes ) { } }
if ( nestedTypes == null ) { return false ; } for ( Type t : nestedTypes ) { if ( type . equals ( t . getName ( ) ) ) { return true ; } } return false ;
import java.util.Arrays;
import java.util.List;

class PlanetExplorer {

    /**
     * We have eight planets in our solar system, sequentially from closest to furthest
     * from the sun: Mercury, Venus, Earth, Mars, Jupiter, Saturn, Uranus, Neptune.
     * Given two planet names, returns all planets whose orbits lie strictly between
     * the two, ordered by distance from the Sun. If either name is not a valid planet,
     * or both names are the same, an empty list is returned.
     *
     * Examples:
     *   exploreSpace("Jupiter", "Neptune") ==> ["Saturn", "Uranus"]
     *   exploreSpace("Earth", "Mercury")   ==> ["Venus"]
     *   exploreSpace("Mercury", "Uranus")  ==> ["Venus", "Earth", "Mars", "Jupiter", "Saturn"]
     *
     * @param firstPlanet  one endpoint planet name
     * @param secondPlanet the other endpoint planet name
     * @return the planets strictly between the two orbits, sun-outward
     */
    public static List<String> exploreSpace(String firstPlanet, String secondPlanet) {
        final List<String> planets = Arrays.asList(
                "Mercury", "Venus", "Earth", "Mars",
                "Jupiter", "Saturn", "Uranus", "Neptune");
        // indexOf returns -1 for unknown names (including null), covering validation.
        final int first = planets.indexOf(firstPlanet);
        final int second = planets.indexOf(secondPlanet);
        if (first < 0 || second < 0 || first == second) {
            return Arrays.asList();
        }
        // Exclusive of both endpoints, always ordered sun-outward.
        final int inner = Math.min(first, second);
        final int outer = Math.max(first, second);
        return planets.subList(inner + 1, outer);
    }
}
public class OsmMapShapeConverter { /**
     * Convert a {@link Geometry} to a Map shape and add it.
     * Dispatches on the geometry type, converts the geometry to the matching
     * OsmDroid overlay, adds it to the map, and wraps the result in an
     * {@link OsmDroidMapShape}. Collections recurse into this method.
     *
     * @param map the map view to add the shape to
     * @param geometry the geometry to convert and add
     * @return the added shape wrapper
     * @throws GeoPackageException if the geometry type is not supported
     */
    @SuppressWarnings("unchecked")
    public OsmDroidMapShape addToMap(MapView map, Geometry geometry) {
        OsmDroidMapShape shape = null;
        GeometryType geometryType = geometry.getGeometryType();
        switch (geometryType) {
            case POINT:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.MARKER,
                        addLatLngToMap(map, toLatLng2((Point) geometry)));
                break;
            case LINESTRING:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.POLYLINE,
                        addPolylineToMap(map, toPolyline((LineString) geometry)));
                break;
            case POLYGON:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.POLYGON,
                        addPolygonToMap(map, toPolygon((Polygon) geometry), polygonOptions));
                break;
            case MULTIPOINT:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.MULTI_MARKER,
                        addLatLngsToMap(map, toLatLngs((MultiPoint) geometry)));
                break;
            case MULTILINESTRING:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.MULTI_POLYLINE,
                        addPolylinesToMap(map, toPolylines((MultiLineString) geometry)));
                break;
            case MULTIPOLYGON:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.MULTI_POLYGON,
                        addPolygonsToMap(map, toPolygons((MultiPolygon) geometry), polygonOptions));
                break;
            // Curved types are approximated by the corresponding straight-line overlays.
            case CIRCULARSTRING:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.POLYLINE,
                        addPolylineToMap(map, toPolyline((CircularString) geometry)));
                break;
            case COMPOUNDCURVE:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.MULTI_POLYLINE,
                        addPolylinesToMap(map, toPolylines((CompoundCurve) geometry)));
                break;
            case CURVEPOLYGON:
                org.osmdroid.views.overlay.Polygon polygon = toCurvePolygon((CurvePolygon) geometry);
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.POLYGON,
                        addPolygonToMap(map, polygon, polygonOptions));
                break;
            // Surface types are rendered as collections of polygons.
            case POLYHEDRALSURFACE:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.MULTI_POLYGON,
                        addPolygonsToMap(map, toPolygons((PolyhedralSurface) geometry), polygonOptions));
                break;
            case TIN:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.MULTI_POLYGON,
                        addPolygonsToMap(map, toPolygons((TIN) geometry), polygonOptions));
                break;
            case TRIANGLE:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.POLYGON,
                        addPolygonToMap(map, toPolygon((Triangle) geometry), polygonOptions));
                break;
            // Recurse for each member of a geometry collection.
            case GEOMETRYCOLLECTION:
                shape = new OsmDroidMapShape(geometryType, OsmMapShapeType.COLLECTION,
                        addToMap(map, (GeometryCollection<Geometry>) geometry));
                break;
            default:
                throw new GeoPackageException("Unsupported Geometry Type: " + geometryType.getName());
        }
        return shape;
    }
}
public class DefaultConfigurableOptionsFactory { /** * Helper method to check whether the ( key , value ) is valid through given configuration and returns the formatted value . * @ param key The configuration key which is configurable in { @ link RocksDBConfigurableOptions } . * @ param value The value within given configuration . * @ return whether the given key and value in string format is legal . */ private static boolean checkArgumentValid ( String key , String value ) { } }
if ( POSITIVE_INT_CONFIG_SET . contains ( key ) ) { Preconditions . checkArgument ( Integer . parseInt ( value ) > 0 , "Configured value for key: " + key + " must be larger than 0." ) ; } else if ( SIZE_CONFIG_SET . contains ( key ) ) { Preconditions . checkArgument ( MemorySize . parseBytes ( value ) > 0 , "Configured size for key" + key + " must be larger than 0." ) ; } else if ( BOOLEAN_CONFIG_SET . contains ( key ) ) { Preconditions . checkArgument ( "true" . equalsIgnoreCase ( value ) || "false" . equalsIgnoreCase ( value ) , "The configured boolean value: " + value + " for key: " + key + " is illegal." ) ; } else if ( key . equals ( COMPACTION_STYLE . key ( ) ) ) { value = value . toLowerCase ( ) ; Preconditions . checkArgument ( COMPACTION_STYLE_SET . contains ( value ) , "Compression type: " + value + " is not recognized with legal types: " + String . join ( ", " , COMPACTION_STYLE_SET ) ) ; } return true ;
public class Explanation { /**
     * Loads a previously stored explanation from the specified input stream.
     * The serialisation is an OWL ontology: the entailment axiom carries an
     * annotation whose property IRI is {@code ENTAILMENT_MARKER_IRI}; every
     * other axiom belongs to the justification.
     *
     * @param is The input stream from where to read the explanation
     * @return The explanation that was read
     * @throws IOException if there was a problem reading the explanation
     * @throws IllegalStateException if the input stream does not appear to contain a serialisation of an explanation.
     */
    public static Explanation<OWLAxiom> load(InputStream is) throws IOException {
        try {
            OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
            OWLOntology ontology = manager.loadOntologyFromOntologyDocument(new BufferedInputStream(is));
            OWLDataFactory df = manager.getOWLDataFactory();
            OWLAnnotationProperty entailmentMarkerAnnotationProperty = df.getOWLAnnotationProperty(ENTAILMENT_MARKER_IRI);
            Set<OWLAxiom> justificationAxioms = new HashSet<OWLAxiom>();
            OWLAxiom entailment = null;
            for (OWLAxiom ax : ontology.getAxioms()) {
                // The entailment axiom is the one carrying the marker annotation.
                boolean isEntailmentAxiom = !ax.getAnnotations(entailmentMarkerAnnotationProperty).isEmpty();
                if (!isEntailmentAxiom) {
                    justificationAxioms.add(ax);
                } else {
                    // Strip the marker annotation to recover the original entailment axiom.
                    entailment = ax.getAxiomWithoutAnnotations();
                }
            }
            if (entailment == null) {
                throw new IllegalStateException("Not a serialisation of an Explanation");
            }
            return new Explanation<OWLAxiom>(entailment, justificationAxioms);
        } catch (OWLOntologyCreationException e) {
            // NOTE(review): a parse failure surfaces as unchecked RuntimeException, not IOException — confirm intended.
            throw new RuntimeException(e);
        }
    }
}
public class OWLSymmetricObjectPropertyAxiomImpl_CustomFieldSerializer { /**
     * Serializes the content of the object into the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamWriter}.
     * Delegates to the static {@code serialize} helper of this serializer.
     *
     * @param streamWriter the {@link com.google.gwt.user.client.rpc.SerializationStreamWriter} to write the
     *        object's content to
     * @param instance the object instance to serialize
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *         if the serialization operation is not successful
     */
    @Override
    public void serializeInstance(SerializationStreamWriter streamWriter, OWLSymmetricObjectPropertyAxiomImpl instance) throws SerializationException {
        serialize(streamWriter, instance);
    }
}
public class Result { /** * Returns whether the password is considered to be random . * @ return true if the password is considered random , false otherwise . */ public boolean isRandom ( ) { } }
boolean is_random = true ; for ( Match match : matches ) { if ( ! ( match instanceof BruteForceMatch ) ) { is_random = false ; break ; } } return is_random ;
public class DummyWorkspace { /** * This method does allocation from a given Workspace * @ param requiredMemory allocation size , in bytes * @ param kind MemoryKind for allocation * @ param dataType dataType that is going to be used * @ param initialize * @ return */ @ Override public PagedPointer alloc ( long requiredMemory , MemoryKind kind , DataType dataType , boolean initialize ) { } }
throw new UnsupportedOperationException ( "DummyWorkspace shouldn't be used for allocation" ) ;
public class EigenValueDecomposition { /** * Updates the columns of the matrix M such that < br > < br > * < code > < br > * for ( int i = low ; i < = high ; i + + ) < br > * { < br > * & nbsp ; & nbsp ; z = M [ i ] [ n + shift ] ; < br > * & nbsp ; & nbsp ; M [ i ] [ n + shift ] = q * z + p * M [ i ] [ n ] ; < br > * & nbsp ; & nbsp ; M [ i ] [ n ] = q * M [ i ] [ n ] - p * z ; < br > * } < br > * < / code > * @ param M the matrix to alter * @ param low the starting column ( inclusive ) * @ param high the ending column ( inclusive ) * @ param n the column to alter , and the preceding column will be altered as * well * @ param q first constant * @ param p second constant * @ param shift the direction to perform the computation . Either 1 for after * the current column , or - 1 for before the current column . */ private static void columnOpTransform ( Matrix M , int low , int high , int n , double q , double p , int shift ) { } }
double z ; for ( int i = low ; i <= high ; i ++ ) { z = M . get ( i , n + shift ) ; M . set ( i , n + shift , q * z + p * M . get ( i , n ) ) ; M . set ( i , n , q * M . get ( i , n ) - p * z ) ; }
public class JSONObject { /** * Returns the value mapped by { @ code name } if it exists and is a double or can be * coerced to a double . * @ param name the name of the property * @ return the value * @ throws JSONException if the mapping doesn ' t exist or cannot be coerced to a * double . */ public double getDouble ( String name ) throws JSONException { } }
Object object = get ( name ) ; Double result = JSON . toDouble ( object ) ; if ( result == null ) { throw JSON . typeMismatch ( name , object , "double" ) ; } return result ;
public class AOValue { /**
     * (non-Javadoc)
     * @see com.ibm.ws.sib.processor.impl.interfaces.ControllableResource#createControlAdapter()
     *
     * Creates a QueuedMessage control adapter for the message identified by
     * {@code msgId} on this value's item stream. Any failure is recorded via
     * FFDC and trace rather than propagated; {@code controlAdapter} is then
     * left unset.
     */
    public void createControlAdapter() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "createControlAdapter");
        DestinationHandler dh = null;
        try {
            ItemStream is = getItemStream();
            // TODO - This method is using the wrong itemstream
            dh = ((AOProtocolItemStream) is).getDestinationHandler();
            SIMPMessage msg = (SIMPMessage) is.findById(msgId);
            controlAdapter = new QueuedMessage(msg, dh, is);
        } catch (Exception e) {
            // FFDC
            FFDCFilter.processException(e,
                "com.ibm.ws.sib.processor.impl.store.items.AOValue.createControlAdapter",
                "1:371:1.28.1.5", this);
            SibTr.exception(tc, e);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "createControlAdapter");
    }
}
public class ComparatorChain { /** * 替换指定位置的比较器 , 替换指定排序方式 * @ param index 位置 * @ param comparator { @ link Comparator } * @ param reverse 是否反序 , true表示正序 , false反序 * @ return this */ public ComparatorChain < E > setComparator ( final int index , final Comparator < E > comparator , final boolean reverse ) { } }
checkLocked ( ) ; chain . set ( index , comparator ) ; if ( reverse == true ) { orderingBits . set ( index ) ; } else { orderingBits . clear ( index ) ; } return this ;
public class AWSOpsWorksClient { /** * Registers an Elastic IP address with a specified stack . An address can be registered with only one stack at a * time . If the address is already registered , you must first deregister it by calling < a > DeregisterElasticIp < / a > . * For more information , see < a href = " http : / / docs . aws . amazon . com / opsworks / latest / userguide / resources . html " > Resource * Management < / a > . * < b > Required Permissions < / b > : To use this action , an IAM user must have a Manage permissions level for the stack , * or an attached policy that explicitly grants permissions . For more information on user permissions , see < a * href = " http : / / docs . aws . amazon . com / opsworks / latest / userguide / opsworks - security - users . html " > Managing User * Permissions < / a > . * @ param registerElasticIpRequest * @ return Result of the RegisterElasticIp operation returned by the service . * @ throws ValidationException * Indicates that a request was not valid . * @ throws ResourceNotFoundException * Indicates that a resource was not found . * @ sample AWSOpsWorks . RegisterElasticIp * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / opsworks - 2013-02-18 / RegisterElasticIp " target = " _ top " > AWS API * Documentation < / a > */ @ Override public RegisterElasticIpResult registerElasticIp ( RegisterElasticIpRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeRegisterElasticIp ( request ) ;
public class CmsSearchWidgetDialog { /** * Returns the different select options for sort search result criteria . < p > * @ return the different select options for sort search result criteria */ private List < CmsSelectWidgetOption > getSortWidgetConfiguration ( ) { } }
List < CmsSelectWidgetOption > result = new LinkedList < CmsSelectWidgetOption > ( ) ; CmsMessages messages = Messages . get ( ) . getBundle ( getLocale ( ) ) ; result . add ( new CmsSelectWidgetOption ( CmsSearchParameters . SORT_NAMES [ 0 ] , true , messages . key ( Messages . GUI_SELECT_LABEL_SEARCH_SORT_SCORE_0 ) ) ) ; result . add ( new CmsSelectWidgetOption ( CmsSearchParameters . SORT_NAMES [ 1 ] , false , messages . key ( Messages . GUI_SELECT_LABEL_SEARCH_SORT_DATE_CREATED_0 ) ) ) ; result . add ( new CmsSelectWidgetOption ( CmsSearchParameters . SORT_NAMES [ 2 ] , false , messages . key ( Messages . GUI_SELECT_LABEL_SEARCH_SORT_DATE_LAST_MODIFIED_0 ) ) ) ; result . add ( new CmsSelectWidgetOption ( CmsSearchParameters . SORT_NAMES [ 3 ] , false , messages . key ( Messages . GUI_SELECT_LABEL_SEARCH_SORT_TITLE_0 ) ) ) ; return result ;
public class JSMessageImpl { /**
     * Returns the assembled message content as a DataSlice, or null when the
     * message is not currently assembled.
     * Locking: Requires the lock as it relies on, and may change, vital instance variable(s).
     *
     * @return a DataSlice over the assembled contents, or null if the message is not assembled
     */
    public DataSlice getAssembledContent() {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            JmfTr.entry(this, tc, "getAssembledContent");
        DataSlice result = null;
        // lock the message - we don't want someone clearing the contents while we're doing this
        synchronized (getMessageLockArtefact()) {
            // If contents isn't null, the message is assembled so we can return something useful
            if (contents != null) {
                // We must mark the contents as shared now we're handing them out
                sharedContents = true; // d348294.1
                result = new DataSlice(contents, messageOffset, length);
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            JmfTr.exit(this, tc, "getAssembledContent", result);
        return result;
    }
}
public class IdentityPatchContext { /** * Backup the current configuration as part of the patch history . * @ throws IOException for any error */ void backupConfiguration ( ) throws IOException { } }
final String configuration = Constants . CONFIGURATION ; final File a = new File ( installedImage . getAppClientDir ( ) , configuration ) ; final File d = new File ( installedImage . getDomainDir ( ) , configuration ) ; final File s = new File ( installedImage . getStandaloneDir ( ) , configuration ) ; if ( a . exists ( ) ) { final File ab = new File ( configBackup , Constants . APP_CLIENT ) ; backupDirectory ( a , ab ) ; } if ( d . exists ( ) ) { final File db = new File ( configBackup , Constants . DOMAIN ) ; backupDirectory ( d , db ) ; } if ( s . exists ( ) ) { final File sb = new File ( configBackup , Constants . STANDALONE ) ; backupDirectory ( s , sb ) ; }
public class ManagementGroupVertex { /** * Checks if this vertex is an output vertex in its stage , i . e . has either no * outgoing connections or only outgoing connections to group vertices in a higher stage . * @ return < code > true < / code > if this vertex is an output vertex , < code > false < / code > otherwise */ public boolean isOutputVertex ( ) { } }
if ( this . forwardEdges . size ( ) == 0 ) { return true ; } final Iterator < ManagementGroupEdge > it = this . forwardEdges . iterator ( ) ; while ( it . hasNext ( ) ) { if ( it . next ( ) . getTarget ( ) . getStageNumber ( ) == this . getStageNumber ( ) ) { return false ; } } return true ;
public class QueryLexer { /**
     * $ANTLR start "T__35"
     * Matches the literal token 'metric_f'. This method is generated by ANTLR
     * from Query.g; do not edit by hand.
     *
     * @throws RecognitionException if the input does not match the literal
     */
    public final void mT__35() throws RecognitionException {
        try {
            int _type = T__35;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // src/riemann/Query.g:27:7: ( 'metric_f' )
            // src/riemann/Query.g:27:9: 'metric_f'
            {
                match("metric_f");
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // no cleanup required; empty finally block emitted by the ANTLR code generator
        }
    }
}
public class FileLogSet { /**
     * Attempt to rename the base log file to a new log file, and then recreate
     * the base log file.
     *
     * @return the base log file
     * @throws IOException if creating the new base file fails at the I/O level
     */
    private File rollFile() throws IOException {
        // If the base file exists, rename it and recreate it.
        File file = new File(directory, fileName + fileExtension);
        if (file.isFile()) {
            // Reuse the file if it's already empty.
            if (file.length() == 0) {
                return file;
            }
            if (maxFiles == 1) {
                // Only one file allowed: nothing to roll to, so delete in place.
                if (!FileLogUtils.deleteFile(file)) {
                    // We failed to delete the file and issued a message.
                    return file;
                }
            } else if (createNewUniqueFile(file) == null) {
                // We failed to rename (or copy+delete) the base file to a new
                // file, and we already issued a message.
                return file;
            }
        }
        if (!file.createNewFile()) {
            // NOTE(review): error key says "DELETE" but this is a create failure — confirm the message key.
            Tr.error(tc, "UNABLE_TO_DELETE_RESOURCE_NOEX", new Object[] { file });
        }
        return file;
    }
}
public class ReadabilityStatistics { /** * Returns average syllables per word for text . * @ param strText Text to be measured * @ return */ private static double averageSyllablesPerWord ( String strText ) { } }
int intSyllableCount = totalSyllables ( strText ) ; int intWordCount = wordCount ( strText ) ; return ( intSyllableCount / ( double ) intWordCount ) ;
public class FullDTDReader { /**
     * Method called to handle &lt;!NOTATION ...&gt; declaration.
     * Parses the notation name, the PUBLIC/SYSTEM keyword and the identifiers
     * (for NOTATION, a PUBLIC id may appear without a SYSTEM id), registers the
     * declaration, notifies any event listener, and resolves pending forward
     * references to this notation.
     *
     * @throws XMLStreamException on malformed declarations or duplicate notation names
     */
    private void handleNotationDecl() throws XMLStreamException {
        char c = skipObligatoryDtdWs();
        String id = readDTDName(c);
        c = skipObligatoryDtdWs();
        boolean isPublic = checkPublicSystemKeyword(c);
        String pubId, sysId;
        c = skipObligatoryDtdWs();
        // Ok, now we can parse the reference; first public id if needed:
        if (isPublic) {
            if (c != '"' && c != '\'') {
                throwDTDUnexpectedChar(c, "; expected a quote to start the public identifier");
            }
            pubId = parsePublicId(c, getErrorMsg());
            c = skipDtdWs(true);
        } else {
            pubId = null;
        }
        /* And then we may need the system id; one NOTATION oddity, if
         * there's public id, system one is optional.
         */
        if (c == '"' || c == '\'') {
            sysId = parseSystemId(c, mNormalizeLFs, getErrorMsg());
            c = skipDtdWs(true);
        } else {
            if (!isPublic) {
                throwDTDUnexpectedChar(c, "; expected a quote to start the system identifier");
            }
            sysId = null;
        }
        // And then we should get the closing '>'
        if (c != '>') {
            throwDTDUnexpectedChar(c, "; expected closing '>' after NOTATION declaration");
        }
        URL baseURL;
        try {
            baseURL = mInput.getSource();
        } catch (IOException e) {
            throw new WstxIOException(e);
        }
        // Any external listeners?
        if (mEventListener != null) {
            mEventListener.dtdNotationDecl(id, pubId, sysId, baseURL);
        }
        /* Ok, event needs to know its exact starting point (opening '<'
         * char), let's get that info now (note: data has been preserved
         * earlier)
         */
        Location evtLoc = getStartLocation();
        NotationDeclaration nd = new WNotationDeclaration(evtLoc, id, pubId, sysId, baseURL);
        // Any definitions from the internal subset?
        if (mPredefdNotations != null) {
            NotationDeclaration oldDecl = mPredefdNotations.get(id);
            if (oldDecl != null) { // oops, a problem!
                DTDSubsetImpl.throwNotationException(oldDecl, nd);
            }
        }
        HashMap<String, NotationDeclaration> m = mNotations;
        if (m == null) {
            /* Let's try to get insert-ordered Map, to be able to
             * report redefinition problems in proper order when validating
             * subset compatibility
             */
            mNotations = m = new LinkedHashMap<String, NotationDeclaration>();
        } else {
            NotationDeclaration oldDecl = m.get(id);
            if (oldDecl != null) { // oops, a problem!
                DTDSubsetImpl.throwNotationException(oldDecl, nd);
            }
        }
        // Does this resolve a dangling reference?
        if (mNotationForwardRefs != null) {
            mNotationForwardRefs.remove(id);
        }
        m.put(id, nd);
    }
}
public class DeleteBusinessReportScheduleRequestMarshaller { /**
     * Marshall the given parameter object into the protocol marshaller.
     *
     * @param deleteBusinessReportScheduleRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller that receives the request fields
     * @throws SdkClientException if the request is null or any marshalling error occurs
     */
    public void marshall(DeleteBusinessReportScheduleRequest deleteBusinessReportScheduleRequest, ProtocolMarshaller protocolMarshaller) {
        if (deleteBusinessReportScheduleRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // The schedule ARN is the only field this request carries.
            protocolMarshaller.marshall(deleteBusinessReportScheduleRequest.getScheduleArn(), SCHEDULEARN_BINDING);
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class AiMesh { /** * Returns a buffer containing vertex colors for a color set . < p > * A vertex color consists of 4 floats ( red , green , blue and alpha ) , the * buffer will therefore contain < code > 4 * getNumVertices ( ) < / code > floats * @ param colorset the color set * @ return a native - order direct buffer , or null if no data is available */ public FloatBuffer getColorBuffer ( int colorset ) { } }
if ( m_colorsets [ colorset ] == null ) { return null ; } return m_colorsets [ colorset ] . asFloatBuffer ( ) ;
public class StreamingManager { /** * 获取流推流的片段列表 , 一个流开始和断流算一个片段 * @ param streamKey 流名称 * @ param start 开始时间戳 , 单位秒 * @ param end 结束时间戳 , 单位秒 */ public ActivityRecords history ( String streamKey , long start , long end ) throws QiniuException { } }
if ( start <= 0 || end < 0 || ( start >= end && end != 0 ) ) { throw new QiniuException ( new IllegalArgumentException ( "bad argument" + start + "," + end ) ) ; } String path = encodeKey ( streamKey ) + "/historyactivity?start=" + start ; if ( end != 0 ) { path += "&end=" + end ; } return get ( path , ActivityRecords . class ) ;
public class CompositeTagAttributeUtils { /** * Applies the " displayName " , " shortDescription " , " expert " , " hidden " , * and " preferred " attributes to the BeanDescriptor if they are all literal values . * Thus no FaceletContext is necessary . * @ param descriptor * @ param displayName * @ param shortDescription * @ param expert * @ param hidden * @ param preferred */ public static void addDevelopmentAttributesLiteral ( FeatureDescriptor descriptor , TagAttribute displayName , TagAttribute shortDescription , TagAttribute expert , TagAttribute hidden , TagAttribute preferred ) { } }
if ( displayName != null ) { descriptor . setDisplayName ( displayName . getValue ( ) ) ; } if ( shortDescription != null ) { descriptor . setShortDescription ( shortDescription . getValue ( ) ) ; } if ( expert != null ) { descriptor . setExpert ( Boolean . valueOf ( expert . getValue ( ) ) ) ; } if ( hidden != null ) { descriptor . setHidden ( Boolean . valueOf ( hidden . getValue ( ) ) ) ; } if ( preferred != null ) { descriptor . setPreferred ( Boolean . valueOf ( preferred . getValue ( ) ) ) ; }
public class QueryChangesetsFilters { /** * Filter by changesets that have at one time been open during the given time range * @ param closedAfter limit search to changesets that have been closed after this date * @ param createdBefore limit search to changesets that have been created before this date */ public QueryChangesetsFilters byOpenSomeTimeBetween ( Date createdBefore , Date closedAfter ) { } }
params . put ( "time" , dateFormat . format ( closedAfter ) + "," + dateFormat . format ( createdBefore ) ) ; return this ;
public class ZoteroItemDataProvider { /** * Copies all items from the given provider and sanitizes its IDs * @ param provider the provider * @ return the sanitized items */ private static CSLItemData [ ] sanitizeItems ( ItemDataProvider provider ) { } }
Set < String > knownIds = new LinkedHashSet < > ( ) ; // create a date parser which will be used to get the item ' s year CSLDateParser dateParser = new CSLDateParser ( ) ; // iterate through all items String [ ] ids = provider . getIds ( ) ; CSLItemData [ ] result = new CSLItemData [ ids . length ] ; for ( int i = 0 ; i < ids . length ; ++ i ) { String id = ids [ i ] ; CSLItemData item = provider . retrieveItem ( id ) ; // create a new ID String newId = makeId ( item , dateParser ) ; // make ID unique newId = uniquify ( newId , knownIds ) ; knownIds . add ( newId ) ; // copy item and replace ID item = new CSLItemDataBuilder ( item ) . id ( newId ) . build ( ) ; result [ i ] = item ; } return result ;
public class Slf4jLogger { /** * Log a message at the TRACE level according to the specified format * and arguments . * < p > This form avoids superfluous object creation when the logger * is disabled for the TRACE level . < / p > * @ param format the format string * @ param argArray an array of arguments */ public void trace ( String format , Object [ ] argArray ) { } }
if ( m_delegate . isTraceEnabled ( ) ) { FormattingTuple tuple = MessageFormatter . arrayFormat ( format , argArray ) ; m_delegate . trace ( tuple . getMessage ( ) , tuple . getThrowable ( ) ) ; }
public class RingPlacer { /**
     * Layout all rings in the given RingSet that are connected to a given Ring.
     * Recursively places every not-yet-placed ring that shares atoms with
     * {@code ring}, restricted to the sharing mode selected by
     * {@code handleType} (FUSED: 2 shared atoms, SPIRO: 1, BRIDGED: more than 2).
     *
     * @param rs The RingSet to be searched for rings connected to Ring
     * @param ring The Ring for which all connected rings in RingSet are to be layed out.
     * @param handleType which sharing mode (FUSED/SPIRO/BRIDGED) to process in this pass
     * @param bondLength the target bond length used when placing rings
     */
    void placeConnectedRings(IRingSet rs, IRing ring, int handleType, double bondLength) {
        final IRingSet connectedRings = rs.getConnectedRings(ring);
        // logger.debug(rs.reportRingList(molecule));
        for (IAtomContainer container : connectedRings.atomContainers()) {
            final IRing connectedRing = (IRing) container;
            if (!connectedRing.getFlag(CDKConstants.ISPLACED)) {
                // logger.debug(ring.toString(molecule));
                // logger.debug(connectedRing.toString(molecule));
                final IAtomContainer sharedAtoms = AtomContainerManipulator.getIntersection(ring, connectedRing);
                final int numSharedAtoms = sharedAtoms.getAtomCount();
                logger.debug("placeConnectedRings-> connectedRing: " + (ring.toString()));
                // Only handle the sharing mode requested by handleType.
                if ((numSharedAtoms == 2 && handleType == FUSED)
                        || (numSharedAtoms == 1 && handleType == SPIRO)
                        || (numSharedAtoms > 2 && handleType == BRIDGED)) {
                    final Point2d sharedAtomsCenter = GeometryUtil.get2DCenter(sharedAtoms);
                    final Point2d oldRingCenter = GeometryUtil.get2DCenter(ring);
                    final Vector2d tempVector = (new Vector2d(sharedAtomsCenter));
                    // Direction from the placed ring's center towards the shared atoms.
                    final Vector2d newRingCenterVector = new Vector2d(tempVector);
                    newRingCenterVector.sub(new Vector2d(oldRingCenter));
                    // zero (or v. small ring center)
                    if (Math.abs(newRingCenterVector.x) < 0.001 && Math.abs(newRingCenterVector.y) < 0.001) {
                        // first see if we can use terminal bonds
                        IAtomContainer terminalOnly = molecule.getBuilder().newInstance(IAtomContainer.class);
                        for (IAtom atom : ring.atoms()) {
                            if (ring.getConnectedBondsCount(atom) == 1) terminalOnly.addAtom(atom);
                        }
                        if (terminalOnly.getAtomCount() == 2) {
                            newRingCenterVector.set(GeometryUtil.get2DCenter(terminalOnly));
                            newRingCenterVector.sub(oldRingCenter);
                            connectedRing.setProperty(RingPlacer.SNAP_HINT, true);
                        } else {
                            // project coordinates on 12 axis (30 degree snaps) and choose one with most spread
                            Vector2d vec = new Vector2d(0, 1);
                            double bestLen = -Double.MAX_VALUE;
                            for (int i = 0; i < 12; i++) {
                                Vector2d orth = new Vector2d(-vec.y, vec.x);
                                orth.normalize();
                                double min = Double.MAX_VALUE, max = -Double.MAX_VALUE;
                                for (IAtom atom : sharedAtoms.atoms()) {
                                    // s: scalar projection
                                    double s = orth.dot(new Vector2d(atom.getPoint2d()));
                                    if (s < min) min = s;
                                    if (s > max) max = s;
                                }
                                double len = max - min;
                                if (len > bestLen) {
                                    bestLen = len;
                                    newRingCenterVector.set(vec);
                                }
                                rotate(vec, RAD_30);
                            }
                        }
                    }
                    final Vector2d oldRingCenterVector = new Vector2d(newRingCenterVector);
                    logger.debug("placeConnectedRing -> tempVector: " + tempVector + ", tempVector.length: " + tempVector.length());
                    logger.debug("placeConnectedRing -> bondCenter: " + sharedAtomsCenter);
                    logger.debug("placeConnectedRing -> oldRingCenterVector.length(): " + oldRingCenterVector.length());
                    logger.debug("placeConnectedRing -> newRingCenterVector.length(): " + newRingCenterVector.length());
                    final Point2d tempPoint = new Point2d(sharedAtomsCenter);
                    tempPoint.add(newRingCenterVector);
                    placeRing(connectedRing, sharedAtoms, sharedAtomsCenter, newRingCenterVector, bondLength);
                    connectedRing.setFlag(CDKConstants.ISPLACED, true);
                    // Recurse so rings attached to the newly placed ring are placed too.
                    placeConnectedRings(rs, connectedRing, handleType, bondLength);
                }
            }
        }
    }
}
public class DeviceProxyDAODefaultImpl { public DevicePipe readPipe ( DeviceProxy deviceProxy , String pipeName ) throws DevFailed { } }
build_connection ( deviceProxy ) ; if ( deviceProxy . idl_version < 5 ) Except . throw_exception ( "TangoApi_NOT_SUPPORTED" , "Pipe not supported in IDL " + deviceProxy . idl_version ) ; boolean done = false ; final int retries = deviceProxy . transparent_reconnection ? 2 : 1 ; for ( int tr = 0 ; tr < retries && ! done ; tr ++ ) { try { DevPipeData pipeData = deviceProxy . device_5 . read_pipe_5 ( pipeName , DevLockManager . getInstance ( ) . getClntIdent ( ) ) ; done = true ; return new DevicePipe ( pipeData ) ; } catch ( final DevFailed e ) { throw e ; } catch ( final Exception e ) { manageExceptionReconnection ( deviceProxy , retries , tr , e , this . getClass ( ) + ".DeviceProxy.readPipe" ) ; } } return null ; // cannot occur
public class Matrix3D { /** * Apply another matrix to the left of this one . */ @ Override public void preApply ( Matrix3D left ) { } }
preApply ( left . m00 , left . m01 , left . m02 , left . m03 , left . m10 , left . m11 , left . m12 , left . m13 , left . m20 , left . m21 , left . m22 , left . m23 , left . m30 , left . m31 , left . m32 , left . m33 ) ;
public class cmpaction { /** * Use this API to add cmpaction . */ public static base_response add ( nitro_service client , cmpaction resource ) throws Exception { } }
cmpaction addresource = new cmpaction ( ) ; addresource . name = resource . name ; addresource . cmptype = resource . cmptype ; addresource . deltatype = resource . deltatype ; return addresource . add_resource ( client ) ;
public class Blacklist { /** * Checks if the given entity is blacklisted in at least one context . * @ param pe physical entity BioPAX object * @ return true / false */ public boolean isUbique ( PhysicalEntity pe ) { } }
String id = getSMRID ( pe ) ; return id != null && isUbique ( id ) ;
public class CPInstancePersistenceImpl { /** * Returns the cp instances before and after the current cp instance in the ordered set where CPDefinitionId = & # 63 ; and status = & # 63 ; . * @ param CPInstanceId the primary key of the current cp instance * @ param CPDefinitionId the cp definition ID * @ param status the status * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the previous , current , and next cp instance * @ throws NoSuchCPInstanceException if a cp instance with the primary key could not be found */ @ Override public CPInstance [ ] findByC_ST_PrevAndNext ( long CPInstanceId , long CPDefinitionId , int status , OrderByComparator < CPInstance > orderByComparator ) throws NoSuchCPInstanceException { } }
CPInstance cpInstance = findByPrimaryKey ( CPInstanceId ) ; Session session = null ; try { session = openSession ( ) ; CPInstance [ ] array = new CPInstanceImpl [ 3 ] ; array [ 0 ] = getByC_ST_PrevAndNext ( session , cpInstance , CPDefinitionId , status , orderByComparator , true ) ; array [ 1 ] = cpInstance ; array [ 2 ] = getByC_ST_PrevAndNext ( session , cpInstance , CPDefinitionId , status , orderByComparator , false ) ; return array ; } catch ( Exception e ) { throw processException ( e ) ; } finally { closeSession ( session ) ; }
public class RestartParentResourceRemoveHandler {
    /**
     * Performs the update to the persistent configuration model. This default
     * implementation simply removes the targeted resource.
     *
     * @param context the operation context
     * @param operation the operation
     * @throws OperationFailedException if there is a problem updating the model
     */
    protected void updateModel(final OperationContext context, final ModelNode operation)
            throws OperationFailedException {
        // Fail fast if the resource does not exist before attempting the removal.
        context.readResource(PathAddress.EMPTY_ADDRESS, false);
        final Resource removed = context.removeResource(PathAddress.EMPTY_ADDRESS);
        recordCapabilitiesAndRequirements(context, operation, removed);
    }
}
public class AfplibPackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getTileSize ( ) { } }
if ( tileSizeEClass == null ) { tileSizeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( AfplibPackage . eNS_URI ) . getEClassifiers ( ) . get ( 395 ) ; } return tileSizeEClass ;
public class TaskLockbox { /** * Release all locks for a task and remove task from set of active tasks . Does nothing if the task is not currently locked or not an active task . * @ param task task to unlock */ public void remove ( final Task task ) { } }
giant . lock ( ) ; try { try { log . info ( "Removing task[%s] from activeTasks" , task . getId ( ) ) ; for ( final TaskLockPosse taskLockPosse : findLockPossesForTask ( task ) ) { unlock ( task , taskLockPosse . getTaskLock ( ) . getInterval ( ) ) ; } } finally { activeTasks . remove ( task . getId ( ) ) ; } } finally { giant . unlock ( ) ; }
public class HeaderCell { /** * Implementation of the { @ link org . apache . beehive . netui . tags . IAttributeConsumer } interface . This * allows users of the anchorCell tag to extend the attribute set that is rendered by the HTML * anchor . This method accepts the following facets : * < table > * < tr > < td > Facet Name < / td > < td > Operation < / td > < / tr > * < tr > < td > < code > header < / code > < / td > < td > Adds an attribute with the provided < code > name < / code > and < code > value < / code > to the * attributes rendered on the & lt ; th & gt ; tag . < / td > < / tr > * < / table > * The HeaderCell tag defaults to the setting attributes on the header when the facet name is unset . * @ param name the name of the attribute * @ param value the value of the attribute * @ param facet the facet for the attribute * @ throws JspException thrown when the facet is not recognized */ public void setAttribute ( String name , String value , String facet ) throws JspException { } }
if ( facet == null || facet . equals ( ATTRIBUTE_HEADER_NAME ) ) { super . addStateAttribute ( _cellState , name , value ) ; } else { String s = Bundle . getString ( "Tags_AttributeFacetNotSupported" , new Object [ ] { facet } ) ; throw new JspException ( s ) ; }
public class ListSizeConstraintSetsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListSizeConstraintSetsRequest listSizeConstraintSetsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listSizeConstraintSetsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listSizeConstraintSetsRequest . getNextMarker ( ) , NEXTMARKER_BINDING ) ; protocolMarshaller . marshall ( listSizeConstraintSetsRequest . getLimit ( ) , LIMIT_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class CmsEditResourceTypeDialog { /** * Submit changes . < p > * @ param window Window * @ param app app */ protected void submit ( Window window , CmsResourceTypeApp app ) { } }
if ( isValid ( ) ) { CmsModule module = OpenCms . getModuleManager ( ) . getModule ( m_type . getModuleName ( ) ) . clone ( ) ; if ( isKeepTypeCase ( ) ) { saveResourceType ( module ) ; } else { try { changeIdNameOrSchema ( module ) ; } catch ( CmsConfigurationException e ) { LOG . error ( "Unable to change resource type." , e ) ; } } try { OpenCms . getModuleManager ( ) . updateModule ( m_cms , module ) ; OpenCms . getResourceManager ( ) . initialize ( m_cms ) ; OpenCms . getWorkplaceManager ( ) . addExplorerTypeSettings ( module ) ; // re - initialize the workplace OpenCms . getWorkplaceManager ( ) . initialize ( m_cms ) ; } catch ( CmsException e ) { LOG . error ( "Unable to save resource type" , e ) ; } window . close ( ) ; app . reload ( ) ; }
public class DiscussionsApi { /** * Get a Stream of Discussion instances for the specified commit . * < pre > < code > GitLab Endpoint : GET / projects / : id / commits / : commit _ id / discussions < / code > < / pre > * @ param projectIdOrPath projectIdOrPath the project in the form of an Integer ( ID ) , String ( path ) , or Project instance * @ param commitId the internal ID of the commit * @ return a Stream instance containing the Discussion instances for the specified commit * @ throws GitLabApiException if any exception occurs during execution */ public Stream < Discussion > getCommitDiscussionsStream ( Object projectIdOrPath , Integer commitId ) throws GitLabApiException { } }
Pager < Discussion > pager = getCommitDiscussionsPager ( projectIdOrPath , commitId , getDefaultPerPage ( ) ) ; return ( pager . stream ( ) ) ;
public class VirtualMachinesInner { /** * Converts virtual machine disks from blob - based to managed disks . Virtual machine must be stop - deallocated before invoking this operation . * @ param resourceGroupName The name of the resource group . * @ param vmName The name of the virtual machine . * @ param serviceCallback the async ServiceCallback to handle successful and failed responses . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceFuture } object */ public ServiceFuture < OperationStatusResponseInner > beginConvertToManagedDisksAsync ( String resourceGroupName , String vmName , final ServiceCallback < OperationStatusResponseInner > serviceCallback ) { } }
return ServiceFuture . fromResponse ( beginConvertToManagedDisksWithServiceResponseAsync ( resourceGroupName , vmName ) , serviceCallback ) ;
public class RouteFilterRulesInner { /** * Gets all RouteFilterRules in a route filter . * @ param nextPageLink The NextLink from the previous successful call to List operation . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the PagedList & lt ; RouteFilterRuleInner & gt ; object */ public Observable < Page < RouteFilterRuleInner > > listByRouteFilterNextAsync ( final String nextPageLink ) { } }
return listByRouteFilterNextWithServiceResponseAsync ( nextPageLink ) . map ( new Func1 < ServiceResponse < Page < RouteFilterRuleInner > > , Page < RouteFilterRuleInner > > ( ) { @ Override public Page < RouteFilterRuleInner > call ( ServiceResponse < Page < RouteFilterRuleInner > > response ) { return response . body ( ) ; } } ) ;
public class Hyperalgo { /** * This method is identical to * { @ link Hyperalgo # insideBackward ( Hypergraph , Hyperpotential , Algebra , Scores , HyperedgeDoubleFn ) } * except that the adjoints of the weights are add to { @ link Scores # weightAdj } . * INPUT : scores . alpha , scores . beta , scores . betaAdj . * OUTPUT : scores . betaAdj , scores . weightsAdj . */ public static void insideBackward ( final Hypergraph graph , final Hyperpotential w , final Algebra s , final Scores scores ) { } }
insideAdjoint ( graph , w , s , scores , false ) ; weightAdjoint ( graph , w , s , scores , false ) ;
public class GeometryIndexService { /** * Given a certain geometry and index , find the neighboring edges . It is important to understand that searching * edges within a closed ring will always return 2 results ( unless the ring contains only 1 or 2 coordinates ) , while * searching within a LineString can yield different results ( the beginning or end only has 1 neighbor ) . * @ param geometry * The geometry wherein to search for neighboring edges . * @ param index * The index to start out from . Must point to either a vertex or and edge . * @ return The list of neighboring edges . * @ throws GeometryIndexNotFoundException * Thrown in case the given index does not match the given geometry . */ public List < GeometryIndex > getAdjacentEdges ( Geometry geometry , GeometryIndex index ) throws GeometryIndexNotFoundException { } }
if ( geometry == null || index == null ) { throw new IllegalArgumentException ( "No null values allowed!" ) ; } GeometryIndexCombo combo = recursiveSearch ( geometry , index ) ; int [ ] indices = new int [ ] { } ; if ( isVertex ( index ) ) { indices = getAdjacentEdgesForVertex ( combo . getGeometry ( ) , combo . getIndex ( ) ) ; } else if ( isEdge ( index ) ) { indices = getAdjacentEdgesForEdge ( combo . getGeometry ( ) , combo . getIndex ( ) ) ; } List < GeometryIndex > indexList = new ArrayList < GeometryIndex > ( ) ; for ( int indice : indices ) { indexList . add ( recursiveCreate ( index , indice , GeometryIndexType . TYPE_EDGE ) ) ; } // Can return an empty list . return indexList ;
public class TaskExecutor { /** * Submit a { @ link Fork } to run . * @ param fork { @ link Fork } to be submitted * @ return a { @ link java . util . concurrent . Future } for the submitted { @ link Fork } */ public Future < ? > submit ( Fork fork ) { } }
LOG . info ( String . format ( "Submitting fork %d of task %s" , fork . getIndex ( ) , fork . getTaskId ( ) ) ) ; return this . forkExecutor . submit ( fork ) ;
public class SVGUtil { /** * Make a transform string to add margins * @ param owidth Width of outer ( embedding ) canvas * @ param oheight Height of outer ( embedding ) canvas * @ param iwidth Width of inner ( embedded ) canvas * @ param iheight Height of inner ( embedded ) canvas * @ param xmargin Left and right margin ( in inner canvas ' units ) * @ param ymargin Top and bottom margin ( in inner canvas ' units ) * @ return Transform string */ public static String makeMarginTransform ( double owidth , double oheight , double iwidth , double iheight , double xmargin , double ymargin ) { } }
return makeMarginTransform ( owidth , oheight , iwidth , iheight , xmargin , ymargin , xmargin , ymargin ) ;
public class BaseField { /** * For binary fields , set the current state . * @ param state The state to set this field . * @ param bDisplayOption Display changed fields if true . * @ param iMoveMode The move mode . * @ return The error code ( or NORMAL _ RETURN ) . */ public int setState ( boolean state , boolean bDisplayOption , int iMoveMode ) { } }
String tempString = "N" ; if ( state ) tempString = "Y" ; return this . setString ( tempString , bDisplayOption , iMoveMode ) ; // Move value to this field
public class WatermarkCountTriggerPolicy { /** * Triggers all the pending windows up to the waterMarkEvent timestamp * based on the sliding interval count . * @ param waterMarkEvent the watermark event */ private void handleWaterMarkEvent ( Event < T > waterMarkEvent ) { } }
long watermarkTs = waterMarkEvent . getTimestamp ( ) ; List < Long > eventTs = windowManager . getSlidingCountTimestamps ( lastProcessedTs , watermarkTs , count ) ; for ( long ts : eventTs ) { evictionPolicy . setContext ( ts ) ; handler . onTrigger ( ) ; lastProcessedTs = ts ; }
public class CheckBoxOption { /** * Set the value of this CheckBoxOption . * @ param value the CheckBoxOption value * @ jsptagref . attributedescription A String literal or a data binding expression . The value attribute determines the value submitted * by the checkbox . * @ jsptagref . databindable true * @ jsptagref . attributesyntaxvalue < i > string _ literal _ or _ expression _ value < / i > * @ netui : attribute required = " false " rtexprvalue = " true " * description = " A String literal or a data binding expression . The value attribute determines the value submitted * by the checkbox . " */ public void setValue ( Object value ) throws JspException { } }
if ( value != null ) _state . value = value . toString ( ) ; else _state . value = null ;
public class ClientsEntity { /** * Request all the Applications . A token with scope read : clients is needed . If you also need the client _ secret and encryption _ key attributes the token must have read : client _ keys scope . * See https : / / auth0 . com / docs / api / management / v2 # ! / Clients / get _ clients * @ return a Request to execute . * @ deprecated Calling this method will soon stop returning the complete list of clients and instead , limit to the first page of results . * Please use { @ link # list ( ClientFilter ) } instead as it provides pagination support . */ @ Deprecated public Request < List < Client > > list ( ) { } }
String url = baseUrl . newBuilder ( ) . addPathSegments ( "api/v2/clients" ) . build ( ) . toString ( ) ; CustomRequest < List < Client > > request = new CustomRequest < > ( client , url , "GET" , new TypeReference < List < Client > > ( ) { } ) ; request . addHeader ( "Authorization" , "Bearer " + apiToken ) ; return request ;
public class ConventionActionMapper { /** * reserved method parameter */ public ActionMapping getMapping ( HttpServletRequest request , ConfigurationManager configManager ) { } }
ActionMapping mapping = new ActionMapping ( ) ; parseNameAndNamespace ( RequestUtils . getServletPath ( request ) , mapping ) ; String method = request . getParameter ( MethodParam ) ; if ( Strings . isNotEmpty ( method ) ) mapping . setMethod ( method ) ; return mapping ;
public class PropertiesFile { /** * Returns a value for a given key . * @ param key * Key to find . * @ return Value or < code > null < / code > if the key is unknown . */ public final String get ( final String key ) { } }
final Property prop = find ( key ) ; if ( prop == null ) { return null ; } return prop . getValue ( ) ;
public class IMatrix { /** * Multiplikation from a scalar and a matrix */ public void mul ( Complex a , IMatrix result ) { } }
if ( ( result . rows != rows ) || ( result . columns != columns ) ) result . reshape ( rows , columns ) ; int i , j ; for ( i = 0 ; i < rows ; i ++ ) for ( j = 0 ; j < columns ; j ++ ) { result . realmatrix [ i ] [ j ] = realmatrix [ i ] [ j ] * a . real - imagmatrix [ i ] [ j ] * a . imag ; result . imagmatrix [ i ] [ j ] = realmatrix [ i ] [ j ] * a . imag + imagmatrix [ i ] [ j ] * a . real ; }
public class ClassGraph {
    /**
     * Convert the class name into a corresponding URL.
     *
     * Classes documented in this run are resolved through their ClassDoc (doc
     * root + package path + simple name); other classes fall back to a
     * per-class doc root with the package/class split computed from the name.
     * Returns null when no doc root is configured for the class.
     */
    public String classToUrl(String className) {
        // Prefer the javadoc metadata when the class was part of this run.
        ClassDoc classDoc = rootClassdocs.get(className);
        if (classDoc != null) {
            String docRoot = optionProvider.getGlobalOptions().apiDocRoot;
            if (docRoot == null)
                return null;
            // docRoot + package path (dots -> slashes) + "/" + SimpleName + ".html"
            return new StringBuilder(docRoot.length() + className.length() + 10)
                    .append(docRoot)
                    .append(classDoc.containingPackage().name().replace('.', '/'))
                    .append('/')
                    .append(classDoc.name())
                    .append(".html")
                    .toString();
        }
        // External class: look up a doc root mapped to this class name.
        String docRoot = optionProvider.getGlobalOptions().getApiDocRoot(className);
        if (docRoot == null)
            return null;
        // Index of the dot separating package from class (see splitPackageClass).
        int split = splitPackageClass(className);
        StringBuilder buf = new StringBuilder(docRoot.length() + className.length() + 10).append(docRoot);
        if (split > 0) // Avoid -1, and the extra slash then.
            buf.append(className.substring(0, split).replace('.', '/')).append('/');
        // Append the simple class name; Math.min guards the default-package case
        // where split + 1 could run past the end of the name.
        return buf.append(className, Math.min(split + 1, className.length()), className.length())
                .append(".html").toString();
    }
}
public class AlpineQueryManager { /** * Creates a MappedLdapGroup object . * @ param team The team to map * @ param dn the distinguished name of the LDAP group to map * @ return a MappedLdapGroup * @ since 1.4.0 */ public MappedLdapGroup createMappedLdapGroup ( final Team team , final String dn ) { } }
pm . currentTransaction ( ) . begin ( ) ; final MappedLdapGroup mapping = new MappedLdapGroup ( ) ; mapping . setTeam ( team ) ; mapping . setDn ( dn ) ; pm . makePersistent ( mapping ) ; pm . currentTransaction ( ) . commit ( ) ; return getObjectById ( MappedLdapGroup . class , mapping . getId ( ) ) ;
public class ZipFileContainer { /** * Answer the absolute path to the archive file . Do an extraction if this * is a nested archive and the file is not yet extracted . Answer null * if extraction fails . * @ return The absolute path to the archive file . */ private String getArchiveFilePath ( ) { } }
if ( archiveFileLock == null ) { return archiveFilePath ; } else { synchronized ( archiveFileLock ) { @ SuppressWarnings ( "unused" ) File useArchiveFile = getArchiveFile ( ) ; return archiveFilePath ; } }
public class CMAEntry { /** * Return a specific localized field . * @ param key the key of the field * @ param locale the locale of the key * @ param < T > the type of the return value * @ return the value requested or null , if something ( fields , key , locale ) was not found . */ @ SuppressWarnings ( "unchecked" ) public < T > T getField ( String key , String locale ) { } }
if ( fields == null ) { return null ; } LinkedHashMap < String , Object > field = fields . get ( key ) ; if ( field == null ) { return null ; } else { return ( T ) field . get ( locale ) ; }
public class bridgetable { /** * Use this API to update bridgetable resources . */ public static base_responses update ( nitro_service client , bridgetable resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { bridgetable updateresources [ ] = new bridgetable [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new bridgetable ( ) ; updateresources [ i ] . bridgeage = resources [ i ] . bridgeage ; } result = update_bulk_request ( client , updateresources ) ; } return result ;
public class Tuple3dfx { /** * Replies the z property . * @ return the z property . */ @ Pure public DoubleProperty zProperty ( ) { } }
if ( this . z == null ) { this . z = new SimpleDoubleProperty ( this , MathFXAttributeNames . Z ) ; } return this . z ;