signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class GlobalQuartzScheduler { /** * Add a job listener for all jobs . * @ param aJobListener * The job listener to be added . May not be < code > null < / code > . */ public void addJobListener ( @ Nonnull final IJobListener aJobListener ) { } }
ValueEnforcer . notNull ( aJobListener , "JobListener" ) ; try { m_aScheduler . getListenerManager ( ) . addJobListener ( aJobListener , EverythingMatcher . allJobs ( ) ) ; } catch ( final SchedulerException ex ) { throw new IllegalStateException ( "Failed to add job listener " + aJobListener . toString ( ) , ex ) ; }
public class CmsContainerpageController { /** * Checks if any of the containers are nested containers . < p > * @ return true if there are nested containers */ protected boolean hasNestedContainers ( ) { } }
boolean hasNestedContainers = false ; for ( CmsContainer container : m_containers . values ( ) ) { if ( container . getParentContainerName ( ) != null ) { hasNestedContainers = true ; break ; } } return hasNestedContainers ;
public class TargetsMngrImpl {
    /**
     * Atomically marks the target as used by the scoped instance and returns its properties.
     * <p>
     * The usage record is saved under the global LOCK before the properties are read,
     * so concurrent callers observe a consistent usage state.
     *
     * @param app the application the instance belongs to
     * @param scopedInstance the scoped instance to resolve a target for
     * @return the target properties, with instance-specific variables expanded
     * @throws IOException if no target is associated with this instance
     */
    @Override
    public TargetProperties lockAndGetTarget( Application app, Instance scopedInstance ) throws IOException {
        String instancePath = InstanceHelpers.computeInstancePath( scopedInstance );
        String targetId = findTargetId( app, instancePath );
        if ( targetId == null )
            throw new IOException ( "No target was found for " + app + " :: " + instancePath );

        // Record the usage atomically before reading the properties.
        InstanceContext mappingKey = new InstanceContext ( app, instancePath );
        synchronized ( LOCK ) {
            saveUsage ( mappingKey, targetId, true );
        }

        this.logger.fine ( "Target " + targetId + "'s lock was acquired for " + instancePath );

        // Expand instance-specific placeholders in place on the returned map.
        TargetProperties result = findTargetProperties ( app, instancePath );
        Map<String,String> newTargetProperties = TargetHelpers.expandProperties ( scopedInstance, result.asMap ( ) );
        result.asMap ( ).clear ( );
        result.asMap ( ).putAll ( newTargetProperties );

        return result;
    }
}
public class AccessControlUtils {
    /**
     * Returns the effective visibility of the given name. This can differ
     * from the name's declared visibility if the file's {@code @fileoverview}
     * JsDoc specifies a default visibility.
     *
     * @param name the name node to compute effective visibility for
     * @param var the name to compute effective visibility for
     * @param fileVisibilityMap a map of {@code @fileoverview} visibility
     *     annotations, used to compute the name's default visibility
     */
    static Visibility getEffectiveNameVisibility ( Node name, Var var,
            ImmutableMap<StaticSourceFile, Visibility> fileVisibilityMap ) {
        // An explicit visibility annotation on the name itself always wins.
        JSDocInfo jsDocInfo = var.getJSDocInfo ( );
        Visibility raw = ( jsDocInfo == null || jsDocInfo.getVisibility ( ) == null )
                ? Visibility.INHERITED
                : jsDocInfo.getVisibility ( );
        if ( raw != Visibility.INHERITED ) {
            return raw;
        }
        Visibility defaultVisibilityForFile = fileVisibilityMap.get ( var.getSourceFile ( ) );
        JSType type = name.getJSType ( );
        // Literal object types are how rewritten goog.provide() namespaces appear here.
        boolean createdFromGoogProvide = ( type != null && type.isLiteralObject ( ) );
        // Ignore @fileoverview visibility when computing the effective visibility
        // for names created by goog.provide.
        //
        // ProcessClosurePrimitives rewrites goog.provide()s as object literal
        // declarations, but the exact form depends on the ordering of the
        // input files. If goog.provide('a.b') occurs in the inputs before
        // goog.provide('a'), it is rewritten like
        //   var a = {}; a.b = {};
        // If the file containing goog.provide('a.b') also declares a @fileoverview
        // visibility, it must not apply to a, as this would make every a.* namespace
        // effectively package-private.
        return ( createdFromGoogProvide || defaultVisibilityForFile == null )
                ? raw
                : defaultVisibilityForFile;
    }
}
public class FeatureSelectionController { /** * Transform a pixel - length into a real - life distance expressed in map CRS . This depends on the current map scale . * @ param pixels * The number of pixels to calculate the distance for . * @ return The distance the given number of pixels entails . */ private double pixelsToUnits ( int pixels ) { } }
Coordinate c1 = mapPresenter . getViewPort ( ) . getTransformationService ( ) . transform ( new Coordinate ( 0 , 0 ) , RenderSpace . SCREEN , RenderSpace . WORLD ) ; Coordinate c2 = mapPresenter . getViewPort ( ) . getTransformationService ( ) . transform ( new Coordinate ( pixels , 0 ) , RenderSpace . SCREEN , RenderSpace . WORLD ) ; return MathService . distance ( c1 , c2 ) ;
public class Alerts {
    /**
     * Shows an error dialog.
     *
     * @param title the dialog title
     * @param content the dialog content text
     * @return the {@link ButtonType} the user selected, if any
     */
    public static Optional<ButtonType> showError ( String title, String content ) {
        // Delegate to the three-argument overload with no header text.
        return showError ( title, null, content );
    }
}
public class CmsSearchManager {
    /**
     * Initializes the spell check index.<p>
     *
     * If the dictionary needs updating, the (potentially slow) parsing of the
     * dictionaries is performed asynchronously on a freshly started thread.
     *
     * @param adminCms the ROOT_ADMIN cms context
     */
    public void initSpellcheckIndex ( CmsObject adminCms ) {
        if ( CmsSpellcheckDictionaryIndexer.updatingIndexNecessesary ( adminCms ) ) {
            final CmsSolrSpellchecker spellchecker = OpenCms.getSearchManager ( ).getSolrDictionary ( );
            if ( spellchecker != null ) {
                // Run the dictionary import in the background; role violations are
                // logged rather than propagated, since this runs detached.
                Runnable initRunner = new Runnable ( ) {
                    public void run ( ) {
                        try {
                            spellchecker.parseAndAddDictionaries ( adminCms );
                        } catch ( CmsRoleViolationException e ) {
                            LOG.error ( e.getLocalizedMessage ( ), e );
                        }
                    }
                };
                new Thread ( initRunner ).start ( );
            }
        }
    }
}
public class BeanDescriptor { /** * Invokes the annotation of the given type . * @ param name the given setter name * @ param annotationType the annotation type to look for * @ param < T > the annotation type * @ return the annotation object , or null if not found * @ throws NoSuchMethodException when a setter method cannot be found */ public < T extends Annotation > T getSetterAnnotation ( String name , Class < T > annotationType ) throws NoSuchMethodException { } }
Method method = getSetter ( name ) ; return getSetterAnnotation ( method , annotationType ) ;
public class DeleteException { /** * Converts a Throwable to a DeleteException . If the Throwable is a * DeleteException , it will be passed through unmodified ; otherwise , it will be wrapped * in a new DeleteException . * @ param cause the Throwable to convert * @ return a DeleteException */ public static DeleteException fromThrowable ( Throwable cause ) { } }
return ( cause instanceof DeleteException ) ? ( DeleteException ) cause : new DeleteException ( cause ) ;
public class KunderaCriteriaBuilder {
    /**
     * (non-Javadoc)
     *
     * @see javax.persistence.criteria.CriteriaBuilder#neg(javax.persistence.criteria.Expression)
     */
    @Override
    public <N extends Number> Expression<N> neg ( Expression<N> arg0 ) {
        // TODO Auto-generated method stub
        // NOTE(review): unimplemented — callers currently receive null instead of a
        // negation expression. Confirm whether this operation is intentionally
        // unsupported before relying on it.
        return null;
    }
}
public class MapUtil {
    /**
     * Gets the value of the specified key from the map and converts it to a {@link Float}.
     *
     * @param map the map to read from
     * @param key the key
     * @return the converted value — conversion semantics are delegated to get();
     *     presumably null when absent, verify against that helper
     * @since 4.0.6
     */
    public static Float getFloat ( Map<?, ?> map, Object key ) {
        return get ( map, key, Float.class );
    }
}
public class DSUtil {
    /**
     * Checks equality of two objects, handling the cases where <code>obj1</code>
     * is null or both references are identical before invoking <code>equals</code>.
     *
     * @param obj1 first object, may be null
     * @param obj2 second object, may be null
     * @param <E> the common type of both objects
     * @return true if both are null, the same reference, or equal per equals()
     */
    public static <E> boolean checkEq ( E obj1, E obj2 ) {
        // java.util.Objects.equals implements exactly this contract:
        // same reference (covers both-null) -> true; obj1 null -> false;
        // otherwise obj1.equals(obj2).
        return java.util.Objects.equals ( obj1, obj2 );
    }
}
public class DiskBuffer { /** * Deletes a buffered { @ link Event } from disk . * @ param event Event to delete from the disk . */ @ Override public void discard ( Event event ) { } }
File eventFile = new File ( bufferDir , event . getId ( ) . toString ( ) + FILE_SUFFIX ) ; if ( eventFile . exists ( ) ) { logger . debug ( "Discarding Event from offline storage: " + eventFile . getAbsolutePath ( ) ) ; if ( ! eventFile . delete ( ) ) { logger . warn ( "Failed to delete Event: " + eventFile . getAbsolutePath ( ) ) ; } }
public class MessageMgr { /** * Resets the collected messages and all counters . * @ return returns self to allow for chained calls */ public MessageMgr clear ( ) { } }
for ( MessageTypeHandler handler : this . messageHandlers . values ( ) ) { handler . clear ( ) ; } this . messages . clear ( ) ; return this ;
public class Context { /** * Sets the maximum stack depth ( in terms of number of call frames ) * allowed in a single invocation of interpreter . If the set depth would be * exceeded , the interpreter will throw an EvaluatorException in the script . * Defaults to Integer . MAX _ VALUE . The setting only has effect for * interpreted functions ( those compiled with optimization level set to - 1 ) . * As the interpreter doesn ' t use the Java stack but rather manages its own * stack in the heap memory , a runaway recursion in interpreted code would * eventually consume all available memory and cause OutOfMemoryError * instead of a StackOverflowError limited to only a single thread . This * setting helps prevent such situations . * @ param max the new maximum interpreter stack depth * @ throws IllegalStateException if this context ' s optimization level is not * @ throws IllegalArgumentException if the new depth is not at least 1 */ public final void setMaximumInterpreterStackDepth ( int max ) { } }
if ( sealed ) onSealedMutation ( ) ; if ( optimizationLevel != - 1 ) { throw new IllegalStateException ( "Cannot set maximumInterpreterStackDepth when optimizationLevel != -1" ) ; } if ( max < 1 ) { throw new IllegalArgumentException ( "Cannot set maximumInterpreterStackDepth to less than 1" ) ; } maximumInterpreterStackDepth = max ;
public class ConfigureNodesLocalHost { /** * If the current node exists in the nodes list , bring it to the front */ @ Override public List < Node > getNodes ( ByteArray key ) { } }
logger . debug ( "Giving pref to localhost ! " ) ; List < Node > nodes = null ; List < Node > reorderedNodes = new ArrayList < Node > ( ) ; try { nodes = super . getNodes ( key ) ; if ( nodes == null ) { return null ; } String currentHost = InetAddress . getLocalHost ( ) . getHostName ( ) ; for ( Node n : nodes ) { if ( currentHost . contains ( n . getHost ( ) ) || n . getHost ( ) . contains ( currentHost ) ) { logger . debug ( "Found localhost ! " ) ; reorderedNodes . add ( n ) ; nodes . remove ( n ) ; break ; } } reorderedNodes . addAll ( nodes ) ; nodes = reorderedNodes ; } catch ( VoldemortException e ) { pipelineData . setFatalError ( e ) ; return null ; } catch ( UnknownHostException e ) { e . printStackTrace ( ) ; return null ; } return nodes ;
public class DefaultBambooClient { /** * / / / / Helpers */ private long getCommitTimestamp ( JSONObject jsonItem ) { } }
if ( jsonItem . get ( "timestamp" ) != null ) { return ( Long ) jsonItem . get ( "timestamp" ) ; } else if ( jsonItem . get ( "date" ) != null ) { String dateString = ( String ) jsonItem . get ( "date" ) ; try { return new SimpleDateFormat ( "yyyy-MM-dd'T'HH:mm:ss.SSSXXX" ) . parse ( dateString ) . getTime ( ) ; } catch ( java . text . ParseException e ) { // Try an alternate date format . . . looks like this one is used by Git try { return new SimpleDateFormat ( "yyyy-MM-dd HH:mm:ss Z" ) . parse ( dateString ) . getTime ( ) ; } catch ( java . text . ParseException e1 ) { LOG . error ( "Invalid date string: " + dateString , e ) ; } } } return 0 ;
public class GetVoiceConnectorRequestMarshaller {
    /**
     * Marshalls the given request object into the protocol representation.
     *
     * @param getVoiceConnectorRequest the request to marshall; must not be null
     * @param protocolMarshaller the marshaller to write into
     * @throws SdkClientException if the request is null or marshalling fails
     */
    public void marshall ( GetVoiceConnectorRequest getVoiceConnectorRequest, ProtocolMarshaller protocolMarshaller ) {
        if ( getVoiceConnectorRequest == null ) {
            throw new SdkClientException ( "Invalid argument passed to marshall(...)" );
        }
        try {
            // Only the connector id is marshalled for this request shape.
            protocolMarshaller.marshall ( getVoiceConnectorRequest.getVoiceConnectorId ( ), VOICECONNECTORID_BINDING );
        } catch ( Exception e ) {
            // Wrap any failure in the SDK's client exception, preserving the cause.
            throw new SdkClientException ( "Unable to marshall request to JSON: " + e.getMessage ( ), e );
        }
    }
}
public class CliCommandBuilder { /** * Sets the timeout used when connecting to the server . * @ param timeout the time out to use * @ return the builder */ public CliCommandBuilder setTimeout ( final int timeout ) { } }
if ( timeout > 0 ) { addCliArgument ( CliArgument . TIMEOUT , Integer . toString ( timeout ) ) ; } else { addCliArgument ( CliArgument . TIMEOUT , null ) ; } return this ;
public class AfplibPackageImpl {
    /**
     * Returns the {@link EEnum} for FNC Yft units, lazily resolved from the
     * registered package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EEnum getFNCYftUnits ( ) {
        if ( fncYftUnitsEEnum == null ) {
            // Lazily resolve classifier index 23 from the registered package;
            // the index is fixed by the generated model.
            fncYftUnitsEEnum = ( EEnum ) EPackage.Registry.INSTANCE.getEPackage ( AfplibPackage.eNS_URI ).getEClassifiers ( ).get ( 23 );
        }
        return fncYftUnitsEEnum;
    }
}
public class XSDValidator { /** * Validate some XML against this schema * @ param document * @ throws SchemaValidationException * if the document does not validate against the schema */ public void validate ( final Node document ) throws SchemaValidationException { } }
try { final Validator validator = schema . newValidator ( ) ; validator . validate ( new DOMSource ( document ) ) ; } catch ( SAXException | IOException e ) { throw new SchemaValidationException ( e . getMessage ( ) , e ) ; }
public class StatsInterface {
    /**
     * Gets the number of views, comments and favorites on a photo for a given date.
     *
     * @param date (Required) Stats will be returned for this date. A day according
     *     to Flickr Stats starts at midnight GMT for all users, and timestamps will
     *     automatically be rounded down to the start of the day.
     * @param photoId (Required) The id of the photo to get stats for.
     * @see "http://www.flickr.com/services/api/flickr.stats.getPhotoStats.htm"
     */
    public Stats getPhotoStats ( String photoId, Date date ) throws FlickrException {
        // Delegate to the shared stats call with the photo-specific method and parameter.
        return getStats ( METHOD_GET_PHOTO_STATS, "photo_id", photoId, date );
    }
}
public class NtlmContext { /** * { @ inheritDoc } * @ see jcifs . smb . SSPContext # dispose ( ) */ @ Override public void dispose ( ) throws SmbException { } }
this . isEstablished = false ; this . sealClientHandle = null ; this . sealServerHandle = null ; this . sealClientKey = null ; this . sealServerKey = null ; this . masterKey = null ; this . signKey = null ; this . verifyKey = null ; this . type1Bytes = null ;
public class MetaMediaManager { /** * Pauses the sprites and animations that are currently active on this media panel . Also stops * listening to the frame tick while paused . */ public void setPaused ( boolean paused ) { } }
// sanity check if ( ( paused && ( _pauseTime != 0 ) ) || ( ! paused && ( _pauseTime == 0 ) ) ) { log . warning ( "Requested to pause when paused or vice-versa" , "paused" , paused ) ; return ; } _paused = paused ; if ( _paused ) { // make a note of our pause time _pauseTime = _framemgr . getTimeStamp ( ) ; } else { // let the animation and sprite managers know that we just warped into the future long delta = _framemgr . getTimeStamp ( ) - _pauseTime ; _animmgr . fastForward ( delta ) ; _spritemgr . fastForward ( delta ) ; // clear out our pause time _pauseTime = 0 ; }
public class ExceptionDialog {
    /**
     * GUI-builder generated handler: when the dialog opens, hide the details
     * pane and re-layout so the dialog starts in its compact form.
     */
    // GEN-LAST:event_btCloseActionPerformed
    private void formWindowOpened ( java.awt.event.WindowEvent evt ) // GEN-FIRST:event_formWindowOpened
    {
        // GEN-HEADEREND:event_formWindowOpened
        // Start with the details collapsed; pack/validate to resize the dialog.
        spDetails.setVisible ( false );
        pack ( );
        validate ( );
    }
}
public class Gild {
    /**
     * Moves to the next stage in the staged test run: preserves the state of
     * all stage executors, advances the current stage, and prepares the new one.
     *
     * @param stageName the next stage name; must not be null
     */
    public void nextStage ( final String stageName ) {
        assertNotNull ( "Cannot move to a null stage", stageName );
        // Snapshot every executor before leaving the current stage.
        execs.forEach ( consumer ( StageExec::preserve ) );
        stage = stage.nextStage ( stageName );
        prepare ( );
    }
}
public class Boxing {
    /**
     * Transforms any array into an array of {@code byte}.
     *
     * @param src source array (any array type accepted by array())
     * @param srcPos start position
     * @param len length
     * @return byte array
     */
    public static byte [ ] unboxBytes ( Object src, int srcPos, int len ) {
        // Normalize the untyped source via array(), then delegate to the typed overload.
        return unboxBytes ( array ( src ), srcPos, len );
    }
}
public class DeployerResolverOverriderConverter {
    /**
     * Converts the (ServerDetails) "details" field to the renamed
     * "deployerDetails" field if the latter is not populated yet.
     * This conversion follows a field rename (details -> deployerDetails).
     *
     * Reflection failures are collected in converterErrors rather than thrown,
     * so one broken overrider does not abort the whole conversion pass.
     */
    private void overrideDeployerDetails ( T overrider, Class overriderClass ) {
        if ( overrider instanceof DeployerOverrider ) {
            try {
                Field deployerDetailsField = overriderClass.getDeclaredField ( "deployerDetails" );
                deployerDetailsField.setAccessible ( true );
                Object deployerDetails = deployerDetailsField.get ( overrider );
                // Only migrate when the new field has not been set yet.
                if ( deployerDetails == null ) {
                    Field oldDeployerDetailsField = overriderClass.getDeclaredField ( "details" );
                    oldDeployerDetailsField.setAccessible ( true );
                    Object oldDeployerDetails = oldDeployerDetailsField.get ( overrider );
                    if ( oldDeployerDetails != null ) {
                        ServerDetails deployerServerDetails =
                            createInitialDeployDetailsFromOldDeployDetails ( ( ServerDetails ) oldDeployerDetails );
                        deployerDetailsField.set ( overrider, deployerServerDetails );
                    }
                }
            } catch ( NoSuchFieldException | IllegalAccessException e ) {
                converterErrors.add ( getConversionErrorMessage ( overrider, e ) );
            }
        }
    }
}
public class IOUtils { /** * Reads reverse int * @ param in * Source * @ return int */ public final static int readReverseInt ( IoBuffer in ) { } }
int value = in . getInt ( ) ; value = ( ( value & 0xFF ) << 24 | ( ( value >> 8 ) & 0x00FF ) << 16 | ( ( value >>> 16 ) & 0x000000FF ) << 8 | ( ( value >>> 24 ) & 0x000000FF ) ) ; return value ;
public class XMLEncodingDetector {
    /**
     * Returns the next character on the input.
     * <strong>Note:</strong> The character is <em>not</em> consumed.
     *
     * @throws IOException Thrown if i/o error occurs.
     * @throws EOFException Thrown on end of file.
     */
    public int peekChar ( ) throws IOException {
        // load more characters, if needed
        if ( fCurrentEntity.position == fCurrentEntity.count ) {
            load ( 0, true );
        }
        // peek at character
        int c = fCurrentEntity.ch [ fCurrentEntity.position ];
        // return peeked character; external entities normalize CR to LF
        // per the XML line-end normalization rules.
        if ( fCurrentEntity.isExternal ( ) ) {
            return c != '\r' ? c : '\n';
        } else {
            return c;
        }
    }
}
public class LocalDateTime {
    /**
     * Handle broken serialization from other tools: normalize a null chronology
     * to ISO-UTC and force any zoned chronology to its UTC variant, since a
     * LocalDateTime must always hold a UTC-zoned chronology.
     *
     * @return the resolved object, not null
     */
    private Object readResolve ( ) {
        // A missing chronology is replaced by the ISO default in UTC.
        if ( iChronology == null ) {
            return new LocalDateTime ( iLocalMillis, ISOChronology.getInstanceUTC ( ) );
        }
        // Any non-UTC zone is coerced to UTC to restore the class invariant.
        if ( DateTimeZone.UTC.equals ( iChronology.getZone ( ) ) == false ) {
            return new LocalDateTime ( iLocalMillis, iChronology.withUTC ( ) );
        }
        return this;
    }
}
public class AsmDecompiler {
    /**
     * Loads the URL contents and parses them with ASM, producing a {@link ClassStub}
     * object representing the structure of the corresponding class file. Stubs are
     * cached (via soft references keyed by URI) and reused if queried several times
     * with equal URLs.
     *
     * @param url an URL from a class loader, most likely a file system file or a JAR entry
     * @return the class stub
     * @throws IOException if reading from this URL is impossible
     */
    public static ClassStub parseClass ( URL url ) throws IOException {
        // The cache key is the URI form; a malformed URL is a programming error here.
        URI uri;
        try {
            uri = url.toURI ( );
        } catch ( URISyntaxException e ) {
            throw new GroovyRuntimeException ( e );
        }
        // Soft references let the cache shrink under memory pressure.
        SoftReference<ClassStub> ref = StubCache.map.get ( uri );
        ClassStub stub = ref == null ? null : ref.get ( );
        if ( stub == null ) {
            DecompilingVisitor visitor = new DecompilingVisitor ( );
            // SKIP_FRAMES: stack map frames are irrelevant for structural stubs.
            try ( InputStream stream = new BufferedInputStream ( URLStreams.openUncachedStream ( url ) ) ) {
                new ClassReader ( stream ).accept ( visitor, ClassReader.SKIP_FRAMES );
            }
            stub = visitor.result;
            StubCache.map.put ( uri, new SoftReference<ClassStub> ( stub ) );
        }
        return stub;
    }
}
public class Ifc4PackageImpl {
    /**
     * Returns the {@link EClass} for IfcLightSourceGoniometric, lazily resolved
     * from the registered package.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcLightSourceGoniometric ( ) {
        if ( ifcLightSourceGoniometricEClass == null ) {
            // Lazily resolve classifier index 347 from the registered package;
            // the index is fixed by the generated model.
            ifcLightSourceGoniometricEClass = ( EClass ) EPackage.Registry.INSTANCE.getEPackage ( Ifc4Package.eNS_URI ).getEClassifiers ( ).get ( 347 );
        }
        return ifcLightSourceGoniometricEClass;
    }
}
public class CPOptionLocalServiceWrapper {
    /**
     * Deletes the cp option from the database. Also notifies the appropriate
     * model listeners. Delegates to the wrapped local service.
     *
     * @param cpOption the cp option
     * @return the cp option that was removed
     * @throws PortalException
     */
    @Override
    public com.liferay.commerce.product.model.CPOption deleteCPOption (
            com.liferay.commerce.product.model.CPOption cpOption )
        throws com.liferay.portal.kernel.exception.PortalException {
        return _cpOptionLocalService.deleteCPOption ( cpOption );
    }
}
public class ConnectionHandle { /** * # ifdef JDK > 6 */ public Properties getClientInfo ( ) throws SQLException { } }
Properties result = null ; checkClosed ( ) ; try { result = this . connection . getClientInfo ( ) ; } catch ( SQLException e ) { throw markPossiblyBroken ( e ) ; } return result ;
public class AmazonRedshiftClient {
    /**
     * Returns a list of parameter settings for the specified parameter group family.
     * For more information about parameters and parameter groups, go to
     * <a href="https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-parameter-groups.html">
     * Amazon Redshift Parameter Groups</a> in the <i>Amazon Redshift Cluster Management Guide</i>.
     *
     * @param describeDefaultClusterParametersRequest
     * @return Result of the DescribeDefaultClusterParameters operation returned by the service.
     * @sample AmazonRedshift.DescribeDefaultClusterParameters
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/DescribeDefaultClusterParameters"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public DefaultClusterParameters describeDefaultClusterParameters ( DescribeDefaultClusterParametersRequest request ) {
        // Apply SDK request handlers/config before dispatching the call.
        request = beforeClientExecution ( request );
        return executeDescribeDefaultClusterParameters ( request );
    }
}
public class TileGroupsConfig { /** * Export the group data as a node . * @ param nodeGroups The root node ( must not be < code > null < / code > ) . * @ param group The group to export ( must not be < code > null < / code > ) . */ private static void exportGroup ( Xml nodeGroups , TileGroup group ) { } }
final Xml nodeGroup = nodeGroups . createChild ( NODE_GROUP ) ; nodeGroup . writeString ( ATT_GROUP_NAME , group . getName ( ) ) ; nodeGroup . writeString ( ATT_GROUP_TYPE , group . getType ( ) . name ( ) ) ; for ( final TileRef tileRef : group . getTiles ( ) ) { final Xml nodeTileRef = TileConfig . exports ( tileRef ) ; nodeGroup . add ( nodeTileRef ) ; }
public class StringUtils { /** * < p > Swaps the case of a String changing upper and title case to * lower case , and lower case to upper case . < / p > * < ul > * < li > Upper case character converts to Lower case < / li > * < li > Title case character converts to Lower case < / li > * < li > Lower case character converts to Upper case < / li > * < / ul > * < p > For a word based algorithm , see { @ link org . apache . commons . lang3 . text . WordUtils # swapCase ( String ) } . * A { @ code null } input String returns { @ code null } . < / p > * < pre > * StringUtils . swapCase ( null ) = null * StringUtils . swapCase ( " " ) = " " * StringUtils . swapCase ( " The dog has a BONE " ) = " tHE DOG HAS A bone " * < / pre > * < p > NOTE : This method changed in Lang version 2.0. * It no longer performs a word based algorithm . * If you only use ASCII , you will notice no change . * That functionality is available in org . apache . commons . lang3 . text . WordUtils . < / p > * @ param str the String to swap case , may be null * @ return the changed String , { @ code null } if null String input */ @ GwtIncompatible ( "incompatible method" ) public static String swapCase ( final String str ) { } }
if ( StringUtils . isEmpty ( str ) ) { return str ; } final int strLen = str . length ( ) ; final int newCodePoints [ ] = new int [ strLen ] ; // cannot be longer than the char array int outOffset = 0 ; for ( int i = 0 ; i < strLen ; ) { final int oldCodepoint = str . codePointAt ( i ) ; final int newCodePoint ; if ( Character . isUpperCase ( oldCodepoint ) ) { newCodePoint = Character . toLowerCase ( oldCodepoint ) ; } else if ( Character . isTitleCase ( oldCodepoint ) ) { newCodePoint = Character . toLowerCase ( oldCodepoint ) ; } else if ( Character . isLowerCase ( oldCodepoint ) ) { newCodePoint = Character . toUpperCase ( oldCodepoint ) ; } else { newCodePoint = oldCodepoint ; } newCodePoints [ outOffset ++ ] = newCodePoint ; i += Character . charCount ( newCodePoint ) ; } return new String ( newCodePoints , 0 , outOffset ) ;
public class DeviceImpl {
    /**
     * Returns the polling history of a command, i.e. the last {@code n} cached
     * results for the named command (capped at what the polling ring buffer holds).
     *
     * @param command the command name (matched case-insensitively)
     * @param n the maximum number of history records to return
     * @return the cached command history
     * @throws DevFailed if the command does not exist, is not polled,
     *     or no data is cached yet
     */
    public DevCmdHistory [ ] command_inout_history_2 ( final String command, int n )
        throws DevFailed, SystemException {
        Util.out4.println ( "Device_2Impl.command_inout_history_2 arrived" );
        final String cmd_str = command.toLowerCase ( );

        // Record operation request in black box
        blackbox.insert_op ( Op_Command_inout_history_2 );

        // Check that device supports this command
        check_command_exists ( cmd_str );

        // Check that the command is polled
        PollObj polled_cmd = null;
        final Vector poll_list = get_poll_obj_list ( );
        for ( int i = 0; i < poll_list.size ( ); i++ ) {
            final PollObj poll_obj = ( PollObj ) poll_list.elementAt ( i );
            if ( poll_obj.get_type ( ) == Tango_POLL_CMD && poll_obj.get_name ( ).equals ( cmd_str ) ) {
                polled_cmd = poll_obj;
            }
        }
        if ( polled_cmd == null ) // NOT found
        {
            // Except.throw_exception raises DevFailed, so execution stops here.
            Except.throw_exception ( "API_CmdNotPolled", "Command " + cmd_str + " not polled",
                "Device_2Impl.command_inout_history_2" );
        }

        // Check that some data is available in cache
        assert polled_cmd != null;
        if ( polled_cmd.is_ring_empty ( ) ) {
            Except.throw_exception ( "API_NoDataYet", "No data available in cache for command " + cmd_str,
                "Device_2Impl.command_inout_history_2" );
        }

        // Set the number of returned records (capped by the ring buffer content)
        final int in_buf = polled_cmd.get_elt_nb_in_buffer ( );
        if ( n > in_buf ) {
            n = in_buf;
        }

        // return command result history
        return polled_cmd.get_cmd_history ( n );
    }
}
public class PdfDocument { /** * Sets the margins . * @ parammarginLeftthe margin on the left * @ parammarginRightthe margin on the right * @ parammarginTopthe margin on the top * @ parammarginBottomthe margin on the bottom * @ returna < CODE > boolean < / CODE > */ public boolean setMargins ( float marginLeft , float marginRight , float marginTop , float marginBottom ) { } }
if ( writer != null && writer . isPaused ( ) ) { return false ; } nextMarginLeft = marginLeft ; nextMarginRight = marginRight ; nextMarginTop = marginTop ; nextMarginBottom = marginBottom ; return true ;
public class XesXmlSerializer {
    /**
     * Serializes the given log to the output stream in XES.XML format:
     * header comments, extensions, global attributes, classifiers, log
     * attributes, then every trace with its events.
     *
     * @see org.deckfour.xes.out.XesSerializer#serialize(org.deckfour.xes.model.XLog,
     *      java.io.OutputStream)
     */
    public void serialize ( XLog log, OutputStream out ) throws IOException {
        XLogging.log ( "start serializing log to XES.XML", XLogging.Importance.DEBUG );
        long start = System.currentTimeMillis ( );

        SXDocument doc = new SXDocument ( out );
        doc.addComment ( "This file has been generated with the OpenXES library. It conforms" );
        doc.addComment ( "to the XML serialization of the XES standard for log storage and" );
        doc.addComment ( "management." );
        doc.addComment ( "XES standard version: " + XRuntimeUtils.XES_VERSION );
        doc.addComment ( "OpenXES library version: " + XRuntimeUtils.OPENXES_VERSION );
        doc.addComment ( "OpenXES is available from http://www.openxes.org/" );

        SXTag logTag = doc.addNode ( "log" );
        logTag.addAttribute ( "xes.version", XRuntimeUtils.XES_VERSION );
        logTag.addAttribute ( "xes.features", "nested-attributes" );
        logTag.addAttribute ( "openxes.version", XRuntimeUtils.OPENXES_VERSION );
        logTag.addAttribute ( "xmlns", "http://www.xes-standard.org/" );

        // define extensions
        for ( XExtension extension : log.getExtensions ( ) ) {
            SXTag extensionTag = logTag.addChildNode ( "extension" );
            extensionTag.addAttribute ( "name", extension.getName ( ) );
            extensionTag.addAttribute ( "prefix", extension.getPrefix ( ) );
            extensionTag.addAttribute ( "uri", extension.getUri ( ).toString ( ) );
        }

        // define global attributes
        addGlobalAttributes ( logTag, "trace", log.getGlobalTraceAttributes ( ) );
        addGlobalAttributes ( logTag, "event", log.getGlobalEventAttributes ( ) );

        // define classifiers (only attribute-based classifiers are serializable)
        for ( XEventClassifier classifier : log.getClassifiers ( ) ) {
            if ( classifier instanceof XEventAttributeClassifier ) {
                XEventAttributeClassifier attrClass = ( XEventAttributeClassifier ) classifier;
                SXTag clsTag = logTag.addChildNode ( "classifier" );
                clsTag.addAttribute ( "name", attrClass.name ( ) );
                clsTag.addAttribute ( "keys",
                    XTokenHelper.formatTokenString ( ( List<String> ) Arrays.asList ( attrClass.getDefiningAttributeKeys ( ) ) ) );
            }
        }

        // add log attributes
        addAttributes ( logTag, log.getAttributes ( ).values ( ) );

        // serialize every trace with its nested events
        for ( XTrace trace : log ) {
            SXTag traceTag = logTag.addChildNode ( "trace" );
            addAttributes ( traceTag, trace.getAttributes ( ).values ( ) );
            for ( XEvent event : trace ) {
                SXTag eventTag = traceTag.addChildNode ( "event" );
                addAttributes ( eventTag, event.getAttributes ( ).values ( ) );
            }
        }
        doc.close ( );

        String duration = " (" + ( System.currentTimeMillis ( ) - start ) + " msec.)";
        XLogging.log ( "finished serializing log" + duration, XLogging.Importance.DEBUG );
    }
}
public class ConstructorFilterBuilder { /** * Adds a filter for default access constructors only . * @ return The builder to support method chaining . */ public ConstructorFilterBuilder isDefault ( ) { } }
add ( new NegationConstructorFilter ( new ModifierConstructorFilter ( Modifier . PUBLIC & Modifier . PROTECTED & Modifier . PRIVATE ) ) ) ; return this ;
public class CmsSearchManager { /** * Returns an analyzer for the given language . < p > * The analyzer is selected according to the analyzer configuration . < p > * @ param locale the locale to get the analyzer for * @ return the appropriate lucene analyzer * @ throws CmsSearchException if something goes wrong */ public Analyzer getAnalyzer ( Locale locale ) throws CmsSearchException { } }
Analyzer analyzer = null ; String className = null ; CmsSearchAnalyzer analyzerConf = m_analyzers . get ( locale ) ; if ( analyzerConf == null ) { throw new CmsSearchException ( Messages . get ( ) . container ( Messages . ERR_ANALYZER_NOT_FOUND_1 , locale ) ) ; } try { analyzer = getAnalyzer ( analyzerConf . getClassName ( ) ) ; } catch ( Exception e ) { throw new CmsSearchException ( Messages . get ( ) . container ( Messages . ERR_LOAD_ANALYZER_1 , className ) , e ) ; } return analyzer ;
public class Claim { /** * syntactic sugar */ public Claim addValueCode ( Coding t ) { } }
if ( t == null ) return this ; if ( this . valueCode == null ) this . valueCode = new ArrayList < Coding > ( ) ; this . valueCode . add ( t ) ; return this ;
public class JcrNodeDefinition { /** * Determine if this node definition will allow a child with the supplied primary type . This method checks this definition ' s * { @ link # getRequiredPrimaryTypes ( ) } against the supplied primary type and its supertypes . The supplied primary type for the * child must be or extend all of the types defined by the { @ link # getRequiredPrimaryTypes ( ) required primary types } . * @ param childPrimaryType the primary type of the child * @ return true if the primary type of the child ( or one of its supertypes ) is one of the types required by this definition , * or false otherwise */ final boolean allowsChildWithType ( JcrNodeType childPrimaryType ) { } }
if ( childPrimaryType == null ) { // The definition must have a default primary type . . . if ( defaultPrimaryTypeName != null ) { return true ; } return false ; } // The supplied primary type must be or extend all of the required primary types . . . for ( Name requiredPrimaryTypeName : requiredPrimaryTypeNameSet ) { if ( ! childPrimaryType . isNodeType ( requiredPrimaryTypeName ) ) return false ; } return true ;
public class TFIDFAnalyzer { /** * tfidf分析方法 * @ param content 需要分析的文本 / 文档内容 * @ param topN 需要返回的tfidf值最高的N个关键词 , 若超过content本身含有的词语上限数目 , 则默认返回全部 * @ return */ public List < Keyword > analyze ( String content , int topN ) { } }
List < Keyword > keywordList = new ArrayList < > ( ) ; if ( stopWordsSet == null ) { stopWordsSet = new HashSet < > ( ) ; loadStopWords ( stopWordsSet , this . getClass ( ) . getResourceAsStream ( "/stop_words.txt" ) ) ; } if ( idfMap == null ) { idfMap = new HashMap < > ( ) ; loadIDFMap ( idfMap , this . getClass ( ) . getResourceAsStream ( "/idf_dict.txt" ) ) ; } Map < String , Double > tfMap = getTF ( content ) ; for ( String word : tfMap . keySet ( ) ) { // 若该词不在idf文档中 , 则使用平均的idf值 ( 可能定期需要对新出现的网络词语进行纳入 ) if ( idfMap . containsKey ( word ) ) { keywordList . add ( new Keyword ( word , idfMap . get ( word ) * tfMap . get ( word ) ) ) ; } else keywordList . add ( new Keyword ( word , idfMedian * tfMap . get ( word ) ) ) ; } Collections . sort ( keywordList ) ; if ( keywordList . size ( ) > topN ) { int num = keywordList . size ( ) - topN ; for ( int i = 0 ; i < num ; i ++ ) { keywordList . remove ( topN ) ; } } return keywordList ;
public class CmsPreferences { /** * Builds the html for the language select box of the start settings . < p > * @ param htmlAttributes optional html attributes for the & lgt ; select & gt ; tag * @ return the html for the language select box */ public String buildSelectLanguage ( String htmlAttributes ) { } }
SelectOptions selectOptions = getOptionsForLanguage ( ) ; return buildSelect ( htmlAttributes , selectOptions ) ;
public class PlaceManager { /** * Registers an invocation provider and notes the registration such that it will be * automatically cleared when this manager shuts down . */ protected < T extends InvocationMarshaller < ? > > T addProvider ( InvocationProvider prov , Class < T > mclass ) { } }
T marsh = _invmgr . registerProvider ( prov , mclass ) ; _marshallers . add ( marsh ) ; return marsh ;
public class ConvertHelper { /** * Konvertiert ein beliebiges Objekt in ein byte - Array und schleust dieses durch ein InputStream . * @ param o * @ return * @ throws IOException */ public static InputStream convertObjectToInputStream ( Object o ) throws IOException { } }
byte [ ] bObject = convertObjectToByteArray ( o ) ; return new ByteArrayInputStream ( bObject ) ;
public class SessionImpl {
    /**
     * {@inheritDoc}
     * <p>
     * Delegates to the four-argument overload, supplying this session's default
     * flags for whether events are triggered for descendants on rename and move.
     */
    public void move(String srcAbsPath, String destAbsPath) throws ItemExistsException, PathNotFoundException, VersionException, LockException, RepositoryException {
        // In this particular case we rely on the default configuration
        move(srcAbsPath, destAbsPath, triggerEventsForDescendantsOnRename, triggerEventsForDescendantsOnMove);
    }
}
public class FileWriterServices {
    /**
     * Copy path/filename from the ocelot-processor jar into the classes directory of
     * the current project/module.
     *
     * @param path     resource directory inside the jar
     * @param filename name of the resource file to copy
     */
    public void copyResourceToClassesOutput(String path, String filename) {
        String fullpath = path + ProcessorConstants.SEPARATORCHAR + filename;
        // MANDATORY_WARNING is used here for progress reporting during annotation processing.
        messager.printMessage(Diagnostic.Kind.MANDATORY_WARNING, " javascript copy js : " + fullpath + " to : class dir");
        // try-with-resources guarantees the output writer is closed.
        try (Writer writer = getFileObjectWriterInClassOutput("", filename)) {
            bodyWriter.write(writer, OcelotProcessor.class.getResourceAsStream(fullpath));
        } catch (IOException ex) {
            // Failures are reported through the messager instead of aborting processing.
            messager.printMessage(Diagnostic.Kind.MANDATORY_WARNING, " FAILED TO CREATE : " + fullpath + " : " + ex.getMessage());
        }
    }
}
public class KnowledgePackageImpl { /** * Get the rule flows for this package . The key is the ruleflow id . It will * be Collections . EMPTY _ MAP if none have been added . */ public Map < String , Process > getRuleFlows ( ) { } }
ProcessPackage rtp = ( ProcessPackage ) getResourceTypePackages ( ) . get ( ResourceType . BPMN2 ) ; return rtp == null ? Collections . emptyMap ( ) : rtp . getRuleFlows ( ) ;
public class nsacl6 { /** * Use this API to enable nsacl6 of given name . */ public static base_response enable ( nitro_service client , String acl6name ) throws Exception { } }
nsacl6 enableresource = new nsacl6 ( ) ; enableresource . acl6name = acl6name ; return enableresource . perform_operation ( client , "enable" ) ;
public class Controller { /** * Get para from url with default value if it is null or " " . */ public String getPara ( int index , String defaultValue ) { } }
String result = getPara ( index ) ; return result != null && ! "" . equals ( result ) ? result : defaultValue ;
public class StopProcessEnginesStep {
    /**
     * Stops a process engine; failures are logged but no exceptions are thrown, so a
     * single failing engine does not prevent the remaining ones from being stopped.
     *
     * @param serviceName      name of the engine service to stop
     * @param serviceContainer container managing the platform services
     */
    private void stopProcessEngine(String serviceName, PlatformServiceContainer serviceContainer) {
        try {
            serviceContainer.stopService(serviceName);
        } catch (Exception e) {
            // Deliberate best-effort: log the failure and continue.
            LOG.exceptionWhileStopping("Process Engine", serviceName, e);
        }
    }
}
public class IdRange { /** * Parses a uid sequence , a comma separated list of uid ranges . * Example : 1 2:5 8 : * * @ param idRangeSequence the sequence * @ return a list of ranges , never null . */ public static List < IdRange > parseRangeSequence ( String idRangeSequence ) { } }
StringTokenizer tokenizer = new StringTokenizer ( idRangeSequence , " " ) ; List < IdRange > ranges = new ArrayList < > ( ) ; while ( tokenizer . hasMoreTokens ( ) ) { ranges . add ( parseRange ( tokenizer . nextToken ( ) ) ) ; } return ranges ;
public class BytesWritable { /** * Change the capacity of the backing storage . * The data is preserved . * @ param new _ cap The new capacity in bytes . */ public void setCapacity ( int new_cap ) { } }
if ( new_cap != getCapacity ( ) ) { byte [ ] new_data = new byte [ new_cap ] ; if ( new_cap < size ) { size = new_cap ; } if ( size != 0 ) { System . arraycopy ( bytes , 0 , new_data , 0 , size ) ; } bytes = new_data ; }
public class SequenceLabelerEventStream { /** * Generated previous decision features for each token based on contents of * the specified map . * @ param tokens * The token for which the context is generated . * @ param prevMap * A mapping of tokens to their previous decisions . * @ return An additional context array with features for each token . */ public static String [ ] [ ] additionalContext ( final String [ ] tokens , final Map < String , String > prevMap ) { } }
final String [ ] [ ] ac = new String [ tokens . length ] [ 1 ] ; for ( int ti = 0 ; ti < tokens . length ; ti ++ ) { final String pt = prevMap . get ( tokens [ ti ] ) ; ac [ ti ] [ 0 ] = "pd=" + pt ; } return ac ;
public class Content {
    /**
     * Sets the lastModifiedDateTime value for this Content.
     *
     * @param lastModifiedDateTime The date and time at which this content was last
     *                             modified. This attribute is read-only on the server
     *                             side; the setter only updates the local field.
     */
    public void setLastModifiedDateTime(com.google.api.ads.admanager.axis.v201902.DateTime lastModifiedDateTime) {
        this.lastModifiedDateTime = lastModifiedDateTime;
    }
}
public class UnixResolverDnsServerAddressStreamProvider { /** * Parse a file of the format < a href = " https : / / linux . die . net / man / 5 / resolver " > / etc / resolv . conf < / a > and return the * value corresponding to the first ndots in an options configuration . * @ param etcResolvConf a file of the format < a href = " https : / / linux . die . net / man / 5 / resolver " > / etc / resolv . conf < / a > . * @ return the value corresponding to the first ndots in an options configuration , or { @ link # DEFAULT _ NDOTS } if not * found . * @ throws IOException If a failure occurs parsing the file . */ static int parseEtcResolverFirstNdots ( File etcResolvConf ) throws IOException { } }
FileReader fr = new FileReader ( etcResolvConf ) ; BufferedReader br = null ; try { br = new BufferedReader ( fr ) ; String line ; while ( ( line = br . readLine ( ) ) != null ) { if ( line . startsWith ( OPTIONS_ROW_LABEL ) ) { int i = line . indexOf ( NDOTS_LABEL ) ; if ( i >= 0 ) { i += NDOTS_LABEL . length ( ) ; final int j = line . indexOf ( ' ' , i ) ; return Integer . parseInt ( line . substring ( i , j < 0 ? line . length ( ) : j ) ) ; } break ; } } } finally { if ( br == null ) { fr . close ( ) ; } else { br . close ( ) ; } } return DEFAULT_NDOTS ;
public class ServerHandshaker { /** * Get some " ephemeral " RSA keys for this context . This means * generating them if it ' s not already been done . * Note that we currently do not implement any ciphersuites that use * strong ephemeral RSA . ( We do not support the EXPORT1024 ciphersuites * and standard RSA ciphersuites prohibit ephemeral mode for some reason ) * This means that export is always true and 512 bit keys are generated . */ private boolean setupEphemeralRSAKeys ( boolean export ) { } }
KeyPair kp = sslContext . getEphemeralKeyManager ( ) . getRSAKeyPair ( export , sslContext . getSecureRandom ( ) ) ; if ( kp == null ) { return false ; } else { tempPublicKey = kp . getPublic ( ) ; tempPrivateKey = kp . getPrivate ( ) ; return true ; }
public class ClientRegistry {
    /**
     * Calls each registered {@link IRenderWorldLast} whose {@code shouldRender} check
     * passes for the current client world.
     *
     * @param event the render event
     */
    @SubscribeEvent
    public void onRenderLast(RenderWorldLastEvent event) {
        for (IRenderWorldLast renderer : renderWorldLastRenderers)
            // Each renderer decides for itself whether it applies to this frame/world.
            if (renderer.shouldRender(event, Utils.getClientWorld()))
                renderer.renderWorldLastEvent(event, Utils.getClientWorld());
    }
}
public class TimeSourceProvider { /** * Get a specific TimeSource by class name * @ param className Class name of the TimeSource to return the instance for * @ return TimeSource instance */ public static TimeSource getInstance ( String className ) { } }
try { Class < ? > c = Class . forName ( className ) ; Method m = c . getMethod ( "getInstance" ) ; return ( TimeSource ) m . invoke ( null ) ; } catch ( Exception e ) { throw new RuntimeException ( "Error getting TimeSource instance for class \"" + className + "\"" , e ) ; }
import java.math.BigDecimal;
import java.math.RoundingMode;

class Main {
    /**
     * Rounds a number to a given number of decimal places using HALF_UP rounding.
     *
     * @param num       the number to round
     * @param precision the number of decimal places to keep
     * @return {@code num} rounded half-up to {@code precision} decimal places
     */
    public static double roundNumber(double num, int precision) {
        // Double.toString gives the shortest decimal representation, avoiding the
        // binary-representation noise that new BigDecimal(double) would round on.
        return new BigDecimal(Double.toString(num))
                .setScale(precision, RoundingMode.HALF_UP)
                .doubleValue();
    }

    public static void main(String[] args) {
        System.out.println(roundNumber(123.01247, 0)); // Output: 123.0
        System.out.println(roundNumber(123.01247, 1)); // Output: 123.0
        System.out.println(roundNumber(123.01247, 2)); // Output: 123.01
    }
}
public class ZooKeeperMasterModel {
    /**
     * Returns the current status of the host named by {@code host}.
     * <p>
     * Returns {@code null} when the host is not registered in ZooKeeper; otherwise the
     * status aggregates liveness, host/agent info, deployments, task statuses,
     * environment and labels read from ZooKeeper.
     */
    @Override
    public HostStatus getHostStatus(final String host) {
        final ZooKeeperClient client = provider.get("getHostStatus");
        if (!ZooKeeperRegistrarUtil.isHostRegistered(client, host)) {
            log.warn("Host {} isn't registered in ZooKeeper.", host);
            return null;
        }
        final boolean up = checkHostUp(client, host);
        final HostInfo hostInfo = getHostInfo(client, host);
        final AgentInfo agentInfo = getAgentInfo(client, host);
        final Map<JobId, Deployment> tasks = getTasks(client, host);
        final Map<JobId, TaskStatus> statuses = getTaskStatuses(client, host);
        final Map<String, String> environment = getEnvironment(client, host);
        final Map<String, String> labels = getLabels(client, host);
        return HostStatus.newBuilder()
                .setJobs(tasks)
                // Statuses may be absent in ZooKeeper; fall back to an empty map.
                .setStatuses(fromNullable(statuses).or(EMPTY_STATUSES))
                .setHostInfo(hostInfo)
                .setAgentInfo(agentInfo)
                .setStatus(up ? UP : DOWN)
                .setEnvironment(environment)
                .setLabels(labels)
                .build();
    }
}
public class StringUtilities {
    /**
     * Return a variable substituted string, but if a value hasn't been specified, then
     * take out the ${variable} part and leave it blank/empty/null.
     *
     * @param attrString the template string to substitute into
     * @param attributes attribute values to check for and substitute
     * @return the substituted string
     */
    public static String computeAttrString(final String attrString, final Map<String, ? extends Object> attributes) {
        // Delegates to the four-argument overload; the 'true' flag presumably enables
        // blanking unresolved variables — confirm against that overload's signature.
        return computeAttrString(attrString, attributes, true, null);
    }
}
public class PropertyLoader { /** * Returns the generic type of the value for given element . { @ link Field } and { @ link Method } * are only supported . */ protected Type getValueGenericType ( AnnotatedElement element ) { } }
if ( element instanceof Field ) { return ( ( Field ) element ) . getGenericType ( ) ; } if ( element instanceof Method ) { return ( ( Method ) element ) . getGenericReturnType ( ) ; } throw new PropertyLoaderException ( "Could not get generic type for element" ) ;
public class PrefsTransformer {
    /**
     * Get the java.util type transform for the given type.
     *
     * @param type the type to look up
     * @return the matching prefs transform, or {@code null} when the type is not a
     *         supported java.util type
     */
    static PrefsTransform getUtilTransform(TypeName type) {
        String typeName = type.toString();
        // Match the type's string form against the canonical names of the
        // supported java.util classes.
        if (Date.class.getCanonicalName().equals(typeName)) {
            return new DatePrefsTransform();
        }
        if (Locale.class.getCanonicalName().equals(typeName)) {
            return new LocalePrefsTransform();
        }
        if (Currency.class.getCanonicalName().equals(typeName)) {
            return new CurrencyPrefsTransform();
        }
        if (Calendar.class.getCanonicalName().equals(typeName)) {
            return new CalendarPrefsTransform();
        }
        if (TimeZone.class.getCanonicalName().equals(typeName)) {
            return new TimeZonePrefsTransform();
        }
        // Not a supported java.util type.
        return null;
    }
}
public class CashbillServiceImp {
    /**
     * (non-Javadoc)
     * @see com.popbill.api.CashbillService#getChargeInfo(java.lang.String)
     */
    @Override
    public ChargeInfo getChargeInfo(String CorpNum) throws PopbillException {
        // GET against the cashbill charge-info endpoint for the given corp number;
        // the response body is deserialized into a ChargeInfo.
        return httpget("/Cashbill/ChargeInfo", CorpNum, null, ChargeInfo.class);
    }
}
public class LocatedString {
    /**
     * Gets the substring of the reference string corresponding to a range of content
     * string offsets. This will include intervening characters which may not themselves
     * correspond to any content string character.
     * Will return {@link Optional#absent()} if and only if {@link #referenceString()}
     * returns absent.
     * Please refer to the class Javadoc for coverage of available substring options.
     */
    public final Optional<UnicodeFriendlyString> referenceSubstringByContentOffsets(final OffsetRange<CharOffset> contentOffsets) {
        if (referenceString().isPresent()) {
            // Map the content-offset endpoints to reference offsets, then cut the
            // corresponding code-point substring out of the reference string.
            return Optional.of(referenceString().get().substringByCodePoints(OffsetGroupRange.from(startReferenceOffsetsForContentOffset(contentOffsets.startInclusive()), endReferenceOffsetsForContentOffset(contentOffsets.endInclusive())).asCharOffsetRange()));
        } else {
            // No reference string available, so no substring can be produced.
            return Optional.absent();
        }
    }
}
public class ServletOutputStreamImpl {
    /**
     * {@inheritDoc}
     * <p>
     * Writes {@code len} bytes from {@code b} starting at {@code off}. In blocking mode
     * (no write listener registered) the bytes are buffered and flushed when the buffer
     * fills; with a listener registered the write is delegated to the async path.
     *
     * @throws IOException if the stream is closed or the response is treated as committed
     */
    public void write(final byte[] b, final int off, final int len) throws IOException {
        // Reject writes once the stream is closed or the response is already committed.
        if (anyAreSet(state, FLAG_CLOSED) || servletRequestContext.getOriginalResponse().isTreatAsCommitted()) {
            throw UndertowServletMessages.MESSAGES.streamIsClosed();
        }
        // Zero-length (or negative) writes are no-ops.
        if (len < 1) {
            return;
        }
        if (listener == null) {
            // Blocking path: copy into the internal buffer, spilling separately when
            // the chunk is larger than the remaining buffer space.
            ByteBuffer buffer = buffer();
            if (buffer.remaining() < len) {
                writeTooLargeForBuffer(b, off, len, buffer);
            } else {
                buffer.put(b, off, len);
                if (buffer.remaining() == 0) {
                    writeBufferBlocking(false);
                }
            }
            updateWritten(len);
        } else {
            // Async path: a write listener is registered (non-blocking servlet I/O).
            writeAsync(b, off, len);
        }
    }
}
public class AppServiceEnvironmentsInner {
    /**
     * Get metrics for a specific instance of a multi-role pool of an App Service Environment.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param name Name of the App Service Environment.
     * @param instance Name of the instance in the multi-role pool.
     * @param details Specify &lt;code&gt;true&lt;/code&gt; to include instance details. The default is &lt;code&gt;false&lt;/code&gt;.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;ResourceMetricInner&gt; object
     */
    public Observable<Page<ResourceMetricInner>> listMultiRolePoolInstanceMetricsAsync(final String resourceGroupName, final String name, final String instance, final Boolean details) {
        // Unwrap the ServiceResponse envelope, emitting only the page body downstream.
        return listMultiRolePoolInstanceMetricsWithServiceResponseAsync(resourceGroupName, name, instance, details).map(new Func1<ServiceResponse<Page<ResourceMetricInner>>, Page<ResourceMetricInner>>() {
            @Override
            public Page<ResourceMetricInner> call(ServiceResponse<Page<ResourceMetricInner>> response) {
                return response.body();
            }
        });
    }
}
public class ParentsRenderer {
    /**
     * This method is public for testing purposes only. Do not try to call it outside of
     * the context of the rendering engine.
     *
     * @param builder buffer for holding the rendition
     * @param pad     minimum number of spaces for padding each line of the output
     * @param mother  the mother to render
     */
    public void renderMother(final StringBuilder builder, final int pad, final Person mother) {
        // Delegate to the shared parent renderer with the "Mother" role label.
        renderParent(builder, pad, mother, "Mother");
    }
}
public class ExcelUtils {
    /**
     * Reads spreadsheet data from an Excel file and returns it as a
     * {@code List<List<String>>} collection.
     *
     * @param excelPath  path of the Excel file to read
     * @param offsetLine header row offset of the sheet (0 by default)
     * @return the sheet contents as a {@code List<List<String>>}
     * @throws IOException            on I/O failure
     * @throws InvalidFormatException if the file is not a valid workbook
     * @author Crab2Died
     */
    public List<List<String>> readExcel2List(String excelPath, int offsetLine) throws IOException, InvalidFormatException {
        // try-with-resources closes the workbook (and the underlying stream) automatically.
        try (Workbook workbook = WorkbookFactory.create(new FileInputStream(new File(excelPath)))) {
            // Read up to Integer.MAX_VALUE rows below the header; the trailing 0 is
            // presumably the sheet index — confirm against readExcel2ObjectsHandler.
            return readExcel2ObjectsHandler(workbook, offsetLine, Integer.MAX_VALUE, 0);
        }
    }
}
public class HttpSessionImpl { /** * Method getValueNames * @ deprecated * @ see javax . servlet . http . HttpSession # getValueNames ( ) */ public String [ ] getValueNames ( ) { } }
if ( ! _iSession . isValid ( ) ) throw new IllegalStateException ( ) ; Enumeration enumeration = this . getAttributeNames ( ) ; Vector valueNames = new Vector ( ) ; String name = null ; while ( enumeration . hasMoreElements ( ) ) { name = ( String ) enumeration . nextElement ( ) ; valueNames . add ( name ) ; } String [ ] names = new String [ valueNames . size ( ) ] ; return ( String [ ] ) valueNames . toArray ( names ) ;
public class MercatorProjection { /** * Converts a pixel Y coordinate at a certain scale to a latitude coordinate . * @ param pixelY the pixel Y coordinate that should be converted . * @ param scaleFactor the scale factor at which the coordinate should be converted . * @ return the latitude value of the pixel Y coordinate . * @ throws IllegalArgumentException if the given pixelY coordinate is invalid . */ public static double pixelYToLatitudeWithScaleFactor ( double pixelY , double scaleFactor , int tileSize ) { } }
long mapSize = getMapSizeWithScaleFactor ( scaleFactor , tileSize ) ; if ( pixelY < 0 || pixelY > mapSize ) { throw new IllegalArgumentException ( "invalid pixelY coordinate at scale " + scaleFactor + ": " + pixelY ) ; } double y = 0.5 - ( pixelY / mapSize ) ; return 90 - 360 * Math . atan ( Math . exp ( - y * ( 2 * Math . PI ) ) ) / Math . PI ;
public class TagletManager {
    /**
     * Initialize the custom tag lists: one list per taglet location (module, package,
     * type, field, constructor, method, overview, inline), plus the fixed list of
     * serialized-form tags.
     */
    private void initCustomTaglets() {
        moduleTags = new ArrayList<>();
        packageTags = new ArrayList<>();
        typeTags = new ArrayList<>();
        fieldTags = new ArrayList<>();
        constructorTags = new ArrayList<>();
        methodTags = new ArrayList<>();
        inlineTags = new ArrayList<>();
        overviewTags = new ArrayList<>();
        // Sort each taglet into every block-location list it applies to;
        // inline taglets go only into the inline list.
        for (Taglet current : customTags.values()) {
            if (current.inModule() && !current.isInlineTag()) {
                moduleTags.add(current);
            }
            if (current.inPackage() && !current.isInlineTag()) {
                packageTags.add(current);
            }
            if (current.inType() && !current.isInlineTag()) {
                typeTags.add(current);
            }
            if (current.inField() && !current.isInlineTag()) {
                fieldTags.add(current);
            }
            if (current.inConstructor() && !current.isInlineTag()) {
                constructorTags.add(current);
            }
            if (current.inMethod() && !current.isInlineTag()) {
                methodTags.add(current);
            }
            if (current.isInlineTag()) {
                inlineTags.add(current);
            }
            if (current.inOverview() && !current.isInlineTag()) {
                overviewTags.add(current);
            }
        }
        // Init the serialized form tags
        serializedFormTags = new ArrayList<>();
        serializedFormTags.add(customTags.get(SERIAL_DATA.tagName));
        serializedFormTags.add(customTags.get(THROWS.tagName));
        // @since is skipped when the -nosince option was given.
        if (!nosince)
            serializedFormTags.add(customTags.get(SINCE.tagName));
        serializedFormTags.add(customTags.get(SEE.tagName));
    }
}
public class ClassFieldsTypeField {
    /**
     * Get the conversion map for the popup: each entry pairs an internal option
     * constant with its human-readable display label.
     *
     * @return a {key, description} pair per popup option
     */
    public String[][] getPopupMap() {
        String string[][] = {
            {INCLUDE_CLASS_PACKAGE, "Include class package"},
            {INCLUDE_PACKAGE, "Include package"},
            {INCLUDE_CLASS, "Include class"},
            {CLASS_FIELD, "Class field"},
            {NATIVE_FIELD, "Native field"},
            {CLASS_NAME, "Class name"},
            {SCREEN_CLASS_NAME, "Screen class name"},
            {INCLUDE_MODEL_PACKAGE, "Include model package"},
            {INCLUDE_THIN_PACKAGE, "Include thin package"},
            {INCLUDE_RES_PACKAGE, "Include resource package"},
        };
        return string;
    }
}
public class ExpressionVisitorImpl { /** * expression : expression ( TIMES | DIVIDE ) expression */ @ Override public Object visitMultiplicationOrDivisionExpression ( ExcellentParser . MultiplicationOrDivisionExpressionContext ctx ) { } }
boolean multiplication = ctx . TIMES ( ) != null ; BigDecimal arg1 = Conversions . toDecimal ( visit ( ctx . expression ( 0 ) ) , m_evalContext ) ; BigDecimal arg2 = Conversions . toDecimal ( visit ( ctx . expression ( 1 ) ) , m_evalContext ) ; if ( ! multiplication && arg2 . equals ( BigDecimal . ZERO ) ) { throw new EvaluationError ( "Division by zero" ) ; } return multiplication ? arg1 . multiply ( arg2 , ExpressionUtils . MATH ) : arg1 . divide ( arg2 , ExpressionUtils . MATH ) ;
public class StrSubstitutor { /** * Replaces all the occurrences of variables within the given source * builder with their matching values from the resolver . * @ param source the builder to replace in , updated , null returns zero * @ return true if altered */ public boolean replaceIn ( final StrBuilder source ) { } }
if ( source == null ) { return false ; } return substitute ( source , 0 , source . length ( ) ) ;
public class NonVoltDBBackend {
    /**
     * Returns true if the <i>columnName</i> is one of the specified <i>columnTypes</i>,
     * e.g., one of the integer column types, or one of the Geospatial column types - for
     * one or more of the <i>tableNames</i>, if specified; otherwise, for any table in
     * the database schema.
     *
     * @param columnTypes Volt column type names to match against
     * @param columnName  the column to look up via JDBC metadata
     * @param tableNames  tables to restrict the search to; null or empty means all tables
     * @param debugPrint  when true, trace the lookup to stdout
     */
    private boolean isColumnType(List<String> columnTypes, String columnName, List<String> tableNames, boolean debugPrint) {
        if (debugPrint) {
            System.out.println(" In NonVoltDBBackend.isColumnType:");
            System.out.println(" columnTypes: " + columnTypes);
            System.out.println(" columnName : " + columnName);
            System.out.println(" tableNames : " + tableNames);
        }
        // A null/empty table list means "search all tables": a single null entry makes
        // the JDBC metadata lookup below ignore the table-name filter.
        if (tableNames == null || tableNames.size() == 0) {
            tableNames = Arrays.asList((String) null);
            if (debugPrint) {
                System.out.println(" tableNames2: " + tableNames);
            }
        }
        for (String tn : tableNames) {
            // Lower - case table and column names are required for PostgreSQL;
            // we might need to alter this if we use another comparison
            // database (besides HSQL) someday
            String tableName = (tn == null) ? tn : tn.trim().toLowerCase();
            if (debugPrint) {
                System.out.println(" tableName : " + tableName);
            }
            try {
                ResultSet rs = dbconn.getMetaData().getColumns(null, null, tableName, columnName.trim().toLowerCase());
                while (rs.next()) {
                    // Column 6 of the JDBC metadata result set is TYPE_NAME.
                    String columnType = getVoltColumnTypeName(rs.getString(6));
                    if (debugPrint) {
                        System.out.println(" tableName : " + rs.getString(3));
                        System.out.println(" columnName : " + rs.getString(4));
                        System.out.println(" columnType : " + columnType);
                    }
                    if (columnTypes.contains(columnType)) {
                        if (debugPrint) {
                            System.out.println(" returning : true");
                        }
                        return true;
                    }
                }
            } catch (SQLException e) {
                // Best-effort: metadata failures are logged and the search continues
                // with the next table.
                printCaughtException("In NonVoltDBBackend.isColumnType, with tableName " + tableName + ", columnName " + columnName + ", columnTypes " + columnTypes + ", caught SQLException:\n " + e);
            }
        }
        if (debugPrint) {
            System.out.println(" returning : false");
        }
        return false;
    }
}
public class Grena3 {
    /**
     * Calculate topocentric solar position, i.e. the location of the sun on the sky for
     * a certain point in time on a certain point of the Earth's surface.
     *
     * This follows the no. 3 algorithm described in Grena, 'Five new algorithms for the
     * computation of sun position from 2010 to 2110', Solar Energy 86 (2012) pp. 1323-1337.
     * The algorithm is supposed to work for the years 2010 to 2110, with a maximum error
     * of 0.01 degrees.
     *
     * @param date        Observer's local date and time.
     * @param latitude    Observer's latitude, in degrees (negative south of equator).
     * @param longitude   Observer's longitude, in degrees (negative west of Greenwich).
     * @param deltaT      Difference between earth rotation time and terrestrial time (or
     *                    Universal Time and Terrestrial Time), in seconds. See
     *                    <a href="http://asa.usno.navy.mil/SecK/DeltaT.html">http://asa.usno.navy.mil/SecK/DeltaT.html</a>.
     *                    For the year 2015, a reasonably accurate default would be 68.
     * @param pressure    Annual average local pressure, in millibars (or hectopascals).
     *                    Used for refraction correction of zenith angle. If unsure, 1000
     *                    is a reasonable default.
     * @param temperature Annual average local temperature, in degrees Celsius. Used for
     *                    refraction correction of zenith angle.
     * @return Topocentric solar position (azimuth measured eastward from north)
     * @see AzimuthZenithAngle
     */
    public static AzimuthZenithAngle calculateSolarPosition(final GregorianCalendar date, final double latitude, final double longitude, final double deltaT, final double pressure, final double temperature) {
        final double t = calcT(date);
        // Shift to terrestrial time: 1.1574e-5 is 1/86400, i.e. deltaT converted to days.
        final double tE = t + 1.1574e-5 * deltaT;
        final double omegaAtE = 0.0172019715 * tE;
        // Solar longitude from the paper's polynomial + sine-series expansion.
        final double lambda = -1.388803 + 1.720279216e-2 * tE + 3.3366e-2 * sin(omegaAtE - 0.06172) + 3.53e-4 * sin(2.0 * omegaAtE - 0.1163);
        // Obliquity of the ecliptic (linear approximation).
        final double epsilon = 4.089567e-1 - 6.19e-9 * tE;
        final double sLambda = sin(lambda);
        final double cLambda = cos(lambda);
        final double sEpsilon = sin(epsilon);
        final double cEpsilon = sqrt(1 - sEpsilon * sEpsilon);
        // Right ascension, normalized to [0, 2*PI).
        double alpha = atan2(sLambda * cEpsilon, cLambda);
        if (alpha < 0) {
            alpha += 2 * PI;
        }
        // Declination.
        final double delta = asin(sLambda * sEpsilon);
        // Local hour angle, normalized to [-PI, PI).
        double H = 1.7528311 + 6.300388099 * t + toRadians(longitude) - alpha;
        H = ((H + PI) % (2 * PI)) - PI;
        if (H < -PI) {
            H += 2 * PI;
        }
        // end of "short procedure"
        final double sPhi = sin(toRadians(latitude));
        final double cPhi = sqrt((1 - sPhi * sPhi));
        final double sDelta = sin(delta);
        final double cDelta = sqrt(1 - sDelta * sDelta);
        final double sH = sin(H);
        final double cH = cos(H);
        // Sine of the elevation; eP is the elevation including the small correction
        // term from the paper (presumably parallax — see Grena 2012 for details).
        final double sEpsilon0 = sPhi * sDelta + cPhi * cDelta * cH;
        final double eP = asin(sEpsilon0) - 4.26e-5 * sqrt(1.0 - sEpsilon0 * sEpsilon0);
        // Azimuth; PI is added in the return statement so it is measured eastward from north.
        final double gamma = atan2(sH, cH * sPhi - sDelta * cPhi / cDelta);
        // refraction correction (disabled for silly parameter values)
        final double deltaRe = (temperature < -273 || temperature > 273 || pressure < 0 || pressure > 3000) ? 0.0 : (((eP > 0.0) ? (0.08422 * (pressure / 1000)) / ((273.0 + temperature) * tan(eP + 0.003138 / (eP + 0.08919))) : 0.0));
        // Zenith angle = 90 degrees minus the refraction-corrected elevation.
        final double z = PI / 2 - eP - deltaRe;
        return new AzimuthZenithAngle(toDegrees(gamma + PI) % 360.0, toDegrees(z));
    }
}
public class BucketingSink {
    /**
     * Creates a Hadoop {@link FileSystem} for the given path, reusing Flink's file
     * system configuration where possible and applying any extra user configuration.
     *
     * @param path         the path whose scheme/authority select the file system
     * @param extraUserConf optional extra configuration to overlay; may be null
     * @return an initialized Hadoop file system (raw variant for the local FS)
     * @throws IOException if the file system cannot be instantiated or initialized
     */
    public static FileSystem createHadoopFileSystem(Path path, @Nullable Configuration extraUserConf) throws IOException {
        // try to get the Hadoop File System via the Flink File Systems
        // that way we get the proper configuration
        final org.apache.flink.core.fs.FileSystem flinkFs = org.apache.flink.core.fs.FileSystem.getUnguardedFileSystem(path.toUri());
        final FileSystem hadoopFs = (flinkFs instanceof HadoopFileSystem) ? ((HadoopFileSystem) flinkFs).getHadoopFileSystem() : null;
        // fast path: if the Flink file system wraps Hadoop anyways and we need no extra
        // config, then we use it directly
        if (extraUserConf == null && hadoopFs != null) {
            return hadoopFs;
        } else {
            // we need to re-instantiate the Hadoop file system, because we either have
            // a special config, or the Path gave us a Flink FS that is not backed by
            // Hadoop (like file://)
            final org.apache.hadoop.conf.Configuration hadoopConf;
            if (hadoopFs != null) {
                // have a Hadoop FS but need to apply extra config
                hadoopConf = hadoopFs.getConf();
            } else {
                // the Path gave us a Flink FS that is not backed by Hadoop (like file://)
                // we need to get access to the Hadoop file system first
                // we access the Hadoop FS in Flink, which carries the proper
                // Hadoop configuration. we should get rid of this once the bucketing sink
                // is properly implemented against Flink's FS abstraction
                URI genericHdfsUri = URI.create("hdfs://localhost:12345/");
                org.apache.flink.core.fs.FileSystem accessor = org.apache.flink.core.fs.FileSystem.getUnguardedFileSystem(genericHdfsUri);
                if (!(accessor instanceof HadoopFileSystem)) {
                    throw new IOException("Cannot instantiate a Hadoop file system to access the Hadoop configuration. " + "FS for hdfs:// is " + accessor.getClass().getName());
                }
                hadoopConf = ((HadoopFileSystem) accessor).getHadoopFileSystem().getConf();
            }
            // finalize the configuration: overlay any extra user settings on a copy
            final org.apache.hadoop.conf.Configuration finalConf;
            if (extraUserConf == null) {
                finalConf = hadoopConf;
            } else {
                finalConf = new org.apache.hadoop.conf.Configuration(hadoopConf);
                for (String key : extraUserConf.keySet()) {
                    finalConf.set(key, extraUserConf.getString(key, null));
                }
            }
            // we explicitly re-instantiate the file system here in order to make sure
            // that the configuration is applied.
            URI fsUri = path.toUri();
            final String scheme = fsUri.getScheme();
            final String authority = fsUri.getAuthority();
            // Fill in missing scheme/authority from the configured default FS URI.
            if (scheme == null && authority == null) {
                fsUri = FileSystem.getDefaultUri(finalConf);
            } else if (scheme != null && authority == null) {
                URI defaultUri = FileSystem.getDefaultUri(finalConf);
                if (scheme.equals(defaultUri.getScheme()) && defaultUri.getAuthority() != null) {
                    fsUri = defaultUri;
                }
            }
            final Class<? extends FileSystem> fsClass = FileSystem.getFileSystemClass(fsUri.getScheme(), finalConf);
            final FileSystem fs;
            try {
                fs = fsClass.newInstance();
            } catch (Exception e) {
                throw new IOException("Cannot instantiate the Hadoop file system", e);
            }
            fs.initialize(fsUri, finalConf);
            // We don't perform checksums on Hadoop's local filesystem and use the raw filesystem.
            // Otherwise buffers are not flushed entirely during checkpointing which results in data loss.
            if (fs instanceof LocalFileSystem) {
                return ((LocalFileSystem) fs).getRaw();
            }
            return fs;
        }
    }
}
public class DataProvider { /** * Save entity asynchronously * @ param entity Target entity * @ return Operation status result */ public Boolean save ( @ NotNull V entity ) { } }
Timer saveAsyncTimer = METRICS . getTimer ( MetricsType . DATA_PROVIDER_SAVE . name ( ) ) ; Insert insert = QueryBuilder . insertInto ( getEntityMetadata ( ) . getTableName ( ) ) ; List < ColumnMetadata > columns = getEntityMetadata ( ) . getFieldMetaData ( ) ; columns . forEach ( column -> insert . value ( column . getName ( ) , QueryBuilder . bindMarker ( ) ) ) ; try { String keyspace = getEntityMetadata ( ) . getKeyspace ( ) ; PreparedStatement prepare = getCassandraClient ( ) . prepare ( keyspace , insert . getQueryString ( ) ) ; prepare . setConsistencyLevel ( getWriteConsistencyLevel ( ) ) ; BoundStatement boundStatement = createBoundStatement ( prepare , entity , columns ) ; ResultSet input = getCassandraClient ( ) . execute ( keyspace , boundStatement ) ; return input . wasApplied ( ) ; } catch ( SyntaxError e ) { LOGGER . warn ( "Can't prepare query: " + insert . getQueryString ( ) , e ) ; } finally { saveAsyncTimer . stop ( ) ; } return false ;
public class Maybe { /** * Returns a Maybe instance that runs the given Action for each subscriber and * emits either its exception or simply completes . * < dl > * < dt > < b > Scheduler : < / b > < / dt > * < dd > { @ code fromAction } does not operate by default on a particular { @ link Scheduler } . < / dd > * < dt > < b > Error handling : < / b > < / dt > * < dd > If the { @ link Action } throws an exception , the respective { @ link Throwable } is * delivered to the downstream via { @ link MaybeObserver # onError ( Throwable ) } , * except when the downstream has disposed this { @ code Maybe } source . * In this latter case , the { @ code Throwable } is delivered to the global error handler via * { @ link RxJavaPlugins # onError ( Throwable ) } as an { @ link io . reactivex . exceptions . UndeliverableException UndeliverableException } . * < / dd > * < / dl > * @ param < T > the target type * @ param run the runnable to run for each subscriber * @ return the new Maybe instance * @ throws NullPointerException if run is null */ @ CheckReturnValue @ SchedulerSupport ( SchedulerSupport . NONE ) public static < T > Maybe < T > fromAction ( final Action run ) { } }
ObjectHelper . requireNonNull ( run , "run is null" ) ; return RxJavaPlugins . onAssembly ( new MaybeFromAction < T > ( run ) ) ;
public class ClickEventLinker { /** * { @ inheritDoc } */ @ Override public void link ( EventLinker . Configuration config ) { } }
final Object context = config . getContext ( ) ; Set < Method > methods = config . getListenerTargets ( EventCategory . CLICK ) ; for ( final Method method : methods ) { OnClickListener onClickListener = new OnClickListener ( ) { @ Override public void onClick ( View v ) { try { if ( ! method . isAccessible ( ) ) method . setAccessible ( true ) ; Class < ? > [ ] params = method . getParameterTypes ( ) ; boolean viewArgPresent = false ; if ( params . length == 1 ) viewArgPresent = View . class . isAssignableFrom ( params [ 0 ] ) ; if ( viewArgPresent ) method . invoke ( context , v ) ; else method . invoke ( context ) ; } catch ( Exception e ) { StringBuilder builder = new StringBuilder ( ) . append ( "Invocation of " ) . append ( method . getName ( ) ) . append ( " at " ) . append ( context . getClass ( ) . getName ( ) ) . append ( " failed for event OnClick." ) ; Log . e ( getClass ( ) . getName ( ) , builder . toString ( ) , e ) ; } } } ; try { int [ ] views = method . getAnnotation ( Click . class ) . value ( ) ; for ( int id : views ) { try { if ( ContextUtils . isActivity ( context ) ) { ContextUtils . asActivity ( context ) . findViewById ( id ) . setOnClickListener ( onClickListener ) ; } else if ( ContextUtils . isFragment ( context ) ) { ContextUtils . asFragment ( context ) . getView ( ) . findViewById ( id ) . setOnClickListener ( onClickListener ) ; } else if ( ContextUtils . isSupportFragment ( context ) ) { ContextUtils . asSupportFragment ( context ) . getView ( ) . findViewById ( id ) . setOnClickListener ( onClickListener ) ; } } catch ( Exception e ) { StringBuilder builder = new StringBuilder ( ) . append ( "Click listener linking failed on method " ) . append ( method . getName ( ) ) . append ( " at " ) . append ( context . getClass ( ) . getName ( ) ) . append ( " for view with ID " ) . append ( ContextUtils . isActivity ( context ) ? ContextUtils . asActivity ( context ) . getResources ( ) . getResourceName ( id ) : ContextUtils . 
asFragment ( context ) . getResources ( ) . getResourceName ( id ) ) . append ( "." ) ; Log . e ( getClass ( ) . getName ( ) , builder . toString ( ) , e ) ; } } } catch ( Exception e ) { StringBuilder builder = new StringBuilder ( ) . append ( "Click listener linking failed on method " ) . append ( method . getName ( ) ) . append ( " at " ) . append ( context . getClass ( ) . getName ( ) ) . append ( "." ) ; Log . e ( getClass ( ) . getName ( ) , builder . toString ( ) , e ) ; } }
public class TopologyFactory { /** * Instantiates a new Topology instance . * @ param operatorName specified name of the operator * @ param topologyClass specified topology type * @ return Topology instance * @ throws InjectionException */ public Topology getNewInstance ( final Class < ? extends Name < String > > operatorName , final Class < ? extends Topology > topologyClass ) throws InjectionException { } }
final Injector newInjector = injector . forkInjector ( ) ; newInjector . bindVolatileParameter ( OperatorNameClass . class , operatorName ) ; return newInjector . getInstance ( topologyClass ) ;
public class DefaultSerialSessionConfig { /** * { @ inheritDoc } */ @ Override protected void doSetAll ( IoSessionConfig config ) { } }
if ( config instanceof SerialSessionConfig ) { SerialSessionConfig cfg = ( SerialSessionConfig ) config ; setInputBufferSize ( cfg . getInputBufferSize ( ) ) ; setReceiveThreshold ( cfg . getReceiveThreshold ( ) ) ; }
public class ComponentsInner { /** * Returns an Application Insights component . * @ param resourceGroupName The name of the resource group . * @ param resourceName The name of the Application Insights component resource . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the ApplicationInsightsComponentInner object */ public Observable < ApplicationInsightsComponentInner > getByResourceGroupAsync ( String resourceGroupName , String resourceName ) { } }
return getByResourceGroupWithServiceResponseAsync ( resourceGroupName , resourceName ) . map ( new Func1 < ServiceResponse < ApplicationInsightsComponentInner > , ApplicationInsightsComponentInner > ( ) { @ Override public ApplicationInsightsComponentInner call ( ServiceResponse < ApplicationInsightsComponentInner > response ) { return response . body ( ) ; } } ) ;
public class PutPolicyRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( PutPolicyRequest putPolicyRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( putPolicyRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( putPolicyRequest . getPolicy ( ) , POLICY_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class GridFTPClient {
    /**
     * Create a symbolic link on the FTP server.
     *
     * Issues a {@code SITE SYMLINK} command over the control channel with the
     * target and the link name as space-separated arguments. Spaces inside the
     * target are percent-encoded ({@code %20}) so they survive the
     * space-delimited argument syntax.
     *
     * NOTE(review): link_name is NOT escaped the same way — a link name
     * containing spaces would corrupt the argument list. Confirm whether the
     * server decodes %20 in both positions before changing this.
     *
     * @param link_target the path to which the symbolic link should point
     * @param link_name the path of the symbolic link to create
     * @throws IOException if a control-channel I/O error occurred
     * @throws ServerException if the server rejected the command or sent an
     *         unparsable reply
     */
    public void createSymbolicLink(String link_target, String link_name) throws IOException, ServerException {
        String arguments = link_target.replaceAll(" ", "%20") + " " + link_name;
        Command cmd = new Command("SITE SYMLINK", arguments);
        try {
            controlChannel.execute(cmd);
        } catch (UnexpectedReplyCodeException urce) {
            // Unexpected reply code from the server — surface as a ServerException
            throw ServerException.embedUnexpectedReplyCodeException(urce);
        } catch (FTPReplyParseException rpe) {
            // Reply could not be parsed at all — surface as a ServerException
            throw ServerException.embedFTPReplyParseException(rpe);
        }
    }
}
public class Cycles { /** * Find all simple cycles in a molecule . The threshold values can not be * tuned and is set at a value which will complete in reasonable time for * most molecules . To change the threshold values please use the stand - alone * { @ link AllCycles } or { @ link org . openscience . cdk . ringsearch . AllRingsFinder } . * All cycles is every possible simple cycle ( i . e . non - repeating vertices ) * in the chemical graph . As an example - all simple cycles of anthracene * includes , 3 cycles of length 6 , 2 of length 10 and 1 of length 14. * < blockquote > * < pre > * for ( IAtomContainer m : ms ) { * try { * Cycles cycles = Cycles . all ( m ) ; * IRingSet rings = cycles . toRingSet ( ) ; * } catch ( Intractable e ) { * / / handle error - note it is common that finding all simple * / / cycles in chemical graphs is intractable * < / pre > * < / blockquote > * @ return all simple cycles * @ throws Intractable the algorithm reached a limit which caused it to * abort in reasonable time * @ see # all ( ) * @ see AllCycles * @ see org . openscience . cdk . ringsearch . AllRingsFinder */ public static Cycles all ( IAtomContainer container ) throws Intractable { } }
return all ( ) . find ( container , container . getAtomCount ( ) ) ;
public class CmsMultiplexReport { /** * This searches for the first instance of a link in the internal delegate list and * returns the value of it ' s invocation . * If no such report is found an empty String will be returned . * @ see org . opencms . report . I _ CmsReport # getReportUpdate ( ) */ public String getReportUpdate ( ) { } }
for ( I_CmsReport report : m_delegates ) { if ( report . getClass ( ) . getName ( ) . toLowerCase ( ) . contains ( "html" ) ) { return report . getReportUpdate ( ) ; } } return "" ;
public class PnPInfinitesimalPlanePoseEstimation {
    /**
     * R_v is a 3x3 matrix
     * R_v = I + sin(theta)*[k]_x + (1 - cos(theta))*[k]_x^2   (Rodrigues' formula)
     *
     * Builds the rotation R_v from the fields (v1, v2). With t = |(v1, v2)|,
     * the unit rotation axis is k = (-v2, v1, 0)/t and the angle satisfies
     * tan(theta) = t, hence cos(theta) = 1/sqrt(t^2 + 1) and
     * sin(theta) = t/sqrt(t^2 + 1).
     */
    void compute_Rv() {
        // t = |(v1, v2)|
        // NOTE(review): t == 0 would divide by zero below — confirm callers
        // guarantee (v1, v2) != (0, 0)
        double t = Math.sqrt(v1 * v1 + v2 * v2);
        // s = sqrt(t^2 + 1), so cos(theta) = 1/s
        double s = Math.sqrt(t * t + 1);
        double cosT = 1.0 / s;
        // sin(theta) = sqrt(1 - cos^2(theta)) = t/s
        double sinT = Math.sqrt(1 - 1.0 / (s * s));

        // Cross-product (skew-symmetric) matrix of the axis, scaled by t until
        // the divide normalizes it to unit length
        K_x.data[0] = 0;
        K_x.data[1] = 0;
        K_x.data[2] = v1;
        K_x.data[3] = 0;
        K_x.data[4] = 0;
        K_x.data[5] = v2;
        K_x.data[6] = -v1;
        K_x.data[7] = -v2;
        K_x.data[8] = 0;
        CommonOps_DDRM.divide(K_x, t);

        // R_v = I + sin(theta)*[k]_x + (1 - cos(theta))*[k]_x^2
        CommonOps_DDRM.setIdentity(R_v);
        CommonOps_DDRM.addEquals(R_v, sinT, K_x);
        CommonOps_DDRM.multAdd(1.0 - cosT, K_x, K_x, R_v);
    }
}
public class AbsQuery { /** * Order result by the given key , reversing the order . * Please do not forget to include the content type if you are requesting to order * by a field . * @ param key the key to be reversely ordered by . * @ return the calling query for chaining . * @ throws IllegalArgumentException if key is null . * @ throws IllegalArgumentException if key is empty . * @ throws IllegalStateException if key requests a field , but no content type is requested . * @ see # withContentType ( String ) */ @ SuppressWarnings ( "unchecked" ) public Query reverseOrderBy ( String key ) { } }
checkNotEmpty ( key , "Key to order by must not be empty" ) ; if ( key . startsWith ( "fields." ) && ! hasContentTypeSet ( ) ) { throw new IllegalStateException ( "\"fields.\" cannot be used without setting a content type " + "first." ) ; } this . params . put ( PARAMETER_ORDER , "-" + key ) ; return ( Query ) this ;
public class StreamEx { /** * Returns a stream where the last element is the replaced with the result * of applying the given function while the other elements are left intact . * This is a < a href = " package - summary . html # StreamOps " > quasi - intermediate * operation < / a > . * The mapper function is called at most once . It could be not called at all * if the stream is empty or there is short - circuiting operation downstream . * @ param mapper a < a * href = " package - summary . html # NonInterference " > non - interfering < / a > , * < a href = " package - summary . html # Statelessness " > stateless < / a > * function to apply to the last element * @ return the new stream * @ since 0.4.1 */ public StreamEx < T > mapLast ( Function < ? super T , ? extends T > mapper ) { } }
return supply ( new PairSpliterator . PSOfRef < > ( mapper , spliterator ( ) , false ) ) ;
public class RequestDispatcher { /** * Dispatch the request from a client * @ param jsonPath built { @ link JsonPath } instance which represents the URI sent in the request * @ param method type of the request e . g . POST , GET , PATCH * @ param parameterProvider repository method parameter provider * @ param requestBody deserialized body of the client request * @ return the response form the Katharsis */ public Response dispatchRequest ( JsonPath jsonPath , String method , Map < String , Set < String > > parameters , RepositoryMethodParameterProvider parameterProvider , Document requestBody ) { } }
try { BaseController controller = controllerRegistry . getController ( jsonPath , method ) ; ResourceInformation resourceInformation = getRequestedResource ( jsonPath ) ; QueryAdapter queryAdapter = queryAdapterBuilder . build ( resourceInformation , parameters ) ; DefaultFilterRequestContext context = new DefaultFilterRequestContext ( jsonPath , queryAdapter , parameterProvider , requestBody , method ) ; DefaultFilterChain chain = new DefaultFilterChain ( controller ) ; return chain . doFilter ( context ) ; } catch ( Exception e ) { Optional < JsonApiExceptionMapper > exceptionMapper = exceptionMapperRegistry . findMapperFor ( e . getClass ( ) ) ; if ( exceptionMapper . isPresent ( ) ) { // noinspection unchecked return exceptionMapper . get ( ) . toErrorResponse ( e ) . toResponse ( ) ; } else { logger . error ( "failed to process request" , e ) ; throw e ; } }
public class Cipher { /** * Returns an AlgorithmParameterSpec object which contains * the maximum cipher parameter value according to the * jurisdiction policy file . If JCE unlimited strength jurisdiction * policy files are installed or there is no maximum limit on the * parameters for the specified transformation in the policy file , * null will be returned . * @ param transformation the cipher transformation . * @ return an AlgorithmParameterSpec which holds the maximum * value or null . * @ exception NullPointerException if < code > transformation < / code > * is null . * @ exception NoSuchAlgorithmException if < code > transformation < / code > * is not a valid transformation , i . e . in the form of " algorithm " or * " algorithm / mode / padding " . * @ since 1.5 */ public static final AlgorithmParameterSpec getMaxAllowedParameterSpec ( String transformation ) throws NoSuchAlgorithmException { } }
// Android - changed : Remove references to CryptoPermission and throw early // if transformation = = null or isn ' t valid . // CryptoPermission cp = getConfiguredPermission ( transformation ) ; // return cp . getAlgorithmParameterSpec ( ) ; if ( transformation == null ) { throw new NullPointerException ( "transformation == null" ) ; } // Throws NoSuchAlgorithmException if necessary . tokenizeTransformation ( transformation ) ; return null ;
public class OpenIabHelper { /** * Discovers Open Stores . * @ param listener The callback to handle the result with a list of Open Stores */ public void discoverOpenStores ( @ NotNull final OpenStoresDiscoveredListener listener ) { } }
final List < ServiceInfo > serviceInfos = queryOpenStoreServices ( ) ; final Queue < Intent > bindServiceIntents = new LinkedList < Intent > ( ) ; for ( final ServiceInfo serviceInfo : serviceInfos ) { bindServiceIntents . add ( getBindServiceIntent ( serviceInfo ) ) ; } discoverOpenStores ( listener , bindServiceIntents , new ArrayList < Appstore > ( ) ) ;
public class ProcessCache { /** * Either a specific version number can be specified , or a Smart Version can be specified which designates an allowable range . * @ see com . centurylink . mdw . model . asset . Asset # meetsVersionSpec ( String ) */ public static Process getProcessSmart ( AssetVersionSpec spec ) throws DataAccessException { } }
if ( spec . getPackageName ( ) == null ) throw new DataAccessException ( "Spec must be package-qualified: " + spec ) ; Process match = null ; String specQualifiedName = spec . getQualifiedName ( ) ; if ( specQualifiedName . endsWith ( ".proc" ) ) specQualifiedName = specQualifiedName . substring ( 0 , specQualifiedName . length ( ) - 5 ) ; for ( Process process : getAllProcesses ( ) ) { if ( specQualifiedName . equals ( process . getQualifiedName ( ) ) ) { if ( process . meetsVersionSpec ( spec . getVersion ( ) ) && ( match == null || process . getVersion ( ) > match . getVersion ( ) ) ) match = process ; } } if ( match == null ) { return null ; } else { return getProcess ( match . getId ( ) ) ; }