signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class StringUtil { /** * 根据指定的样式进行转换 * @ param str * @ param style * @ return */ public static String convertByStyle ( String str , Style style ) { } }
switch ( style ) { case camelhump : return camelhumpToUnderline ( str ) ; case uppercase : return str . toUpperCase ( ) ; case lowercase : return str . toLowerCase ( ) ; case camelhumpAndLowercase : return camelhumpToUnderline ( str ) . toLowerCase ( ) ; case camelhumpAndUppercase : return camelhumpToUnderline ( str ) . toUpperCase ( ) ; case normal : default : return str ; }
public class AtomPositionMap { /** * Calculates the number of residues of the specified chain in a given range . * Will return a negative value if the start is past the end . * @ param positionStart index of the first atom to count * @ param positionEnd index of the last atom to count * @ param startingChain Case - sensitive chain * @ return The number of atoms from A to B inclusive belonging to the given chain */ public int getLengthDirectional ( int positionStart , int positionEnd , String startingChain ) { } }
int count = getLength ( positionStart , positionEnd , startingChain ) ; if ( positionStart <= positionEnd ) { return count ; } else { return - count ; }
public class JobSchedulesImpl {

    /**
     * Updates the properties of the specified job schedule.
     * This replaces only the job schedule properties specified in the request. For example, if the schedule property
     * is not specified with this request, then the Batch service will keep the existing schedule. Changes to a job
     * schedule only impact jobs created by the schedule after the update has taken place; currently running jobs are
     * unaffected.
     *
     * @param jobScheduleId The ID of the job schedule to update.
     * @param jobSchedulePatchParameter The parameters for the request.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceResponseWithHeaders} object if successful.
     */
    public Observable<ServiceResponseWithHeaders<Void, JobSchedulePatchHeaders>> patchWithServiceResponseAsync(String jobScheduleId, JobSchedulePatchParameter jobSchedulePatchParameter) {
        // Validate required client state and parameters before issuing the call.
        if (this.client.batchUrl() == null) {
            throw new IllegalArgumentException("Parameter this.client.batchUrl() is required and cannot be null.");
        }
        if (jobScheduleId == null) {
            throw new IllegalArgumentException("Parameter jobScheduleId is required and cannot be null.");
        }
        if (jobSchedulePatchParameter == null) {
            throw new IllegalArgumentException("Parameter jobSchedulePatchParameter is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(jobSchedulePatchParameter);
        // This overload takes no per-call options, so every optional header stays null.
        final JobSchedulePatchOptions jobSchedulePatchOptions = null;
        Integer timeout = null;
        UUID clientRequestId = null;
        Boolean returnClientRequestId = null;
        DateTime ocpDate = null;
        String ifMatch = null;
        String ifNoneMatch = null;
        DateTime ifModifiedSince = null;
        DateTime ifUnmodifiedSince = null;
        String parameterizedHost = Joiner.on(", ").join("{batchUrl}", this.client.batchUrl());
        // Convert the date headers to RFC 1123 form (all remain null in this overload).
        DateTimeRfc1123 ocpDateConverted = null;
        if (ocpDate != null) {
            ocpDateConverted = new DateTimeRfc1123(ocpDate);
        }
        DateTimeRfc1123 ifModifiedSinceConverted = null;
        if (ifModifiedSince != null) {
            ifModifiedSinceConverted = new DateTimeRfc1123(ifModifiedSince);
        }
        DateTimeRfc1123 ifUnmodifiedSinceConverted = null;
        if (ifUnmodifiedSince != null) {
            ifUnmodifiedSinceConverted = new DateTimeRfc1123(ifUnmodifiedSince);
        }
        // Issue the PATCH and map the raw HTTP response through patchDelegate;
        // any delegate failure is surfaced as an Observable error.
        return service.patch(jobScheduleId, jobSchedulePatchParameter, this.client.apiVersion(), this.client.acceptLanguage(), timeout, clientRequestId, returnClientRequestId, ocpDateConverted, ifMatch, ifNoneMatch, ifModifiedSinceConverted, ifUnmodifiedSinceConverted, parameterizedHost, this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponseWithHeaders<Void, JobSchedulePatchHeaders>>>() {
                @Override
                public Observable<ServiceResponseWithHeaders<Void, JobSchedulePatchHeaders>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponseWithHeaders<Void, JobSchedulePatchHeaders> clientResponse = patchDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }
}
public class PathUtils {

    /**
     * Test if a file is reached by a path. Handles symbolic links. The test
     * uses the canonical path of the file, and does a case-sensitive string
     * comparison. Ignores a leading slash of the path.
     *
     * @param file The file which is to be tested.
     * @param pathToTest The path which is to be tested.
     * @return True or false telling if the path reaches the file.
     * @throws PrivilegedActionException Thrown if the caller does not
     *         have privileges to access the file or its ascending path.
     */
    private static boolean checkCaseSymlink(File file, String pathToTest) throws PrivilegedActionException {
        // java.nio.Path.toRealPath(LinkOption.NOFOLLOW_LINKS) in java 7 seems to do what
        // we are trying to do here.
        // On certain platforms, i.e. iSeries, the path starts with a slash.
        // Remove this slash before continuing.
        if (pathToTest.startsWith("/"))
            pathToTest = pathToTest.substring(1);
        String[] splitPathToTest = pathToTest.split("/");
        File symLinkTestFile = file;
        // Walk the path segments from the leaf back towards the root, climbing
        // the parent chain of the candidate file in lock-step.
        for (int i = splitPathToTest.length - 1; i >= 0; i--) {
            File symLinkParentFile = symLinkTestFile.getParentFile();
            if (symLinkParentFile == null) {
                // Ran out of parents before the path ran out of segments.
                return false;
            }
            // If the current file isn't a symbolic link, make sure the case matches using the
            // canonical file. Otherwise get the parent's list of files to compare against.
            if (!isSymbolicLink(symLinkTestFile, symLinkParentFile)) {
                if (!getCanonicalFile(symLinkTestFile).getName().equals(splitPathToTest[i])) {
                    return false;
                }
            } else if (!contains(symLinkParentFile.list(), splitPathToTest[i])) {
                return false;
            }
            symLinkTestFile = symLinkParentFile;
        }
        return true;
    }
}
public class DefaultReportService {

    /**
     * Sanitizes a file name: whitespace runs become single dashes and all
     * characters outside a conservative whitelist (ASCII letters, digits,
     * '_', '-', '.') are deleted.
     *
     * @param fileName the raw file name
     * @return the sanitized name, or "report" if nothing survives the filter
     */
    private String encodeFileName(String fileName) {
        // Decompose accented characters (NFD) so their base letters survive the filter.
        String decomposed = Normalizer.normalize(fileName, Normalizer.Form.NFD);
        String dashed = decomposed.replaceAll("\\s+", "-");
        String cleaned = dashed.replaceAll("[^A-Za-z0-9_\\-\\.]", "");
        // Never hand back an empty name.
        return cleaned.isEmpty() ? "report" : cleaned;
    }
}
public class BaseSource {

    /**
     * Tells whether another record can be read from the underlying source.
     * On end of input the pending record is flushed and both the record and
     * the reader are closed.
     *
     * @return false once the source is exhausted, true otherwise
     */
    public boolean hasNext() {
        try {
            if (!this.parseNextLine()) {
                // End of input: commit the pending record, then release resources.
                try {
                    m_record.addNew();
                } catch (DBException e) {
                    e.printStackTrace();
                }
                m_record.close();
                m_reader.close();
                return false;
            }
        } catch (IOException e) {
            // NOTE(review): an I/O failure while parsing is swallowed and the
            // method still reports "has next" -- confirm callers tolerate this
            // best-effort behavior.
            e.printStackTrace();
        }
        return true;
    }
}
public class UfsInputStreamManager { /** * Releases an input stream . The input stream is closed if it ' s already expired . * @ param inputStream the input stream to release * @ throws IOException when input stream fails to close */ public void release ( InputStream inputStream ) throws IOException { } }
// for non - seekable input stream , close and return if ( ! ( inputStream instanceof CachedSeekableInputStream ) || ! CACHE_ENABLED ) { inputStream . close ( ) ; return ; } synchronized ( mFileIdToInputStreamIds ) { if ( ! mFileIdToInputStreamIds . containsKey ( ( ( CachedSeekableInputStream ) inputStream ) . getFileId ( ) ) ) { LOG . debug ( "The resource {} is already expired" , ( ( CachedSeekableInputStream ) inputStream ) . getResourceId ( ) ) ; // the cache no longer tracks this input stream inputStream . close ( ) ; return ; } UfsInputStreamIdSet resources = mFileIdToInputStreamIds . get ( ( ( CachedSeekableInputStream ) inputStream ) . getFileId ( ) ) ; if ( ! resources . release ( ( ( CachedSeekableInputStream ) inputStream ) . getResourceId ( ) ) ) { LOG . debug ( "Close the expired input stream resource of {}" , ( ( CachedSeekableInputStream ) inputStream ) . getResourceId ( ) ) ; // the input stream expired , close it inputStream . close ( ) ; } }
public class BridgeType { /** * Gets the value of the genericApplicationPropertyOfBridge property . * This accessor method returns a reference to the live list , * not a snapshot . Therefore any modification you make to the * returned list will be present inside the JAXB object . * This is why there is not a < CODE > set < / CODE > method for the genericApplicationPropertyOfBridge property . * For example , to add a new item , do as follows : * < pre > * get _ GenericApplicationPropertyOfBridge ( ) . add ( newItem ) ; * < / pre > * Objects of the following type ( s ) are allowed in the list * { @ link JAXBElement } { @ code < } { @ link Object } { @ code > } * { @ link JAXBElement } { @ code < } { @ link Object } { @ code > } */ public List < JAXBElement < Object > > get_GenericApplicationPropertyOfBridge ( ) { } }
if ( _GenericApplicationPropertyOfBridge == null ) { _GenericApplicationPropertyOfBridge = new ArrayList < JAXBElement < Object > > ( ) ; } return this . _GenericApplicationPropertyOfBridge ;
public class BeanO {

    /**
     * Looks up a name in the bean's java:comp/env namespace, resolving it
     * through the injection bindings built during populateJavaNameSpace, and
     * delegating any other name to the EJB runtime. New for EJB 3.0 d366807.1
     *
     * @param name the JNDI name to resolve; must not be null
     * @return the resolved object
     * @throws IllegalArgumentException if {@code name} is null or the bound
     *         injection object cannot be obtained
     */
    @Override
    public Object lookup(String name) {
        // Note: this context method is allowed from all bean methods,
        //       except the constructor... which has not way to access
        //       the context. Therefore, no 'state' checking needs
        //       to be performed... just validate the parameter.
        if (name == null)
            throw new IllegalArgumentException("null 'name' parameter.");

        Object result = null;
        final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
        if (isTraceOn && tc.isEntryEnabled())
            Tr.entry(tc, "lookup: " + name);

        // Previously, this method used Naming for comp/env, which tolerated the
        // 'name' starting with 'java:comp/env/', but that is NOT part of the
        // name in our map. d473811
        String lookupName = name;
        if (name.startsWith("java:comp/env/")) // F743-609 remove useless null check
        {
            // Strip the 14-character "java:comp/env/" prefix.
            lookupName = name.substring(14);
        }

        // Rather than perform a lookup using Naming, the InjectionBinding
        // that was created during populateJavaNameSpace will be located
        // and used to obtain / create the result object, just like it
        // would be done for injection. d473811
        InjectionBinding<?> binding = home.beanMetaData.ivJavaColonCompEnvMap.get(lookupName);
        if (binding != null) {
            try {
                result = binding.getInjectionObject();
            } catch (InjectionException ex) {
                FFDCFilter.processException(ex, CLASS_NAME + ".lookup", "1342", this);
                IllegalArgumentException iae = new IllegalArgumentException("Failure occurred obtaining object for " + name + " reference defined for " + home.beanMetaData.j2eeName, ex);
                if (isTraceOn && tc.isEntryEnabled())
                    Tr.exit(tc, "lookup: " + iae);
                throw iae;
            }
        } else {
            // Names without a local binding are delegated to the runtime.
            result = container.getEJBRuntime().javaColonLookup(name, home);
        }

        // NOTE(review): if the lookup can legitimately yield null, the trace
        // call below dereferences result.getClass() and would throw NPE --
        // confirm a null result is impossible here.
        if (isTraceOn && tc.isEntryEnabled())
            Tr.exit(tc, "lookup: " + result.getClass().getName());
        return result;
    }
}
public class TeaLog {

    /**
     * Splits the stack trace into separate lines and extracts the template
     * name and line number.
     *
     * @param t the throwable whose stack trace is parsed
     * @return the separated stack trace lines, or null when the trace does
     *         not contain a Tea exception marker
     */
    private TeaStackTraceLine[] getTeaStackTraceLines(Throwable t) {
        // grab the existing stack trace
        StringWriter stackTraceGrabber = new StringWriter();
        t.printStackTrace(new PrintWriter(stackTraceGrabber));
        String stackTrace = stackTraceGrabber.toString();

        int extensionIndex = stackTrace.lastIndexOf(TEA_EXCEPTION);
        boolean isTeaException = extensionIndex != -1;
        if (isTeaException) {
            // trim off all lines after the last template exception; honor both
            // \n and \r line endings, whichever comes first.
            int endIndex = stackTrace.indexOf('\n', extensionIndex);
            int endRIndex = stackTrace.indexOf('\r', extensionIndex);
            if (endRIndex > -1 && endRIndex < endIndex)
                endIndex = endRIndex;
            if (endIndex <= 0) {
                endIndex = stackTrace.length();
            }
            stackTrace = stackTrace.substring(0, endIndex);

            // parse each line
            List<TeaStackTraceLine> teaStackTraceLines = new ArrayList<TeaStackTraceLine>();
            StringTokenizer tokenizer = new StringTokenizer(stackTrace, "\n");
            while (tokenizer.hasMoreElements()) {
                String line = (String) tokenizer.nextElement();
                if (line.indexOf(TEA_EXCEPTION) != -1) {
                    /* TODO: make sure this works for lines that don't have line numbers.
                       Look in ESPN logs for examples.
                       at org.teatrove.teaservlet.template.schedule.substitute(schedule.tea:78)
                       at org.teatrove.teaservlet.template.shell.story.frame.substitute(shell/story/frame.tea)
                     */
                    String tempLine = line;
                    // Isolate the "(template.tea:NN)" portion between the parentheses.
                    int bracket = tempLine.indexOf('(');
                    tempLine = tempLine.substring(bracket + 1);
                    bracket = tempLine.indexOf(')');
                    tempLine = tempLine.substring(0, bracket);
                    int colonIndex = tempLine.indexOf(':');
                    String templateName = null;
                    Integer lineNumber = null;
                    if (colonIndex >= 0) {
                        templateName = tempLine.substring(0, colonIndex);
                        try {
                            lineNumber = new Integer(tempLine.substring(colonIndex + 1));
                        } catch (NumberFormatException nfe) {
                            // Malformed line number: keep the name, drop the number.
                            lineNumber = null;
                        }
                    } else {
                        // No line number present: the whole token is the template name.
                        templateName = tempLine;
                        lineNumber = null;
                    }
                    teaStackTraceLines.add(new TeaStackTraceLine(templateName, lineNumber, line));
                } else {
                    // Non-Tea frames are kept verbatim with no template metadata.
                    teaStackTraceLines.add(new TeaStackTraceLine(null, null, line));
                }
            }
            return (TeaStackTraceLine[]) teaStackTraceLines.toArray(new TeaStackTraceLine[teaStackTraceLines.size()]);
        } else {
            return null;
        }
    }
}
public class InstanceSet { /** * 分割样本集 , 将样本集合中样本放随机放在两个集合 , 大小分别为i / n , ( n - i ) / n * @ param percent 分割比例 必须在0,1之间 * @ return */ public InstanceSet [ ] split ( float percent ) { } }
shuffle ( ) ; int length = this . size ( ) ; InstanceSet [ ] sets = new InstanceSet [ 2 ] ; sets [ 0 ] = new InstanceSet ( pipes , factory ) ; sets [ 1 ] = new InstanceSet ( pipes , factory ) ; int idx = ( int ) Math . round ( percent * length ) ; sets [ 0 ] . addAll ( subList ( 0 , idx ) ) ; if ( idx + 1 < length ) sets [ 1 ] . addAll ( subList ( idx + 1 , length ) ) ; return sets ;
public class IndexSet {

    /**
     * Returns the current index, lazily opening or refreshing its searcher.
     * This has pool semantics, so consider making this a checkin/checkout
     * style operation.
     *
     * @return the (possibly refreshed) LuceneIndex
     * @throws RuntimeException wrapping any IOException from the index
     */
    public LuceneIndex current() {
        // Reopen index searcher if necessary.
        IndexSearcher oldSearcher = index.searcher.get();
        try {
            // Guarded lock, to prevent us from unnecessarily synchronizing every time.
            if (oldSearcher == null) {
                synchronized (this) {
                    // Re-check under the lock; only the first thread in creates the searcher.
                    if ((oldSearcher = index.searcher.get()) == null) {
                        IndexSearcher newSearcher = new IndexSearcher(index.directory);
                        index.searcher.compareAndSet(null, newSearcher);
                    }
                }
                // Yes it's open, but is it stale?
            } else {
                // openIfChanged returns null when the reader is still current.
                DirectoryReader directoryReader = DirectoryReader.openIfChanged(index.directory);
                if (null != directoryReader) {
                    index.directory = directoryReader;
                    synchronized (this) {
                        IndexSearcher newSearcher;
                        try {
                            newSearcher = new IndexSearcher(directoryReader);
                        } catch (AlreadyClosedException e) {
                            log.warning("Old index reader could not be reopened. Opening a new one...");
                            newSearcher = new IndexSearcher(DirectoryReader.open(index.writer, true));
                        }
                        // CAS so a concurrent refresher's searcher is not clobbered.
                        if (!index.searcher.compareAndSet(oldSearcher, newSearcher)) {
                            // Someone beat us to it. No worries.
                            log.finest("Another searcher was already open for this index. Nothing to do.");
                        }
                    }
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
        return index;
    }
}
public class IntervalTagger {

    /**
     * Reads in HeidelTime's resource files.
     *
     * @param hmResourcesRules map from rule resource names to their streams
     * @throws ResourceInitializationException when a resource cannot be read
     */
    private void readResources(ResourceMap hmResourcesRules) throws ResourceInitializationException {
        // One rule per line: RULENAME="...",EXTRACTION="...",NORM_VALUE="..."
        Pattern paReadRules = Pattern.compile("RULENAME=\"(.*?)\",EXTRACTION=\"(.*?)\",NORM_VALUE=\"(.*?)\"(.*)");
        // %reXyz placeholders referenced inside the extraction part.
        Pattern paVariable = Pattern.compile("%(re[a-zA-Z0-9]*)");
        // read normalization data
        InputStream is = null;
        InputStreamReader isr = null;
        BufferedReader br = null;
        try {
            for (String resource : hmResourcesRules.keySet()) {
                is = hmResourcesRules.getInputStream(resource);
                isr = new InputStreamReader(is, "UTF-8");
                br = new BufferedReader(isr);
                Logger.printDetail(component, "Adding rule resource: " + resource);
                for (String line; (line = br.readLine()) != null; ) {
                    // Skip comment and blank lines.
                    if (line.startsWith("//") || line.equals("")) {
                        continue;
                    }
                    Logger.printDetail("DEBUGGING: reading rules..." + line);
                    // check each line for the name, extraction, and normalization part
                    for (MatchResult r : Toolbox.findMatches(paReadRules, line)) {
                        String rule_name = r.group(1);
                        String rule_extraction = r.group(2);
                        String rule_normalization = r.group(3);

                        // RULE EXTRACTION PARTS ARE TRANSLATED INTO REGULAR EXPRESSIONS
                        // create pattern for rule extraction part
                        RePatternManager rpm = RePatternManager.getInstance(language, false);
                        for (MatchResult mr : Toolbox.findMatches(paVariable, rule_extraction)) {
                            Logger.printDetail("DEBUGGING: replacing patterns..." + mr.group());
                            // An unknown %re pattern is a fatal configuration error.
                            if (!(rpm.containsKey(mr.group(1)))) {
                                Logger.printError("Error creating rule:" + rule_name);
                                Logger.printError("The following pattern used in this rule does not exist, does it? %" + mr.group(1));
                                System.exit(-1);
                            }
                            rule_extraction = rule_extraction.replaceAll("%" + mr.group(1), rpm.get(mr.group(1)));
                        }
                        // Literal blanks in the rule match arbitrary runs of whitespace.
                        rule_extraction = rule_extraction.replaceAll(" ", "[\\\\s]+");
                        Pattern pattern = null;
                        try {
                            pattern = Pattern.compile(rule_extraction);
                        } catch (java.util.regex.PatternSyntaxException e) {
                            // A rule that does not compile is also fatal.
                            Logger.printError("Compiling rules resulted in errors.");
                            Logger.printError("Problematic rule is " + rule_name);
                            Logger.printError("Cannot compile pattern: " + rule_extraction);
                            e.printStackTrace();
                            System.exit(-1);
                        }
                        // READ INTERVAL RULES AND MAKE THEM AVAILABLE
                        if (resource.equals("intervalrules")) {
                            hmIntervalPattern.put(pattern, rule_name);
                            hmIntervalNormalization.put(rule_name, rule_normalization);
                        }
                    }
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
            throw new ResourceInitializationException();
        } finally {
            // Best-effort close of the last opened reader chain.
            // NOTE(review): only the streams of the final resource are closed
            // here; earlier iterations overwrite is/isr/br -- confirm whether
            // getInputStream hands out resources that need individual closing.
            try {
                if (br != null) {
                    br.close();
                }
                if (isr != null) {
                    isr.close();
                }
                if (is != null) {
                    is.close();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
public class ProgramEvaluator { /** * Compile the program */ public void compileProgram ( ) { } }
program . inputs ( ) . forEach ( var -> var . calculateFuzzySpace ( ) ) ; program . outputs ( ) . forEach ( var -> var . calculateFuzzySpace ( ) ) ; program . rules ( ) . forEach ( rule -> { updateMemberReferenceCount ( rule . getCondition ( ) ) ; rule . assignmentMembers ( ) . forEach ( member -> member . incrReferenceCount ( ) ) ; } ) ;
public class ConfigSupport { /** * Convert duration to an representation accepted by Configuration parser . * @ param duration A duration . * @ return A StringBuilder with the string representation of the duration . */ public static StringBuilder durationConfigString ( Duration duration ) { } }
Duration remainder = duration ; long days = remainder . getStandardDays ( ) ; remainder = remainder . minus ( Duration . standardDays ( days ) ) ; long hours = remainder . getStandardHours ( ) ; remainder = remainder . minus ( Duration . standardHours ( hours ) ) ; long minutes = remainder . getStandardMinutes ( ) ; remainder = remainder . minus ( Duration . standardMinutes ( minutes ) ) ; long seconds = remainder . getStandardSeconds ( ) ; remainder = remainder . minus ( Duration . standardSeconds ( seconds ) ) ; if ( ! remainder . isEqual ( Duration . ZERO ) ) Logger . getLogger ( ConfigSupport . class . getName ( ) ) . log ( Level . WARNING , "Duration is more precise than configuration will handle: {0}, dropping remainder: {1}" , new Object [ ] { duration , remainder } ) ; StringBuilder result = new StringBuilder ( ) ; if ( days != 0 ) { if ( result . length ( ) != 0 ) result . append ( ' ' ) ; result . append ( days ) . append ( 'd' ) ; } if ( hours != 0 ) { if ( result . length ( ) != 0 ) result . append ( ' ' ) ; result . append ( hours ) . append ( 'h' ) ; } if ( minutes != 0 ) { if ( result . length ( ) != 0 ) result . append ( ' ' ) ; result . append ( minutes ) . append ( 'm' ) ; } if ( result . length ( ) == 0 || seconds != 0 ) { if ( result . length ( ) != 0 ) result . append ( ' ' ) ; result . append ( seconds ) . append ( 's' ) ; } return result ;
public class Radar {

    /**
     * Defines the position of the center of the radar by the coordinates of
     * the given point of interest (poi) object.
     *
     * @param NEW_LOCATION the poi whose location becomes the radar center
     */
    public void setMyLocation(final Poi NEW_LOCATION) {
        this.MY_LOCATION.setLocation(NEW_LOCATION.getLocation());
        // Re-evaluate which blips are visible from the new center.
        checkForBlips();
        // Rebuild internal geometry for the current drawing area, then redraw.
        init(getInnerBounds().width, getInnerBounds().height);
        repaint();
    }
}
public class AddVertexMutation {

    /**
     * Mutates the list of vertices for a given polygon by adding a new random point.
     * Whether or not a point is actually added is determined by the configured
     * mutation probability.
     *
     * @param vertices A list of the points that make up the polygon.
     * @param rng A source of randomness.
     * @return A mutated list of points (the original list when no mutation occurs).
     */
    @Override
    protected List<Point> mutateVertices(List<Point> vertices, Random rng) {
        // A single point is added with the configured probability, unless
        // we already have the maximum permitted number of points.
        if (vertices.size() < MAX_VERTEX_COUNT && getMutationProbability().nextValue().nextEvent(rng)) {
            List<Point> newVertices = new ArrayList<Point>(vertices);
            // NOTE(review): the insertion index is drawn from [0, size) of the
            // copied list, so a new point is never appended after the last
            // existing vertex -- confirm this is intentional.
            newVertices.add(rng.nextInt(newVertices.size()), new Point(rng.nextInt(getCanvasSize().width), rng.nextInt(getCanvasSize().height)));
            return newVertices;
        } else // Nothing changed.
        {
            return vertices;
        }
    }
}
public class JsonInput { /** * accepts a comma or object end */ JsonEvent acceptObjectSeparator ( ) throws IOException { } }
JsonEvent event = readEvent ( ) ; if ( event == JsonEvent . COMMA ) { return readEvent ( ) ; // leniently allow comma before objectEnd } else { return ensureEvent ( event , JsonEvent . OBJECT_END ) ; }
public class ServletContextResourceReaderHandler {

    /**
     * (non-Javadoc)
     * @see net.jawr.web.resource.handler.reader.ResourceReaderHandler#getResource(java.lang.String)
     */
    @Override
    public Reader getResource(String resourceName) throws ResourceNotFoundException {
        // Delegates to the three-argument overload with no requesting resource
        // and the processing flag disabled.
        return getResource(null, resourceName, false);
    }
}
public class Ifc4PackageImpl {

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getIfcCurveStyle() {
        // Lazily resolve the classifier from the registered IFC4 package;
        // index 164 is fixed by the generated package layout.
        if (ifcCurveStyleEClass == null) {
            ifcCurveStyleEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(164);
        }
        return ifcCurveStyleEClass;
    }
}
public class AbstractFramedStreamSourceChannel {

    /**
     * Called when this stream is no longer valid. Reads from the stream will
     * result in an exception.
     */
    protected void markStreamBroken() {
        // Fast path: already marked broken, nothing to do.
        if (anyAreSet(state, STATE_STREAM_BROKEN)) {
            return;
        }
        synchronized (lock) {
            state |= STATE_STREAM_BROKEN;
            // Release the currently buffered data, if any.
            PooledByteBuffer data = this.data;
            if (data != null) {
                try {
                    data.close(); // may have been closed by the read thread
                } catch (Throwable e) {
                    // ignore
                }
                this.data = null;
            }
            // Discard all queued frames.
            for (FrameData frame : pendingFrameData) {
                frame.frameData.close();
            }
            pendingFrameData.clear();
            // Wake up readers and waiters so they observe the broken state.
            if (isReadResumed()) {
                resumeReadsInternal(true);
            }
            if (waiters > 0) {
                lock.notifyAll();
            }
        }
    }
}
public class DatenFilm { public DatenFilm getCopy ( ) { } }
DatenFilm ret = new DatenFilm ( ) ; System . arraycopy ( this . arr , 0 , ret . arr , 0 , arr . length ) ; ret . datumFilm = this . datumFilm ; ret . nr = this . nr ; ret . dateigroesseL = this . dateigroesseL ; ret . dauerL = this . dauerL ; ret . abo = this . abo ; return ret ;
public class ServiceIdMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param serviceId the object to marshall; must not be null
     * @param protocolMarshaller sink receiving the marshalled fields
     */
    public void marshall(ServiceId serviceId, ProtocolMarshaller protocolMarshaller) {
        if (serviceId == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Field order follows the generated binding definitions.
            protocolMarshaller.marshall(serviceId.getName(), NAME_BINDING);
            protocolMarshaller.marshall(serviceId.getNames(), NAMES_BINDING);
            protocolMarshaller.marshall(serviceId.getAccountId(), ACCOUNTID_BINDING);
            protocolMarshaller.marshall(serviceId.getType(), TYPE_BINDING);
        } catch (Exception e) {
            // Wrap any marshalling failure in the SDK's client exception type.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class LauncherModel {

    /**
     * Compute the force vector depending of the target.
     *
     * @param vector The initial vector used for launch.
     * @param target The target reference.
     * @return The computed force to reach target.
     */
    private Force computeVector(Force vector, Localizable target) {
        // Launch origin comes from the launcher's own localizable field.
        final double sx = localizable.getX();
        final double sy = localizable.getY();
        double dx = target.getX();
        double dy = target.getY();
        if (target instanceof Transformable) {
            // Lead a moving target: extrapolate its last displacement,
            // scaled by the current distance to it.
            final Transformable transformable = (Transformable) target;
            final double ray = UtilMath.getDistance(localizable.getX(), localizable.getY(), target.getX(), target.getY());
            dx += (int) ((target.getX() - transformable.getOldX()) / vector.getDirectionHorizontal() * ray);
            dy += (int) ((target.getY() - transformable.getOldY()) / vector.getDirectionVertical() * ray);
        }
        // Normalize by the dominant axis so the larger component keeps full speed.
        final double dist = Math.max(Math.abs(sx - dx), Math.abs(sy - dy));
        // NOTE(review): dist is 0 when origin and destination coincide, making
        // vecX/vecY NaN -- confirm callers never aim at the launcher itself.
        final double vecX = (dx - sx) / dist * vector.getDirectionHorizontal();
        final double vecY = (dy - sy) / dist * vector.getDirectionVertical();
        final Force force = new Force(vector);
        force.setDestination(vecX, vecY);
        return force;
    }
}
public class IntervalCollection {

    /**
     * Searches for the gaps: all points in time which do not belong to any
     * interval of this instance.
     *
     * @return new interval collection containing the inner gaps between
     *         the own intervals while this instance remains unaffected
     * @since 2.0
     */
    public IntervalCollection<T> withGaps() {
        int len = this.intervals.size();
        if (len == 0) {
            // Empty collection: nothing to compute.
            return this;
        } else if (len == 1) {
            // A single interval has no *inner* gaps.
            List<ChronoInterval<T>> zero = Collections.emptyList();
            return this.create(zero);
        }
        List<ChronoInterval<T>> gaps = new ArrayList<>();
        T previous = null;
        for (int i = 0, n = len - 1; i < n; i++) {
            ChronoInterval<T> current = this.intervals.get(i);
            if (current.getEnd().isInfinite()) {
                // An open-ended interval swallows everything after it.
                break;
            }
            T gapStart = current.getEnd().getTemporal();
            if (current.getEnd().isClosed()) {
                // Closed end: the gap can only start one step after the end point.
                gapStart = this.getTimeLine().stepForward(gapStart);
                if (gapStart == null) {
                    break;
                }
            }
            // Track the furthest end seen so far so that overlapping intervals
            // cannot move the candidate gap start backwards.
            if ((previous == null) || this.isAfter(gapStart, previous)) {
                previous = gapStart;
            } else {
                gapStart = previous;
            }
            T gapEnd = this.intervals.get(i + 1).getStart().getTemporal();
            if ((gapEnd == null) || !this.isAfter(gapEnd, gapStart)) {
                // The next interval starts at or before the candidate gap: no gap here.
                continue;
            }
            IntervalEdge edge = IntervalEdge.OPEN;
            if (this.isCalendrical()) {
                // Calendrical timelines use a closed end one step earlier.
                edge = IntervalEdge.CLOSED;
                gapEnd = this.getTimeLine().stepBackwards(gapEnd);
                if (gapEnd == null) {
                    continue;
                }
            }
            Boundary<T> s = Boundary.ofClosed(gapStart);
            Boundary<T> e = Boundary.of(edge, gapEnd);
            gaps.add(this.newInterval(s, e));
        }
        return this.create(gaps);
    }
}
public class Gen { /** * Append value ( on tos ) to string buffer ( on tos - 1 ) . */ void appendString ( JCTree tree ) { } }
Type t = tree . type . baseType ( ) ; if ( ! t . isPrimitive ( ) && t . tsym != syms . stringType . tsym ) { t = syms . objectType ; } items . makeMemberItem ( getStringBufferAppend ( tree , t ) , false ) . invoke ( ) ;
public class FilterOddChars { /** * This function returns a string after removing characters at even indices from the input string . * Args : * input _ string ( String ) : The original string . * Returns : * String : The resultant string with characters at even indices removed . * Examples : * filterOddChars ( ' python ' ) * ' pto ' * filterOddChars ( ' program ' ) * ' porm ' * filterOddChars ( ' language ' ) * ' lnug ' */ public static String filterOddChars ( String inputString ) { } }
StringBuilder filteredString = new StringBuilder ( ) ; for ( int index = 0 ; index < inputString . length ( ) ; index ++ ) { // Indexes are 0 - based , so , characters at even indices will have odd index + 1 if ( index % 2 == 0 ) { filteredString . append ( inputString . charAt ( index ) ) ; } } return filteredString . toString ( ) ;
public class LiveDataSeries {

    /**
     * Adds a javascript parameter that will be passed into the
     * {@link #update(LiveDataUpdateEvent)} method.
     *
     * @param parameterName the name of the parameter
     * @param javascriptExpression a javascript expression. The value this
     *        expression evaluates to will be transmitted to the server via
     *        AJAX and will be passed into update(LiveDataUpdateEvent). The
     *        expression may be a function call or a literal. If it is a
     *        literal string, surround it with single quotes; don't use double
     *        quotes since they are not escaped correctly.
     * @return this object for chaining
     */
    public LiveDataSeries addJavaScriptParameter(final String parameterName, final String javascriptExpression) {
        // Last write wins for a repeated parameter name (map semantics).
        this.javascriptParameters.put(parameterName, javascriptExpression);
        return this;
    }
}
public class ClassLocator { /** * Lists all packages it can find in the classpath . * @ return a list with all the found packages */ public List < String > findPackages ( ) { } }
List < String > result ; Iterator < String > packages ; result = new ArrayList < > ( ) ; packages = m_Cache . packages ( ) ; while ( packages . hasNext ( ) ) result . add ( packages . next ( ) ) ; Collections . sort ( result , new StringCompare ( ) ) ; return result ;
public class AppServiceEnvironmentsInner {

    /**
     * Move an App Service Environment to a different VNET.
     *
     * @param resourceGroupName Name of the resource group to which the resource belongs.
     * @param name Name of the App Service Environment.
     * @param vnetInfo Details for the new virtual network.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<SiteInner>> changeVnetAsync(final String resourceGroupName, final String name, final VirtualNetworkProfile vnetInfo, final ListOperationCallback<SiteInner> serviceCallback) {
        // The first page comes from changeVnetSinglePageAsync; subsequent pages
        // are fetched lazily through the nextPageLink callback below.
        return AzureServiceFuture.fromPageResponse(
            changeVnetSinglePageAsync(resourceGroupName, name, vnetInfo),
            new Func1<String, Observable<ServiceResponse<Page<SiteInner>>>>() {
                @Override
                public Observable<ServiceResponse<Page<SiteInner>>> call(String nextPageLink) {
                    return changeVnetNextSinglePageAsync(nextPageLink);
                }
            },
            serviceCallback);
    }
}
public class PreferenceFragment { /** * Returns , whether the title of the example dialog should be shown , or not . * @ return True , if the title should be shown , false otherwise */ private boolean shouldTitleBeShown ( ) { } }
SharedPreferences sharedPreferences = PreferenceManager . getDefaultSharedPreferences ( getActivity ( ) ) ; String key = getString ( R . string . show_dialog_title_preference_key ) ; boolean defaultValue = getResources ( ) . getBoolean ( R . bool . show_dialog_title_preference_default_value ) ; return sharedPreferences . getBoolean ( key , defaultValue ) ;
public class DroolsValidator {

    /**
     * Validates the given {@link OnExitScriptType} against every default
     * constraint defined for its type, reporting problems to the diagnostic chain.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public boolean validateOnExitScriptType(OnExitScriptType onExitScriptType, DiagnosticChain diagnostics, Map<Object, Object> context) {
        // Generated EMF validator: delegate to the shared default-constraint check.
        return validate_EveryDefaultConstraint(onExitScriptType, diagnostics, context);
    }
}
public class Telefonnummer { /** * Gibt den String nach DIN 5008 aus . Die Nummern werden dabei * funktionsbezogen durch Leerzeichen und die Durchwahl mit Bindestrich * abgetrennt . * @ return z . B . " + 49 30 12345-67 " bzw . " 030 12345-67" */ public String toDinString ( ) { } }
Optional < String > laenderkennzahl = getLaenderkennzahl ( ) ; return laenderkennzahl . map ( s -> s + " " + getVorwahl ( ) . substring ( 1 ) + " " + getRufnummer ( ) ) . orElseGet ( ( ) -> getVorwahl ( ) + " " + getRufnummer ( ) ) ;
public class StreamSegmentMetadata {

    /**
     * Sets the length of this segment as persisted in Storage.
     * The new value must be non-negative and monotonically non-decreasing.
     *
     * region UpdateableSegmentMetadata Implementation
     */
    @Override
    public synchronized void setStorageLength(long value) {
        // Validate before logging/mutating: length may never shrink.
        Exceptions.checkArgument(value >= 0, "value", "Storage Length must be a non-negative number.");
        Exceptions.checkArgument(value >= this.storageLength, "value", "New Storage Length cannot be smaller than the previous one.");
        log.trace("{}: StorageLength changed from {} to {}.", this.traceObjectId, this.storageLength, value);
        this.storageLength = value;
    }
}
public class Lists {

    /**
     * Returns every possible list that can be formed by choosing one element
     * from each of the given lists in order; the n-ary
     * <a href="http://en.wikipedia.org/wiki/Cartesian_product">Cartesian
     * product</a> of the lists. For example: <pre>{@code
     * Lists.cartesianProduct(ImmutableList.of(
     *     ImmutableList.of(1, 2),
     *     ImmutableList.of("A", "B", "C")))}</pre>
     * returns the six lists {@code [1,A], [1,B], [1,C], [2,A], [2,B], [2,C]}.
     *
     * <p>The result is guaranteed to be in the "traditional", lexicographical
     * order for Cartesian products that you would get from nesting for loops.
     *
     * <p>Note that if any input list is empty, the Cartesian product will also
     * be empty. If no lists at all are provided (an empty list), the resulting
     * Cartesian product has one element, an empty list (counter-intuitive, but
     * mathematically consistent).
     *
     * <p><i>Performance notes:</i> while the cartesian product of lists of size
     * {@code m, n, p} is a list of size {@code m x n x p}, its actual memory
     * consumption is much smaller. When the cartesian product is constructed,
     * the input lists are merely copied. Only as the resulting list is iterated
     * are the individual lists created, and these are not retained after
     * iteration.
     *
     * @param lists the lists to choose elements from, in the order that the
     *     elements chosen from those lists should appear in the resulting lists
     * @param <B> any common base class shared by all axes (often just
     *     {@link Object})
     * @return the Cartesian product, as an immutable list containing immutable
     *     lists
     * @throws IllegalArgumentException if the size of the cartesian product
     *     would be greater than {@link Integer#MAX_VALUE}
     * @throws NullPointerException if {@code lists}, any one of the
     *     {@code lists}, or any element of a provided list is null
     */
    static <B> List<List<B>> cartesianProduct(List<? extends List<? extends B>> lists) {
        // CartesianList is a lazy view: it only copies the axes up front and
        // materializes each tuple on demand during iteration.
        return CartesianList.create(lists);
    }
}
public class CmsSmallElementsHandler { /** * Checks if a given widget counts as ' small ' . < p > * @ param widget the widget to check * @ return true if the widget is small */ public static boolean isSmall ( Widget widget ) { } }
assert widget . isAttached ( ) ; return ( CmsPositionBean . generatePositionInfo ( widget . getElement ( ) ) . getHeight ( ) < NECESSARY_HEIGHT ) && ( CmsPositionBean . getBoundingClientRect ( widget . getElement ( ) ) . getHeight ( ) < NECESSARY_HEIGHT ) ;
public class CmsAttributeHandler {

    /**
     * Adds a new attribute value and adds the required widgets to the editor DOM.<p>
     *
     * Only acts when the attribute's max occurrence is not yet reached and the
     * attribute type is simple; otherwise this is a no-op.
     *
     * @param value the simple value
     */
    public void addNewAttributeValue(String value) {
        // make sure not to add more values than allowed
        int maxOccurrence = getEntityType().getAttributeMaxOccurrence(m_attributeName);
        CmsEntityAttribute attribute = m_entity.getAttribute(m_attributeName);
        boolean mayHaveMore = ((attribute == null) || (attribute.getValueCount() < maxOccurrence));
        if (mayHaveMore && getAttributeType().isSimpleType()) {
            I_CmsFormEditWidget widget = m_widgetService.getAttributeFormWidget(m_attributeName);
            m_entity.addAttributeValue(m_attributeName, value);
            int valueCount = m_entity.getAttribute(m_attributeName).getValueCount();
            String defaultValue = m_widgetService.getDefaultAttributeValue(m_attributeName, getSimplePath(valueCount - 1));
            CmsAttributeValueView valueView = null;
            if ((m_attributeValueViews.size() == 1) && !m_attributeValueViews.get(0).hasValue()) {
                // Reuse the single empty placeholder view instead of adding a new one.
                valueView = m_attributeValueViews.get(0);
                valueView.setActive();
                // setActive may have reset the value, so we set it again
                m_entity.setAttributeValue(m_attributeName, value, valueCount - 1);
                valueView.getValueWidget().setValue(value);
            } else {
                // Create a fresh value view and attach it next to the existing ones.
                valueView = new CmsAttributeValueView(this, m_widgetService.getAttributeLabel(m_attributeName), m_widgetService.getAttributeHelp(m_attributeName));
                if (m_widgetService.isDisplaySingleLine(m_attributeName)) {
                    valueView.setCompactMode(CmsAttributeValueView.COMPACT_MODE_SINGLE_LINE);
                }
                ((FlowPanel) m_attributeValueViews.get(0).getParent()).add(valueView);
                valueView.setValueWidget(widget, value, defaultValue, true);
            }
            // Record the addition for undo/redo, if the handler is available.
            CmsUndoRedoHandler handler = CmsUndoRedoHandler.getInstance();
            if (handler.isIntitalized()) {
                handler.addChange(m_entity.getId(), m_attributeName, m_entity.getAttribute(m_attributeName).getValueCount() - 1, ChangeType.add);
            }
            updateButtonVisisbility();
        }
    }
}
public class BNFHeadersImpl { /** * Increment the number of headers in storage counter by one . If this puts * it over the limit for the message , then an exception is thrown . * @ throws IllegalArgumentException if there are now too many headers */ private void incrementHeaderCounter ( ) { } }
this . numberOfHeaders ++ ; this . headerAddCount ++ ; if ( this . limitNumHeaders < this . numberOfHeaders ) { String msg = "Too many headers in storage: " + this . numberOfHeaders ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , msg ) ; } throw new IllegalArgumentException ( msg ) ; }
public class LinuxUtilizationGauger { /** * Execute " ps - eo pid , ppid , pcpu , rss , command " * @ return String [ ] which contains the execution result */ protected String [ ] getPS ( ) { } }
ShellCommandExecutor shellExecutor = new ShellCommandExecutor ( CMD ) ; try { shellExecutor . execute ( ) ; } catch ( IOException e ) { LOG . error ( StringUtils . stringifyException ( e ) ) ; return null ; } return shellExecutor . getOutput ( ) . split ( "\n" ) ;
public class CollectionUtil {

    /**
     * Generates all permutations of the elements in a collection.
     *
     * <p>For example, passing [1,2] returns [[1,2],[2,1]]. A null or empty
     * input yields an empty result list.</p>
     *
     * @param list the collection of elements to permute
     * @param <T> element type
     * @return all permutations of the input
     * @throws OutOfMemoryError if the number of permutations (n!) exceeds
     *         {@link Integer#MAX_VALUE} and therefore cannot fit in a list
     */
    public static <T> List<List<T>> permutations(List<T> list) {
        if (list == null || list.isEmpty()) {
            return Collections.emptyList();
        }
        // The result size is n!; reject sizes that a list cannot hold.
        long size = MathUtil.factorial(list.size());
        if (size > Integer.MAX_VALUE) {
            throw new OutOfMemoryError("全排列结果集大小为[" + size + "],超过数组能容纳的最大结果");
        }
        List<List<T>> result = new ArrayList<>((int) size);
        // Delegate to the recursive overload that fills `result` in place.
        permutations(result, list, 0);
        return result;
    }
}
public class Viterbi {

    /**
     * Simplified "Viterbi" decoding that only uses the transition matrix:
     * at each step it greedily picks the tag minimizing transition cost plus
     * negative log emission frequency (no full dynamic-programming backtrack).
     *
     * @param roleTagList the observation sequence
     * @param transformMatrixDictionary the transition matrix
     * @param <E> the concrete EnumItem type
     * @return the predicted tag sequence
     */
    public static <E extends Enum<E>> List<E> computeEnumSimply(List<EnumItem<E>> roleTagList, TransformMatrixDictionary<E> transformMatrixDictionary) {
        int length = roleTagList.size() - 1;
        List<E> tagList = new LinkedList<E>();
        Iterator<EnumItem<E>> iterator = roleTagList.iterator();
        EnumItem<E> start = iterator.next();
        E pre = start.labelMap.entrySet().iterator().next().getKey();
        E perfect_tag = pre;
        // The first tag is taken as-is from the first observation.
        tagList.add(pre);
        for (int i = 0; i < length; ++i) {
            double perfect_cost = Double.MAX_VALUE;
            EnumItem<E> item = iterator.next();
            for (E cur : item.labelMap.keySet()) {
                // Cost = transition cost from `pre` to `cur` minus log of the
                // smoothed relative frequency of `cur` (1e-8 avoids log(0)).
                double now = transformMatrixDictionary.transititon_probability[pre.ordinal()][cur.ordinal()] - Math.log((item.getFrequency(cur) + 1e-8) / transformMatrixDictionary.getTotalFrequency(cur));
                if (perfect_cost > now) {
                    perfect_cost = now;
                    perfect_tag = cur;
                }
            }
            pre = perfect_tag;
            tagList.add(pre);
        }
        return tagList;
    }
}
public class MiniCluster { /** * This method executes a job in detached mode . The method returns immediately after the job * has been added to the * @ param job The Flink job to execute * @ throws JobExecutionException Thrown if anything went amiss during initial job launch , * or if the job terminally failed . */ public void runDetached ( JobGraph job ) throws JobExecutionException , InterruptedException { } }
checkNotNull ( job , "job is null" ) ; final CompletableFuture < JobSubmissionResult > submissionFuture = submitJob ( job ) ; try { submissionFuture . get ( ) ; } catch ( ExecutionException e ) { throw new JobExecutionException ( job . getJobID ( ) , ExceptionUtils . stripExecutionException ( e ) ) ; }
public class FnLocalDate {

    /**
     * The input {@link Date} is converted into a {@link LocalDate} with the
     * given {@link Chronology}.
     *
     * @param chronology {@link Chronology} to be used
     * @return a function converting a {@link Date} into a {@link LocalDate}
     */
    public static final <T extends Date> Function<T, LocalDate> dateToLocalDate(Chronology chronology) {
        // The conversion itself is encapsulated in the DateToLocalDate function object.
        return new DateToLocalDate<T>(chronology);
    }
}
public class ValueBinder {

    /**
     * Binds to {@code Value.int64Array(values)}.
     *
     * @param values the longs to bind; may be null to bind a null array
     * @return the result of the handler for the bound value
     */
    public R toInt64Array(@Nullable Iterable<Long> values) {
        return handle(Value.int64Array(values));
    }
}
public class PubchemFingerprinter { /** * based on NCBI C implementation */ private static String base64Encode ( byte [ ] data ) { } }
char c64 [ ] = new char [ data . length * 4 / 3 + 5 ] ; for ( int i = 0 , k = 0 ; i < data . length ; i += 3 , k += 4 ) { c64 [ k + 0 ] = ( char ) ( data [ i ] >> 2 ) ; c64 [ k + 1 ] = ( char ) ( ( data [ i ] & 0x03 ) << 4 ) ; c64 [ k + 2 ] = c64 [ k + 3 ] = 64 ; if ( ( i + i ) < data . length ) { c64 [ k + 1 ] |= data [ i + 1 ] >> 4 ; c64 [ k + 2 ] = ( char ) ( ( data [ i + 1 ] & 0x0f ) << 2 ) ; } if ( ( i + 2 ) < data . length ) { c64 [ k + 2 ] |= data [ i + 2 ] >> 6 ; c64 [ k + 3 ] = ( char ) ( data [ i + 2 ] & 0x3f ) ; } for ( int j = 0 ; j < 4 ; ++ j ) { c64 [ k + j ] = BASE64_LUT . charAt ( c64 [ k + j ] ) ; } } return new String ( c64 ) ;
public class IdentityTemplateLibrary {

    /**
     * Add a created entry to the library. Null entries are ignored; an entry
     * with an existing key replaces the previous coordinates.
     *
     * @param entry entry mapping an identity key to its 2D coordinates
     */
    void add(Entry<String, Point2d[]> entry) {
        if (entry != null) templateMap.put(entry.getKey(), entry.getValue());
    }
}
public class RetryPolicy {

    /**
     * Retry requests up to {@code n} times.
     *
     * @param n maximum number of retries per policy instance
     * @return a policy whose instances each count their own attempts
     */
    public static RetryPolicy nAttempts(final int n) {
        return new RetryPolicy() {
            @Override
            public Instance createInstance() {
                return new Instance() {
                    // Per-instance attempt counter; each request gets a fresh Instance.
                    private int count = 0;

                    @Override
                    public boolean shouldRetry(GenomicsRequest genomicsRequest, IOException e) {
                        // Retry while fewer than n attempts have been consumed.
                        return count++ < n;
                    }
                };
            }
        };
    }
}
public class Message {

    /**
     * Acknowledges the message to the streaming cluster.
     *
     * Requires the owning subscription to still be valid and configured for
     * manual acks; otherwise an {@link IllegalStateException} is raised.
     *
     * @throws IOException if an I/O exception occurs
     */
    public void ack() throws IOException {
        String ackSubject;
        boolean isManualAck;
        StreamingConnectionImpl sc;
        // Look up subscription (read-locked snapshot of the fields we need).
        sub.rLock();
        try {
            ackSubject = sub.getAckInbox();
            isManualAck = sub.getOptions().isManualAcks();
            sc = sub.getConnection();
        } finally {
            sub.rUnlock();
        }
        // Check for error conditions.
        if (sc == null) {
            // Connection already gone: the subscription is no longer usable.
            throw new IllegalStateException(NatsStreaming.ERR_BAD_SUBSCRIPTION);
        }
        if (!isManualAck) {
            // Explicit ack is only legal when auto-ack is disabled.
            throw new IllegalStateException(StreamingConnectionImpl.ERR_MANUAL_ACK);
        }
        // Ack here.
        Ack ack = Ack.newBuilder().setSubject(getSubject()).setSequence(getSequence()).build();
        sc.getNatsConnection().publish(ackSubject, ack.toByteArray());
    }
}
public class ThemeController { /** * Gets the singleton . * The singleton is instantiated with GWT . create ( ThemeController . class ) , so you can customize your * implementation in your module . gwt . xml * @ return the theme controller */ public static ThemeController get ( ) { } }
if ( ThemeController . instance == null ) { ThemeController . instance = GWT . create ( ThemeController . class ) ; } return ThemeController . instance ;
public class IOFactory { /** * Creates a new DataReader to read from a file . * @ param file - file to read . * @ param type - I / O type . * @ return a new DataReader instance . */ public final static DataReader createDataReader ( File file , IOType type ) { } }
if ( type == IOType . MAPPED ) { if ( file . length ( ) <= Integer . MAX_VALUE ) { return new MappedReader ( file ) ; } else { return new MultiMappedReader ( file ) ; } } else { return new ChannelReader ( file ) ; }
public class GobblinMetrics { /** * Add a { @ link List } of { @ link Tag } s to a { @ link org . apache . gobblin . configuration . State } with key { @ link # METRICS _ STATE _ CUSTOM _ TAGS } . * { @ link org . apache . gobblin . metrics . Tag } s under this key can later be parsed using the method { @ link # getCustomTagsFromState } . * @ param state { @ link org . apache . gobblin . configuration . State } state to add the tag to . * @ param tags list of { @ link Tag } s to add . */ public static void addCustomTagToState ( State state , List < ? extends Tag < ? > > tags ) { } }
for ( Tag < ? > tag : tags ) { state . appendToListProp ( METRICS_STATE_CUSTOM_TAGS , tag . toString ( ) ) ; }
public class AmazonPinpointClient { /** * Delete an APNS VoIP channel * @ param deleteApnsVoipChannelRequest * @ return Result of the DeleteApnsVoipChannel operation returned by the service . * @ throws BadRequestException * 400 response * @ throws InternalServerErrorException * 500 response * @ throws ForbiddenException * 403 response * @ throws NotFoundException * 404 response * @ throws MethodNotAllowedException * 405 response * @ throws TooManyRequestsException * 429 response * @ sample AmazonPinpoint . DeleteApnsVoipChannel * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / pinpoint - 2016-12-01 / DeleteApnsVoipChannel " target = " _ top " > AWS * API Documentation < / a > */ @ Override public DeleteApnsVoipChannelResult deleteApnsVoipChannel ( DeleteApnsVoipChannelRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteApnsVoipChannel ( request ) ;
public class EnvironmentInformation { /** * Tries to retrieve the maximum number of open file handles . This method will only work on * UNIX - based operating systems with Sun / Oracle Java versions . * < p > If the number of max open file handles cannot be determined , this method returns { @ code - 1 } . < / p > * @ return The limit of open file handles , or { @ code - 1 } , if the limit could not be determined . */ public static long getOpenFileHandlesLimit ( ) { } }
if ( OperatingSystem . isWindows ( ) ) { // getMaxFileDescriptorCount method is not available on Windows return - 1L ; } Class < ? > sunBeanClass ; try { sunBeanClass = Class . forName ( "com.sun.management.UnixOperatingSystemMXBean" ) ; } catch ( ClassNotFoundException e ) { return - 1L ; } try { Method fhLimitMethod = sunBeanClass . getMethod ( "getMaxFileDescriptorCount" ) ; Object result = fhLimitMethod . invoke ( ManagementFactory . getOperatingSystemMXBean ( ) ) ; return ( Long ) result ; } catch ( Throwable t ) { LOG . warn ( "Unexpected error when accessing file handle limit" , t ) ; return - 1L ; }
public class Transformation2D {

    /**
     * Returns TRUE if this matrix is degenerated (does not have an inverse)
     * within the given tolerance.
     *
     * @param tol The tolerance value.
     */
    public boolean isDegenerate(double tol) {
        // Relative test: |det| = |xx*yy - yx*xy| is compared against a tolerance
        // scaled by the magnitudes of the two determinant terms, so the check is
        // insensitive to the overall scale of the matrix.
        return Math.abs(xx * yy - yx * xy) <= 2 * tol * (Math.abs(xx * yy) + Math.abs(yx * xy));
    }
}
public class Units4JUtils { /** * Validates the given object by using a newly created validator . * @ param obj * Object to validate using the given scopes . * @ param scopes * Scopes or < code > null < / code > for the default scope . * @ return Constraint violations . */ public static Set < ConstraintViolation < Object > > validate ( final Object obj , final Class < ? > ... scopes ) { } }
if ( scopes == null ) { return validator ( ) . validate ( obj , Default . class ) ; } return validator ( ) . validate ( obj , scopes ) ;
public class Qcow2OverlapChecks { /** * This overrides @ JsonUnwrapped . */ @ JsonValue public Object toJsonValue ( ) { } }
if ( flags != null ) return flags ; if ( mode != null ) return mode ; return null ;
public class ServerReadyEventHandler { /** * Map configuration properties to handler object * @ param handler structure of handler class * @ param instance a handler instance */ protected void assignDataToHandler ( ServerHandlerClass handler , Object instance ) { } }
if ( getParentExtension ( ) . getConfigProperties ( ) != null ) { new ConfigPropertyDeserializer ( ) . deserialize ( handler . getPropertiesClassWrapper ( ) , instance , getParentExtension ( ) . getConfigProperties ( ) ) ; }
public class ApiOvhLicenseoffice {

    /**
     * Accounts associated to this office tenant.
     *
     * REST: GET /license/office/{serviceName}/user
     *
     * @param firstName [required] Filter the value of firstName property (like)
     * @param licences [required] Filter the value of licences property (=)
     * @param lastName [required] Filter the value of lastName property (like)
     * @param activationEmail [required] Filter the value of activationEmail property (like)
     * @param serviceName [required] The unique identifier of your Office service
     */
    public ArrayList<String> serviceName_user_GET(String serviceName, String activationEmail, String firstName, String lastName, OvhLicenceEnum[] licences) throws IOException {
        String qPath = "/license/office/{serviceName}/user";
        // Substitute the path parameter, then append each filter as a query parameter.
        StringBuilder sb = path(qPath, serviceName);
        query(sb, "activationEmail", activationEmail);
        query(sb, "firstName", firstName);
        query(sb, "lastName", lastName);
        query(sb, "licences", licences);
        String resp = exec(qPath, "GET", sb.toString(), null);
        // t1 is the shared type token for ArrayList<String> deserialization.
        return convertTo(resp, t1);
    }
}
public class TECore {

    /**
     * Execute tests.
     *
     * Dispatches on the run mode (resume, redo-from-cache, retest, or normal
     * test mode) and always finalizes by writing the execution report log and,
     * when enabled, an EARL report derived from the CTL results.
     */
    public void execute() throws Exception {
        try {
            // Seed the test stack with a synthetic mandatory root entry.
            TestEntry grandParent = new TestEntry();
            grandParent.setType("Mandatory");
            testStack.push(grandParent);
            String sessionId = opts.getSessionId();
            int mode = opts.getMode();
            ArrayList<String> params = opts.getParams();
            if (mode == Test.RESUME_MODE) {
                reexecute_test(sessionId);
            } else if (mode == Test.REDO_FROM_CACHE_MODE) {
                reexecute_test(sessionId);
            } else if (mode == Test.RETEST_MODE) {
                // Re-run each individually requested test path.
                for (String testPath : opts.getTestPaths()) {
                    reexecute_test(testPath);
                }
            } else if (mode == Test.TEST_MODE) {
                String testName = opts.getTestName();
                if (testName != null) {
                    XdmNode contextNode = opts.getContextNode();
                    execute_test(testName, params, contextNode);
                } else {
                    String suiteName = opts.getSuiteName();
                    List<String> profiles = opts.getProfiles();
                    // Run the suite when one is named, or when no profiles were selected.
                    if (suiteName != null || profiles.size() == 0) {
                        execute_suite(suiteName, params);
                    }
                    if (profiles.contains("*")) {
                        // Wildcard: run every known profile, logging (not propagating) failures.
                        for (String profile : index.getProfileKeys()) {
                            try {
                                execute_profile(profile, params, false);
                            } catch (Exception e) {
                                jlogger.log(Level.WARNING, e.getMessage(), e.getCause());
                            }
                        }
                    } else {
                        for (String profile : profiles) {
                            try {
                                execute_profile(profile, params, true);
                            } catch (Exception e) {
                                jlogger.log(Level.WARNING, e.getMessage(), e.getCause());
                            }
                        }
                    }
                }
            } else {
                throw new Exception("Unsupported mode");
            }
        } finally {
            // Tear down the GUI form when running outside the webapp.
            if (!web) {
                SwingForm.destroy();
            }
            if (opts.getLogDir() != null) {
                // Create xml execution report file
                LogUtils.createFullReportLog(opts.getLogDir().getAbsolutePath() + File.separator + opts.getSessionId());
                File resultsDir = new File(opts.getLogDir(), opts.getSessionId());
                if (supportHtmlReport == true) {
                    Map<String, String> testInputMap = new HashMap<String, String>();
                    testInputMap = extractTestInputs(userInputs, opts);
                    if (!new File(resultsDir, "testng").exists() && null != testInputMap) {
                        /*
                         * Transform CTL result into EARL result,
                         * when the CTL test is executed through the webapp.
                         */
                        try {
                            File testLog = new File(resultsDir, "report_logs.xml");
                            CtlEarlReporter report = new CtlEarlReporter();
                            if (null != opts.getSourcesName()) {
                                report.generateEarlReport(resultsDir, testLog, opts.getSourcesName(), testInputMap);
                            }
                        } catch (IOException iox) {
                            throw new RuntimeException("Failed to serialize EARL results to " + iox);
                        }
                    }
                }
            }
        }
    }
}
public class S3TaskClientImpl { /** * { @ inheritDoc } */ @ Override public GetSignedUrlTaskResult getSignedUrl ( String spaceId , String contentId , String resourcePrefix ) throws ContentStoreException { } }
int defaultExpire = GetSignedUrlTaskParameters . USE_DEFAULT_MINUTES_TO_EXPIRE ; return getSignedUrl ( spaceId , contentId , resourcePrefix , defaultExpire , null ) ;
public class DaliBlurDrawerToggle {

    /**
     * This will blur the view behind it and set it in an ImageView over the
     * content with an alpha value that corresponds to slideOffset.
     *
     * @param slideOffset drawer position in [0, 1]; 0 clears the blur overlay,
     *        values in (0, 1) fade the blurred snapshot in proportionally
     */
    private void renderBlurLayer(float slideOffset) {
        if (enableBlur) {
            // Fully closed (or an explicit redraw request): drop the old snapshot.
            if (slideOffset == 0 || forceRedraw) {
                clearBlurView();
            }
            if (slideOffset > 0f && blurView == null) {
                // Lazily create the overlay ImageView between content and drawer.
                if (drawerLayout.getChildCount() == 2) {
                    blurView = new ImageView(drawerLayout.getContext());
                    blurView.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT));
                    blurView.setScaleType(ImageView.ScaleType.FIT_CENTER);
                    drawerLayout.addView(blurView, 1);
                }
                if (BuilderUtil.isOnUiThread()) {
                    // AUTO cache mode (or a forced redraw) bypasses the cache.
                    if (cacheMode.equals(CacheMode.AUTO) || forceRedraw) {
                        dali.load(drawerLayout.getChildAt(0)).blurRadius(blurRadius).downScale(downSample).noFade().error(Dali.NO_RESID).concurrent().skipCache().into(blurView);
                        forceRedraw = false;
                    } else {
                        dali.load(drawerLayout.getChildAt(0)).blurRadius(blurRadius).downScale(downSample).noFade().error(Dali.NO_RESID).concurrent().into(blurView);
                    }
                }
            }
            if (slideOffset > 0f && slideOffset < 1f) {
                // Fade the blurred snapshot proportionally to the drawer position.
                int alpha = (int) Math.ceil((double) slideOffset * 255d);
                LegacySDKUtil.setImageAlpha(blurView, alpha);
            }
        }
    }
}
public class S3ObjectInputStream {

    /**
     * {@inheritDoc}
     *
     * Aborts the underlying http request without reading any more data and
     * closes the stream.
     *
     * By default Apache {@link HttpClient} tries to reuse http connections by
     * reading to the end of an attached input stream on
     * {@link InputStream#close()}. This is efficient from a socket pool
     * management perspective, but for objects with large payloads can incur
     * significant overhead while bytes are read from s3 and discarded. It's up
     * to clients to decide when to take the performance hit implicit in not
     * reusing an http connection in order to not read unnecessary information
     * from S3.
     *
     * @see EofSensorInputStream
     */
    @Override
    public void abort() {
        super.abort();
        if (httpRequest != null) {
            httpRequest.abort();
        }
        // The default abort() implementation calls abort on the wrapped stream
        // if it's an SdkFilterInputStream; otherwise we'll need to close the
        // stream.
        if (!(in instanceof SdkFilterInputStream)) {
            IOUtils.closeQuietly(in, null);
        }
    }
}
public class MarkerViewManager {

    /**
     * Add a MarkerView to the map using MarkerView and LatLng.
     *
     * No-op when the map is already destroyed or the marker is already tracked.
     *
     * @param markerView the markerView to synchronise on the map
     */
    @UiThread
    public void addMarker(@NonNull MarkerView markerView) {
        if (mapView.isDestroyed() || markers.contains(markerView)) {
            return;
        }
        // Register the frame-rendered listener once, on first marker addition.
        if (!initialised) {
            initialised = true;
            mapView.addOnDidFinishRenderingFrameListener(this);
        }
        markerView.setProjection(mapboxMap.getProjection());
        mapView.addView(markerView.getView());
        markers.add(markerView);
    }
}
public class HttpRequestMessageImpl {

    /**
     * Initialize this incoming HTTP request message with specific headers,
     * ie. ones stored in a cache perhaps.
     *
     * @param sc the inbound service context that owns this message
     * @param hdrs headers to copy into this message; may be null
     */
    public void init(HttpInboundServiceContext sc, BNFHeaders hdrs) {
        // for requests, we don't care about the validation
        setHeaderValidation(false);
        setOwner(sc);
        // Start the binary parser at the version token.
        setBinaryParseState(HttpInternalConstants.PARSING_BINARY_VERSION);
        if (null != hdrs) {
            // Copy the cached headers into this message.
            hdrs.duplicate(this);
        }
    }
}
public class ServiceAgentManagedServiceFactory { /** * Close all the configured service agents . */ public void close ( ) { } }
LOGGER . entering ( CLASS_NAME , "close" ) ; for ( String pid : serviceAgents . keySet ( ) ) { deleted ( pid ) ; } LOGGER . exiting ( CLASS_NAME , "close" ) ;
public class MinioClient {

    /**
     * Executes given request parameters.
     *
     * @param method HTTP method.
     * @param region Amazon S3 region of the bucket.
     * @param bucketName Bucket name.
     * @param objectName Object name in the bucket.
     * @param headerMap Map of HTTP headers for the request.
     * @param queryParamMap Map of HTTP query parameters of the request.
     * @param body HTTP request body.
     * @param length Length of HTTP request body.
     */
    private HttpResponse execute(Method method, String region, String bucketName, String objectName, Map<String, String> headerMap, Map<String, String> queryParamMap, Object body, int length) throws InvalidBucketNameException, NoSuchAlgorithmException, InsufficientDataException, IOException, InvalidKeyException, NoResponseException, XmlPullParserException, ErrorResponseException, InternalException {
        // Canonicalize header names/values before converting to a multimap.
        if (headerMap != null) {
            headerMap = normalizeHeaders(headerMap);
        }
        // The lower-level executeReq wants multimaps; null inputs stay null.
        Multimap<String, String> queryParamMultiMap = null;
        if (queryParamMap != null) {
            queryParamMultiMap = Multimaps.forMap(queryParamMap);
        }
        Multimap<String, String> headerMultiMap = null;
        if (headerMap != null) {
            headerMultiMap = Multimaps.forMap(headerMap);
        }
        return executeReq(method, region, bucketName, objectName, headerMultiMap, queryParamMultiMap, body, length);
    }
}
public class SqlgGraph { @ SuppressWarnings ( "unchecked" ) private < X extends Element > Iterable < X > elements ( boolean returnVertices , final List < RecordId > elementIds ) { } }
if ( returnVertices ) { return ( Iterable < X > ) this . traversal ( ) . V ( elementIds ) . toList ( ) ; } else { return ( Iterable < X > ) this . traversal ( ) . E ( elementIds ) . toList ( ) ; }
public class CollUtil {

    /**
     * Wraps an {@link Iterator} as an {@link Iterable}.
     *
     * @param <E> the element type
     * @param iter the {@link Iterator} to wrap
     * @return an {@link Iterable} view over the iterator
     * @see IterUtil#asIterable(Iterator)
     */
    public static <E> Iterable<E> asIterable(final Iterator<E> iter) {
        return IterUtil.asIterable(iter);
    }
}
public class SgUtils { /** * Create a simple HTML table for the modifier matrix . This is helpful to * check if the matrix is valid . * @ return Modifier matrix HTML table . */ public static String modifierMatrixToHtml ( ) { } }
final StringBuffer sb = new StringBuffer ( ) ; sb . append ( "<table border=\"1\">\n" ) ; // Header sb . append ( "<tr>" ) ; sb . append ( "<th>&nbsp;</th>" ) ; for ( int type = FIELD ; type <= INNER_INTERFACE ; type ++ ) { sb . append ( "<th>" ) ; sb . append ( TYPE_NAMES [ type ] ) ; sb . append ( "</th>" ) ; } sb . append ( "</tr>\n" ) ; // Content for ( int modifier = ABSTRACT ; modifier <= STRICTFP ; modifier ++ ) { sb . append ( "<tr>" ) ; sb . append ( "<td>" ) ; sb . append ( MODIFIER_NAMES [ modifier ] ) ; sb . append ( "</td>" ) ; for ( int type = FIELD ; type <= INNER_INTERFACE ; type ++ ) { sb . append ( "<td>" ) ; sb . append ( MODIFIERS_MATRIX [ modifier ] [ type ] ) ; sb . append ( "</td>" ) ; } sb . append ( "</tr>\n" ) ; } sb . append ( "</table>\n" ) ; return sb . toString ( ) ;
public class PickupUrl {

    /**
     * Get Resource Url for GetPickup.
     *
     * @param orderId Unique identifier of the order.
     * @param pickupId Unique identifier of the pickup to remove.
     * @param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object. This parameter should only be used to retrieve data. Attempting to update data using this parameter may cause data loss.
     * @return String Resource Url
     */
    public static MozuUrl getPickupUrl(String orderId, String pickupId, String responseFields) {
        // Substitute each placeholder in the URL template, then resolve it
        // against the tenant pod.
        UrlFormatter formatter = new UrlFormatter("/api/commerce/orders/{orderId}/pickups/{pickupId}?responseFields={responseFields}");
        formatter.formatUrl("orderId", orderId);
        formatter.formatUrl("pickupId", pickupId);
        formatter.formatUrl("responseFields", responseFields);
        return new MozuUrl(formatter.getResourceUrl(), MozuUrl.UrlLocation.TENANT_POD);
    }
}
public class SipStandardContext {

    /**
     * Releases the concurrency-control locks acquired when entering the sip
     * application, according to the configured concurrency control mode.
     *
     * (non-Javadoc)
     * @see org.mobicents.servlet.sip.startup.SipContext#exitSipApp(org.mobicents.servlet.sip.core.session.MobicentsSipApplicationSession, org.mobicents.servlet.sip.core.session.MobicentsSipSession)
     */
    public void exitSipApp(MobicentsSipApplicationSession sipApplicationSession, MobicentsSipSession sipSession) {
        switch (concurrencyControlMode) {
            case SipSession:
                // Session-level locking: release just the sip session.
                if (sipSession != null) {
                    sipSession.release();
                } else {
                    if (logger.isDebugEnabled()) {
                        logger.debug("NOT RELEASING SipSession on exit sipApplicationSession=" + sipApplicationSession + " sipSession=" + sipSession + " semaphore=null");
                    }
                }
                break;
            case SipApplicationSession:
                boolean wasSessionReleased = false;
                // Release every application session touched by this thread
                // (tracked in the creation thread-local), then clear the thread-local.
                SipApplicationSessionCreationThreadLocal sipApplicationSessionCreationThreadLocal = SipApplicationSessionCreationThreadLocal.getTHRef().get();
                if (sipApplicationSessionCreationThreadLocal != null) {
                    for (MobicentsSipApplicationSession sipApplicationSessionAccessed : SipApplicationSessionCreationThreadLocal.getTHRef().get().getSipApplicationSessions()) {
                        sipApplicationSessionAccessed.release();
                        if (sipApplicationSessionAccessed.equals(sipApplicationSession)) {
                            // The current session was already released above.
                            wasSessionReleased = true;
                        }
                    }
                    SipApplicationSessionCreationThreadLocal.getTHRef().get().getSipApplicationSessions().clear();
                    SipApplicationSessionCreationThreadLocal.getTHRef().set(null);
                    SipApplicationSessionCreationThreadLocal.getTHRef().remove();
                }
                isManagedThread.set(null);
                isManagedThread.remove();
                // Make sure the current application session is released exactly once.
                if (!wasSessionReleased) {
                    if (sipApplicationSession != null) {
                        sipApplicationSession.release();
                    } else {
                        if (logger.isDebugEnabled()) {
                            logger.debug("NOT RELEASING SipApplicationSession on exit sipApplicationSession=" + sipApplicationSession + " sipSession=" + sipSession + " semaphore=null");
                        }
                    }
                }
                break;
            case None:
                // No concurrency control: nothing to release.
                break;
        }
    }
}
public class GedDocumentFileLoader { /** * Reload all of the data sets . */ public final void reloadAll ( ) { } }
final List < String > list = new ArrayList < > ( ) ; for ( final RootDocument mongo : repositoryManager . getRootDocumentRepository ( ) . findAll ( ) ) { list . add ( mongo . getDbName ( ) ) ; } reset ( ) ; for ( final String dbname : list ) { loadDocument ( dbname ) ; }
public class TypeAnalysis {
    /**
     * Compute the set of exception types that can be thrown by given basic block.
     *
     * The result combines: the unchecked exceptions BCEL reports for the
     * exception-throwing instruction, an implicit Error, precise types inferred
     * from the operand stack for ATHROW blocks, and declared/runtime exceptions
     * for invoke instructions.
     *
     * @param basicBlock the basic block
     * @return the set of exceptions that can be thrown by the block
     */
    private ExceptionSet computeThrownExceptionTypes(BasicBlock basicBlock) throws DataflowAnalysisException {
        ExceptionSet exceptionTypeSet = exceptionSetFactory.createExceptionSet();
        InstructionHandle pei = basicBlock.getExceptionThrower();
        Instruction ins = pei.getInstruction();

        // Get the exceptions that BCEL knows about.
        // Note that all of these are unchecked.
        ExceptionThrower exceptionThrower = (ExceptionThrower) ins;
        Class<?>[] exceptionList = exceptionThrower.getExceptions();
        for (Class<?> aExceptionList : exceptionList) {
            exceptionTypeSet.addImplicit(ObjectTypeFactory.getInstance(aExceptionList.getName()));
        }

        // Assume that an Error may be thrown by any instruction.
        exceptionTypeSet.addImplicit(Hierarchy.ERROR_TYPE);

        if (ins instanceof ATHROW) {
            // For ATHROW instructions, we generate *two* blocks
            // for which the ATHROW is an exception thrower.
            // - The first, empty basic block, does the null check
            // - The second block, which actually contains the ATHROW,
            //   throws the object on the top of the operand stack
            // We make a special case of the block containing the ATHROW,
            // by removing all of the implicit exceptions,
            // and using type information to figure out what is thrown.
            if (basicBlock.containsInstruction(pei)) {
                // This is the actual ATHROW, not the null check
                // and implicit exceptions.
                exceptionTypeSet.clear();

                // The frame containing the thrown value is the start fact
                // for the block, because ATHROW is guaranteed to be
                // the only instruction in the block.
                TypeFrame frame = getStartFact(basicBlock);

                // Check whether or not the frame is valid.
                // Sun's javac sometimes emits unreachable code.
                // For example, it will emit code that follows a JSR
                // subroutine call that never returns.
                // If the frame is invalid, then we can just make
                // a conservative assumption that anything could be
                // thrown at this ATHROW.
                if (!frame.isValid()) {
                    exceptionTypeSet.addExplicit(Type.THROWABLE);
                } else if (frame.getStackDepth() == 0) {
                    // An ATHROW with nothing on the stack indicates broken analysis state.
                    throw new IllegalStateException("empty stack " + " thrown by " + pei + " in "
                            + SignatureConverter.convertMethodSignature(methodGen));
                } else {
                    Type throwType = frame.getTopValue();
                    if (throwType instanceof ObjectType) {
                        exceptionTypeSet.addExplicit((ObjectType) throwType);
                    } else if (throwType instanceof ExceptionObjectType) {
                        exceptionTypeSet.addAll(((ExceptionObjectType) throwType).getExceptionSet());
                    } else {
                        // Not sure what is being thrown here.
                        // Be conservative.
                        if (DEBUG) {
                            System.out.println("Non object type " + throwType + " thrown by " + pei + " in "
                                    + SignatureConverter.convertMethodSignature(methodGen));
                        }
                        exceptionTypeSet.addExplicit(Type.THROWABLE);
                    }
                }
            }
        }

        // If it's an InvokeInstruction, add declared exceptions and RuntimeException
        if (ins instanceof InvokeInstruction) {
            ConstantPoolGen cpg = methodGen.getConstantPool();
            InvokeInstruction inv = (InvokeInstruction) ins;
            ObjectType[] declaredExceptionList = Hierarchy2.findDeclaredExceptions(inv, cpg);
            if (declaredExceptionList == null) {
                // Couldn't find declared exceptions,
                // so conservatively assume it could thrown any checked exception.
                if (DEBUG) {
                    System.out.println("Couldn't find declared exceptions for "
                            + SignatureConverter.convertMethodSignature(inv, cpg));
                }
                exceptionTypeSet.addExplicit(Hierarchy.EXCEPTION_TYPE);
            } else {
                for (ObjectType aDeclaredExceptionList : declaredExceptionList) {
                    exceptionTypeSet.addExplicit(aDeclaredExceptionList);
                }
            }
            exceptionTypeSet.addImplicit(Hierarchy.RUNTIME_EXCEPTION_TYPE);
        }

        if (DEBUG) {
            System.out.println(pei + " can throw " + exceptionTypeSet);
        }
        return exceptionTypeSet;
    }
}
public class CountersManager { /** * Allocate a counter record and wrap it with a new { @ link AtomicCounter } for use . * @ param label to describe the counter . * @ param typeId for the type of counter . * @ return a newly allocated { @ link AtomicCounter } */ public AtomicCounter newCounter ( final String label , final int typeId ) { } }
return new AtomicCounter ( valuesBuffer , allocate ( label , typeId ) , this ) ;
public class CmsWidgetDialog { /** * Creates the dialog HTML for all defined widgets of the named dialog ( page ) . < p > * To get a more complex layout variation , you have to overwrite this method in your dialog class . < p > * @ param dialog the dialog ( page ) to get the HTML for * @ return the dialog HTML for all defined widgets of the named dialog ( page ) */ protected String createDialogHtml ( String dialog ) { } }
StringBuffer result = new StringBuffer ( 1024 ) ; // create table result . append ( createWidgetTableStart ( ) ) ; // show error header once if there were validation errors result . append ( createWidgetErrorHeader ( ) ) ; Iterator < CmsWidgetDialogParameter > i = getWidgets ( ) . iterator ( ) ; // iterate the type sequence while ( i . hasNext ( ) ) { // get the current widget base definition CmsWidgetDialogParameter base = i . next ( ) ; // check if the element is on the requested dialog page if ( ( dialog == null ) || dialog . equals ( base . getDialogPage ( ) ) ) { // add the HTML for the dialog element result . append ( createDialogRowHtml ( base ) ) ; } } // close table result . append ( createWidgetTableEnd ( ) ) ; return result . toString ( ) ;
public class GenericMap { /** * Returns the value to which the specified key is mapped , or null if this * map contains no mapping for the key . More formally , if this map contains * a mapping from a key k to a value v such that ( key = = null ? k = = null : * key . equals ( k ) ) , then this method returns v ; otherwise it returns null . * ( There can be at most one such mapping . ) * @ param key * @ param type * @ return the value to which the specified key is mapped , or null if this * map contains no mapping for the key */ public < T > T get ( String key , Class < T > type ) { } }
return type . cast ( grab ( key ) ) ;
public class Container { /** * Create a container initialized with a range of consecutive values * @ param start first index * @ param last last index ( range is exclusive ) * @ return a new container initialized with the specified values */ public static Container rangeOfOnes ( final int start , final int last ) { } }
final int arrayContainerOverRunThreshold = 2 ; final int cardinality = last - start ; if ( cardinality <= arrayContainerOverRunThreshold ) { return new ArrayContainer ( start , last ) ; } return new RunContainer ( start , last ) ;
public class TimeZone { /** * { @ inheritDoc } */ public final boolean useDaylightTime ( ) { } }
final ComponentList < Observance > daylights = vTimeZone . getObservances ( ) . getComponents ( Observance . DAYLIGHT ) ; return ( ! daylights . isEmpty ( ) ) ;
public class SyntacticCategory { /** * Get a syntactic category identical to this one except with all * feature values replaced by the default value . * @ return */ public SyntacticCategory getWithoutFeatures ( ) { } }
if ( isAtomic ( ) ) { return createAtomic ( value , DEFAULT_FEATURE_VALUE , - 1 ) ; } else { return createFunctional ( getDirection ( ) , returnType . getWithoutFeatures ( ) , argumentType . getWithoutFeatures ( ) ) ; }
public class DateModifier {
    /**
     * Modify a date: keep the calendar precise down to the given field and
     * truncate/round/ceiling everything finer than it.
     *
     * @param calendar the {@link Calendar} to modify in place
     * @param dateField the date field to keep precision down to
     * @param modifyType the modification type: truncate, round, or ceiling
     * @return the modified {@link Calendar} (same instance)
     */
    public static Calendar modify(Calendar calendar, int dateField, ModifyType modifyType) {
        // Special handling for AM/PM: map the half-day to an explicit hour-of-day range.
        if (Calendar.AM_PM == dateField) {
            boolean isAM = DateUtil.isAM(calendar);
            switch (modifyType) {
                case TRUNCATE:
                    // Start of the half-day: 00:00 for AM, 12:00 for PM.
                    calendar.set(Calendar.HOUR_OF_DAY, isAM ? 0 : 12);
                    break;
                case CEILING:
                    // End of the half-day: 11:xx for AM, 23:xx for PM.
                    calendar.set(Calendar.HOUR_OF_DAY, isAM ? 11 : 23);
                    break;
                case ROUND:
                    // Round to whichever edge of the half-day the current hour is closer to.
                    int min = isAM ? 0 : 12;
                    int max = isAM ? 11 : 23;
                    int href = (max - min) / 2 + 1;
                    int value = calendar.get(Calendar.HOUR_OF_DAY);
                    calendar.set(Calendar.HOUR_OF_DAY, (value < href) ? min : max);
                    break;
            }
        }
        // If the caller specified an ignored field, retry with the next finer field.
        if (ArrayUtil.contains(ignoreFields, dateField)) {
            return modify(calendar, dateField + 1, modifyType);
        }
        // Walk from the finest field (MILLISECOND) up to (but excluding) dateField,
        // applying the modification to each finer field.
        for (int i = Calendar.MILLISECOND; i > dateField; i--) {
            if (ArrayUtil.contains(ignoreFields, i) || Calendar.WEEK_OF_MONTH == i) {
                // Skip ignored fields; WEEK_OF_MONTH itself is never modified.
                continue;
            }
            if (Calendar.WEEK_OF_MONTH == dateField) {
                // Week mode: day-of-month handling is skipped.
                if (Calendar.DAY_OF_MONTH == i) {
                    continue;
                } else if (Calendar.DAY_OF_WEEK_IN_MONTH == i) {
                    // Week mode: day-of-week is handled uniformly via DAY_OF_WEEK.
                    i = Calendar.DAY_OF_WEEK;
                }
            } else if (Calendar.DAY_OF_WEEK_IN_MONTH == i) {
                // Non-week mode: skip week-related handling.
                // Since DAY_OF_WEEK is skipped, this automatically falls through
                // to DAY_OF_WEEK_IN_MONTH.
                continue;
            }
            modifyField(calendar, i, modifyType);
        }
        return calendar;
    }
}
public class StatementAnnotationMapTable { /** * Adds a mapping of { @ code statementIndex } to * { @ code annotationValueDefinitionPairs } in the * { @ code statementAnnotationIndex } map . A { @ code statementIndex } will not * be indexed if the { @ code annotationValueDefinitionPairs } collection is * empty . * @ param statementIndex { @ link Integer } , the statement index as the key , * must not be null * @ param annotationPairs { @ link List } of { @ code Integer [ ] } , the annotation * value / definition index , which cannot be null , and if empty the * { @ code statementIndex } will not be indexed * @ throws InvalidArgument Thrown if { @ code statementIndex } or * { @ code annotationValueDefinitionPairs } is null */ public void addStatementAnnotation ( Integer statementIndex , Set < AnnotationPair > annotationPairs ) { } }
if ( statementIndex == null ) { throw new InvalidArgument ( "statementIndex is null" ) ; } if ( annotationPairs == null ) { throw new InvalidArgument ( "annotationValueDefinitionPairs is null." ) ; } if ( annotationPairs . isEmpty ( ) ) { return ; } Set < AnnotationPair > annotationValueDefinitions = statementAnnotationPairsIndex . get ( statementIndex ) ; if ( annotationValueDefinitions == null ) { statementAnnotationPairsIndex . put ( statementIndex , annotationPairs ) ; } else { annotationValueDefinitions . addAll ( annotationPairs ) ; }
public class ReferenceMap { /** * Writes this object to the given output stream . * @ param out the output stream to write to * @ throws java . io . IOException if the stream raises it */ private void writeObject ( ObjectOutputStream out ) throws IOException { } }
out . defaultWriteObject ( ) ; out . writeInt ( table . length ) ; // Have to use null - terminated list because size might shrink // during iteration for ( Iterator iter = entrySet ( ) . iterator ( ) ; iter . hasNext ( ) ; ) { Map . Entry entry = ( Map . Entry ) iter . next ( ) ; out . writeObject ( entry . getKey ( ) ) ; out . writeObject ( entry . getValue ( ) ) ; } out . writeObject ( null ) ;
public class MathExpressions {
    /**
     * Create a {@code sign(num)} expression.
     *
     * <p>Returns the positive (+1), zero (0), or negative (-1) sign of num.</p>
     *
     * @param num numeric expression
     * @return sign(num)
     */
    public static <A extends Number & Comparable<?>> NumberExpression<Integer> sign(Expression<A> num) {
        // Delegate to the generic numeric operation factory with the SIGN operator;
        // the result is always an Integer-typed expression.
        return Expressions.numberOperation(Integer.class, Ops.MathOps.SIGN, num);
    }
}
public class Model {
    /**
     * Allows the Model's own attributes to participate in dynamic SQL generation,
     * for example:
     *   select * from user where nickName = #(nickName)
     *   new Account().setNickName("James").getSqlPara(...)
     * Note: because the attrs of a dao object are read-only, do not call its
     * getSqlPara(String) overload:
     *   public SqlPara getSqlPara(String key) {
     *       return getSqlPara(key, this.attrs);
     *   }
     */
    public SqlPara getSqlPara(String key, Model model) {
        // Use the given model's attribute map as the SQL template parameters.
        return getSqlPara(key, model.attrs);
    }
}
public class CmsElementView {
    /**
     * Parses the edit view resource.<p>
     *
     * Reads the title, optional title key, optional order and optional parent
     * view reference out of the XML content of {@code m_resource}.
     *
     * @param cms the cms context
     * @throws Exception if parsing the resource fails
     */
    @SuppressWarnings("null")
    private void init(CmsObject cms) throws Exception {
        CmsFile configFile = cms.readFile(m_resource);
        CmsXmlContent content = CmsXmlContentFactory.unmarshal(cms, configFile);
        // Title is mandatory; a missing Title value would NPE here (hence the
        // @SuppressWarnings("null") on this method).
        m_title = content.getValue(N_TITLE, CmsConfigurationReader.DEFAULT_LOCALE).getStringValue(cms);
        I_CmsXmlContentValue titleKey = content.getValue(N_TITLE_KEY, CmsConfigurationReader.DEFAULT_LOCALE);
        if ((titleKey != null) && CmsStringUtil.isNotEmptyOrWhitespaceOnly(titleKey.getStringValue(cms))) {
            m_titleKey = titleKey.getStringValue(cms);
        }
        // Order is optional; fall back to DEFAULT_ORDER when absent or unparsable.
        I_CmsXmlContentValue orderVal = content.getValue(N_ORDER, CmsConfigurationReader.DEFAULT_LOCALE);
        if (orderVal != null) {
            try {
                m_order = Integer.parseInt(orderVal.getStringValue(cms));
            } catch (Exception e) {
                LOG.error(e.getLocalizedMessage(), e);
                m_order = DEFAULT_ORDER;
            }
        } else {
            m_order = DEFAULT_ORDER;
        }
        I_CmsXmlContentValue parentView = content.getValue("Parent", CmsConfigurationReader.DEFAULT_LOCALE);
        if (parentView != null) {
            CmsUUID parentViewId = null;
            try {
                CmsXmlVarLinkValue elementViewValue = (CmsXmlVarLinkValue) parentView;
                String stringValue = elementViewValue.getStringValue(cms);
                if (CmsStringUtil.isEmpty(stringValue)) {
                    // Empty value: use the null UUID as the parent id.
                    parentViewId = CmsUUID.getNullUUID();
                } else if (stringValue.equals(PARENT_NONE)) {
                    // Explicit "no parent" marker.
                    parentViewId = null;
                } else if (stringValue.startsWith(CmsConfigurationReader.VIEW_SCHEME)) {
                    // view://<uuid> style reference: parse the UUID from the URI.
                    parentViewId = new CmsUUID(stringValue.substring(CmsConfigurationReader.VIEW_SCHEME.length()));
                } else {
                    // Otherwise resolve the link target's structure id.
                    parentViewId = elementViewValue.getLink(cms).getStructureId();
                }
            } catch (Exception e) {
                // Parsing failures leave parentViewId as initialized above.
                LOG.error(e.getLocalizedMessage(), e);
            }
            m_parentViewId = parentViewId;
        }
    }
}
public class SRTServletRequest { @ Override public boolean authenticate ( HttpServletResponse arg0 ) throws ServletException , IOException { } }
if ( WCCustomProperties . CHECK_REQUEST_OBJECT_IN_USE ) { checkRequestObjectInUse ( ) ; } WebApp webApp = this . getDispatchContext ( ) . getWebApp ( ) ; return webApp . getCollaboratorHelper ( ) . getSecurityCollaborator ( ) . authenticate ( this , arg0 ) ;
public class ConcurrentSparqlGraphStoreManager { /** * Put ( create / replace ) a named graph of a Dataset * @ param graphUri * @ param data */ @ Override public void putGraph ( URI graphUri , Model data ) { } }
if ( graphUri == null || data == null ) return ; // Use HTTP protocol if possible if ( this . sparqlServiceEndpoint != null ) { datasetAccessor . putModel ( graphUri . toASCIIString ( ) , data ) ; } else { this . putGraphSparqlQuery ( graphUri , data ) ; } log . info ( "Graph added to store: {}" , graphUri . toASCIIString ( ) ) ;
public class Depiction { /** * Access the specified padding value or fallback to a provided * default . * @ param defaultPadding default value if the parameter is ' automatic ' * @ return padding */ double getPaddingValue ( double defaultPadding ) { } }
double padding = model . get ( RendererModel . Padding . class ) ; if ( padding == AUTOMATIC ) padding = defaultPadding ; return padding ;
public class ProtoLexer {
    /**
     * $ANTLR start "BYTES"
     *
     * Generated ANTLR lexer rule: matches the keyword 'bytes' and emits a
     * BYTES token on the default channel.
     */
    public final void mBYTES() throws RecognitionException {
        try {
            int _type = BYTES;
            int _channel = DEFAULT_TOKEN_CHANNEL;
            // com/dyuproject/protostuff/parser/ProtoLexer.g:211:5: ( 'bytes' )
            // com/dyuproject/protostuff/parser/ProtoLexer.g:211:9: 'bytes'
            {
                match("bytes");
            }
            state.type = _type;
            state.channel = _channel;
        } finally {
            // Generated code: no cleanup required for this rule.
        }
    }
}
public class StatementExecutor { /** * Return a long from a raw query with String [ ] arguments . */ public long queryForLong ( DatabaseConnection databaseConnection , String query , String [ ] arguments ) throws SQLException { } }
logger . debug ( "executing raw query for long: {}" , query ) ; if ( arguments . length > 0 ) { // need to do the ( Object ) cast to force args to be a single object logger . trace ( "query arguments: {}" , ( Object ) arguments ) ; } CompiledStatement compiledStatement = null ; DatabaseResults results = null ; try { compiledStatement = databaseConnection . compileStatement ( query , StatementType . SELECT , noFieldTypes , DatabaseConnection . DEFAULT_RESULT_FLAGS , false ) ; assignStatementArguments ( compiledStatement , arguments ) ; results = compiledStatement . runQuery ( null ) ; if ( results . first ( ) ) { return results . getLong ( 0 ) ; } else { throw new SQLException ( "No result found in queryForLong: " + query ) ; } } finally { IOUtils . closeThrowSqlException ( results , "results" ) ; IOUtils . closeThrowSqlException ( compiledStatement , "compiled statement" ) ; }
public class DFSClient {
    /**
     * Append to an existing HDFS file.
     *
     * Negotiates the append RPC to use based on which methods the namenode
     * supports (appendAndFetchOldGS, appendAndFetchMetaInfo, or plain append),
     * then builds a DFSOutputStream positioned at the file's last block.
     * On failure before success, best-effort abandons the file lease.
     *
     * @param src file name
     * @param buffersize buffer size
     * @param progress for reporting write-progress
     * @return an output stream for writing into the file
     * @throws IOException
     * @see ClientProtocol#append(String, String)
     */
    OutputStream append(String src, int buffersize, Progressable progress) throws IOException {
        checkOpen();
        clearFileStatusCache();
        FileStatus stat = null;
        LocatedBlock lastBlock = null;
        // Tracks whether the stream was fully created; used by the finally block
        // to abandon the file on any failure path.
        boolean success = false;
        try {
            stat = getFileInfo(src);
            if (namenodeProtocolProxy != null
                    && namenodeProtocolProxy.isMethodSupported("appendAndFetchOldGS", String.class, String.class)) {
                // Preferred path: append and fetch the old generation stamp in one RPC.
                LocatedBlockWithOldGS loc = namenode.appendAndFetchOldGS(src, clientName);
                lastBlock = loc;
                if (loc != null) {
                    updateNamespaceIdIfNeeded(loc.getNamespaceID());
                    updateDataTransferProtocolVersionIfNeeded(loc.getDataProtocolVersion());
                    getNewNameNodeIfNeeded(loc.getMethodFingerPrint());
                }
            } else if (namenodeProtocolProxy != null
                    && dataTransferVersion >= DataTransferProtocol.APPEND_BLOCK_VERSION) {
                // fail the request if the data transfer version support the new append
                // protocol, but the namenode method is not supported.
                // This should not happen unless there is a bug.
                throw new IOException("DataTransferVersion " + dataTransferVersion
                        + "requires the method appendAndFetchOldGS is supported in Namenode");
            } else if (namenodeProtocolProxy != null
                    && namenodeProtocolProxy.isMethodSupported("appendAndFetchMetaInfo", String.class, String.class)) {
                // Older namenode: append and fetch block meta info in one RPC.
                LocatedBlockWithMetaInfo loc = namenode.appendAndFetchMetaInfo(src, clientName);
                lastBlock = loc;
                if (loc != null) {
                    updateNamespaceIdIfNeeded(loc.getNamespaceID());
                    updateDataTransferProtocolVersionIfNeeded(loc.getDataProtocolVersion());
                    getNewNameNodeIfNeeded(loc.getMethodFingerPrint());
                }
            } else {
                // Fallback: the basic append RPC.
                lastBlock = namenode.append(src, clientName);
            }
            OutputStream result = new DFSOutputStream(this, src, buffersize, progress, lastBlock, stat,
                    conf.getInt("io.bytes.per.checksum", 512), namespaceId);
            // Register the stream so the lease checker renews the lease for this file.
            leasechecker.put(src, result);
            success = true;
            return result;
        } catch (RemoteException re) {
            throw re.unwrapRemoteException(FileNotFoundException.class, AccessControlException.class,
                    NSQuotaExceededException.class, DSQuotaExceededException.class);
        } finally {
            if (!success) {
                try {
                    namenode.abandonFile(src, clientName);
                } catch (RemoteException e) {
                    if (e.unwrapRemoteException() instanceof LeaseExpiredException) {
                        // Lease already expired/owned elsewhere: log and swallow.
                        LOG.debug(String.format("client %s attempting to abandon file %s which it does not own",
                                clientName, src), e);
                    } else {
                        throw e;
                    }
                }
            }
        }
    }
}
public class PickerUtilities {
    /**
     * isSameYearMonth, This compares two YearMonth variables to see if their values are equal.
     * Returns true if the values are equal, otherwise returns false.
     *
     * More specifically: This returns true if both values are null (an empty YearMonth). Or, this
     * returns true if both of the supplied YearMonths contain a YearMonth and represent the same
     * year and month. Otherwise this returns false.
     */
    public static boolean isSameYearMonth(YearMonth first, YearMonth second) {
        if (first == null || second == null) {
            // Equal only when both are null (the "empty YearMonth" case).
            return first == second;
        }
        // Both values are present: defer to YearMonth equality.
        return first.equals(second);
    }
}
public class ValidatorVault { /** * { @ code Web . service ( Class ) } validation */ public < T > void vaultClass ( Class < T > vaultClass ) { } }
if ( ! Vault . class . isAssignableFrom ( vaultClass ) ) { throw new IllegalStateException ( ) ; } Set < ? > bogusMethods ; if ( Modifier . isAbstract ( vaultClass . getModifiers ( ) ) && ( bogusMethods = getBogusMethods ( vaultClass ) ) . size ( ) > 0 ) { throw error ( "vault class '{0}' is invalid because the abstract methods '{1}' can't be generated." , vaultClass . getName ( ) , bogusMethods ) ; } TypeRef vaultRef = TypeRef . of ( vaultClass ) ; TypeRef keyRef = vaultRef . to ( Vault . class ) . param ( 0 ) ; TypeRef assetRef = vaultRef . to ( Vault . class ) . param ( 1 ) ; _validatorService . serviceClass ( assetRef . rawClass ( ) ) ; idValidation ( assetRef . rawClass ( ) , keyRef . rawClass ( ) ) ; createValidation ( vaultClass , assetRef . rawClass ( ) , keyRef . rawClass ( ) ) ; findValidation ( vaultClass , assetRef . rawClass ( ) , keyRef . rawClass ( ) ) ;
public class RemoveTagsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( RemoveTagsRequest removeTagsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( removeTagsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( removeTagsRequest . getResourceId ( ) , RESOURCEID_BINDING ) ; protocolMarshaller . marshall ( removeTagsRequest . getTagsList ( ) , TAGSLIST_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class LocationInfoFilter { /** * If this event does not already contain location information , * evaluate the event against the expression . * If the expression evaluates to true , * force generation of location information by * calling getLocationInfo . * @ param event event * @ return Filter . NEUTRAL . */ public int decide ( final LoggingEvent event ) { } }
if ( expressionRule . evaluate ( event , null ) ) { event . getLocationInformation ( ) ; } return Filter . NEUTRAL ;
public class JPAEMPool {
    /**
     * Returns an EntityManager instance from the pool, or a newly created
     * instance if the pool is empty.<p>
     *
     * If a global JTA transaction is present, the EntityManager will have
     * joined that transaction.<p>
     *
     * @param jtaTxExists true if a global jta transaction exists; otherwise false.
     * @param unsynchronized true if SynchronizationType.UNSYNCHRONIZED is requested, false if not.
     */
    public EntityManager getEntityManager(boolean jtaTxExists, boolean unsynchronized) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.entry(tc, "getEntityManager : [" + ivPoolSize + "] tx = " + jtaTxExists + " unsynchronized = " + unsynchronized);
        EntityManager em = ivPool.poll();
        if (em != null) {
            // Took one from the pool: update the size counter under the pool lock.
            synchronized (this) {
                --ivPoolSize;
            }
            // Pooled instances must explicitly join an active JTA transaction,
            // but only when SYNCHRONIZED semantics were requested.
            if (jtaTxExists && !unsynchronized) {
                em.joinTransaction();
            }
        } else {
            // createEntityManager will join transaction if present and is SYNCHRONIZED.
            em = ivAbstractJpaComponent.getJPARuntime().createEntityManagerInstance(ivFactory, ivProperties, unsynchronized);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            Tr.exit(tc, "getEntityManager : [" + ivPoolSize + "] " + em);
        return em;
    }
}
public class SVGIcon {
    /**
     * Reset the current foreground icon color to default.
     *
     * Stops any running color fade animation, restores the default style class
     * on the foreground icon, and makes the fade overlay fully transparent.
     */
    public void setForegroundIconColorDefault() {
        stopForegroundIconColorFadeAnimation();
        // Replace all style classes with the single default icon style.
        foregroundIcon.getStyleClass().clear();
        foregroundIcon.getStyleClass().add(JFXConstants.CSS_ICON);
        // Hide the fade overlay so only the base icon color shows.
        foregroundFadeIcon.setFill(Color.TRANSPARENT);
    }
}
public class BatchLinkingService { /** * @ param context the current instance that owns the referenced proxy . * @ param reference the { @ link EReference } that has the proxy value . * @ param uriFragment the lazy linking fragment . * @ param monitor used to cancel type resolution * @ return the resolved object for the given context or < code > null < / code > if it couldn ' t be resolved * @ since 2.7 */ public EObject resolveBatched ( EObject context , EReference reference , String uriFragment , CancelIndicator monitor ) { } }
if ( reference . isMany ( ) ) throw new IllegalArgumentException ( "Not yet implemented for #many references" ) ; batchTypeResolver . resolveTypes ( context , monitor ) ; EObject result = ( EObject ) context . eGet ( reference , false ) ; if ( result . eIsProxy ( ) ) return null ; return result ;
public class AttributesInformation { /** * Sets the attribute information . * @ param v the new attribute information */ public void setAttributes ( Attribute [ ] v ) { } }
this . attributes = v ; this . numberAttributes = v . length ; this . indexValues = new int [ numberAttributes ] ; for ( int i = 0 ; i < numberAttributes ; i ++ ) { this . indexValues [ i ] = i ; }
public class LockedMessageEnumerationImpl { /** * Private method to determine how many messages have been seen and not unlocked or deleted . It * does this by looking at the number of non - null elements before the current item . * Note that if an item was seen but then the cursor was reset it will become unseen again * until it is viewed with nextLocked ( ) . * @ return Returns the number of seen items that have not been deleted or unlocked . */ private int getSeenMessageCount ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( this , tc , "getSeenMessageCount" ) ; int seenMsgs = 0 ; for ( int x = 0 ; x < nextIndex ; x ++ ) { if ( messages [ x ] != null ) seenMsgs ++ ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( this , tc , "getSeenMessageCount" , "" + seenMsgs ) ; return seenMsgs ;