signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class HttpHeaderMap { /** * Set the passed header as a date header . * @ param sName * Header name . May neither be < code > null < / code > nor empty . * @ param nMillis * The milliseconds to set as a date . */ public void setDateHeader ( @ Nonnull @ Nonempty final String sName , final long nMillis ) { } }
setDateHeader ( sName , PDTFactory . createZonedDateTime ( nMillis ) ) ;
public class RandomUtil { /** * Pick a random index from the array , weighted by the value of the corresponding array * element . * @ param weights an array of non - negative floats . * @ return an index into the array , or - 1 if the sum of the weights is less than or equal to * 0.0 or any individual element is negative . For example , passing in { 0.2 , 0.0 , 0.6 , 0.8} * will return : * < pre > { @ code * 0 - 1/8th of the time * 1 - never * 2 - 3/8th of the time * 3 - half of the time * } < / pre > */ public static int getWeightedIndex ( float [ ] weights , Random r ) { } }
float sum = 0.0f ; for ( float weight : weights ) { if ( weight < 0.0f ) { return - 1 ; } sum += weight ; } if ( sum <= 0.0 ) { return - 1 ; } float pick = getFloat ( sum , r ) ; for ( int ii = 0 , nn = weights . length ; ii < nn ; ii ++ ) { pick -= weights [ ii ] ; if ( pick < 0.0 ) { return ii ; } } log . warning ( "getWeightedIndex failed" , new Throwable ( ) ) ; // Impossible ! return 0 ;
public class Inferences { /** * Null if there is no escaping mode for the given < code > { print } < / code > node . */ public ImmutableList < EscapingMode > getEscapingMode ( PrintNode printNode ) { } }
// See if we have already inferred an escaping mode for the node . ImmutableList < EscapingMode > escapingModes = nodeToEscapingModes . get ( printNode ) ; if ( escapingModes != null ) { return escapingModes ; } // Look for an escaping mode in the existing directives . ImmutableList . Builder < EscapingMode > modes = ImmutableList . builder ( ) ; for ( PrintDirectiveNode directiveNode : printNode . getChildren ( ) ) { // TODO ( lukes ) : it is mostly illegal to add an escaping directive to a template as most have // been marked as internalOnly . See if this can be simplified or deleted . EscapingMode mode = EscapingMode . fromDirective ( directiveNode . getName ( ) ) ; if ( mode != null ) { modes . add ( mode ) ; } } return modes . build ( ) ;
public class JavacParser { /** * MemberReferenceSuffix = " : : " [ TypeArguments ] Ident * | " : : " [ TypeArguments ] " new " */ JCExpression memberReferenceSuffix ( JCExpression t ) { } }
int pos1 = token . pos ; accept ( COLCOL ) ; return memberReferenceSuffix ( pos1 , t ) ;
public class BundleUtils { /** * Returns a optional short array value . In other words , returns the value mapped by key if it exists and is a short array . * The bundle argument is allowed to be { @ code null } . If the bundle is null , this method returns null . * @ param bundle a bundle . If the bundle is null , this method will return null . * @ param key a key for the value . * @ return a short array value if exists , null otherwise . * @ see android . os . Bundle # getShort ( String , short ) */ @ Nullable public static short [ ] optShortArray ( @ Nullable Bundle bundle , @ Nullable String key , @ Nullable short [ ] fallback ) { } }
if ( bundle == null ) { return fallback ; } return bundle . getShortArray ( key ) ;
public class MethodUtils { /** * Sets the value of a bean property to an Object . * @ param object the bean to change * @ param setterName the property name or setter method name * @ param arg use this argument * @ throws NoSuchMethodException the no such method exception * @ throws IllegalAccessException the illegal access exception * @ throws InvocationTargetException the invocation target exception */ public static void invokeSetter ( Object object , String setterName , Object arg ) throws NoSuchMethodException , IllegalAccessException , InvocationTargetException { } }
Object [ ] args = { arg } ; invokeSetter ( object , setterName , args ) ;
public class RowIdList { /** * { @ inheritDoc } */ @ Override public boolean contains ( final Object object ) { } }
if ( object instanceof Integer ) { int index = ( Integer ) object ; return index >= startIndex && index <= endIndex ; } return false ;
public class Applications { /** * Creates an iterator yielding the result of the transformation applied by * the function on the elements of the source array . This transformation is * evaluated lazily when the resulting iterator is consumed . E . g : * < code > * transform ( [ 1,2,3 ] , toStringTransformer ) - > [ " 1 " , " 2 " , " 3 " ] * < / code > * @ param < R > the result iterator element type parameter * @ param < E > the input array element type parameter * @ param array the array where elements are fetched from * @ param function a function used to transform each element * @ return the transformed iterator */ public static < R , E > Iterator < R > transform ( E [ ] array , Function < E , R > function ) { } }
return new TransformingIterator < > ( new ArrayIterator < E > ( array ) , function ) ;
public class CompletableCallback { /** * Tries to complete this callback ; driver code should call this method once * < em > after < / em > the asynchronous operation to detect whether the * asynchronous operation has already completed or not . * @ return whether the attempt to complete was successful . */ public boolean tryComplete ( ) { } }
while ( true ) { State current = state . get ( ) ; switch ( current ) { case IDLE : { if ( state . compareAndSet ( current , State . COMPLETED ) ) return true ; break ; } case SUCCEEDED : case FAILED : { return false ; } default : { throw new IllegalStateException ( current . toString ( ) ) ; } } }
public class JsDocTokenStream { /** * Tokenizes JSDoc comments . */ @ SuppressWarnings ( "fallthrough" ) final JsDocToken getJsDocToken ( ) { } }
int c ; stringBufferTop = 0 ; for ( ; ; ) { // eat white spaces for ( ; ; ) { charno = - 1 ; c = getChar ( ) ; if ( c == EOF_CHAR ) { return JsDocToken . EOF ; } else if ( c == '\n' ) { return JsDocToken . EOL ; } else if ( ! TokenUtil . isJSSpace ( c ) ) { break ; } } switch ( c ) { // annotation , e . g . @ type or @ constructor case '@' : do { c = getChar ( ) ; if ( isAlpha ( c ) ) { addToString ( c ) ; } else { ungetChar ( c ) ; this . string = getStringFromBuffer ( ) ; stringBufferTop = 0 ; return JsDocToken . ANNOTATION ; } } while ( true ) ; case '*' : if ( matchChar ( '/' ) ) { return JsDocToken . EOC ; } else { return JsDocToken . STAR ; } case ',' : return JsDocToken . COMMA ; case '>' : return JsDocToken . RIGHT_ANGLE ; case '(' : return JsDocToken . LEFT_PAREN ; case ')' : return JsDocToken . RIGHT_PAREN ; case '{' : return JsDocToken . LEFT_CURLY ; case '}' : return JsDocToken . RIGHT_CURLY ; case '[' : return JsDocToken . LEFT_SQUARE ; case ']' : return JsDocToken . RIGHT_SQUARE ; case '?' : return JsDocToken . QMARK ; case '!' : return JsDocToken . BANG ; case ':' : return JsDocToken . COLON ; case '=' : return JsDocToken . EQUALS ; case '|' : return JsDocToken . PIPE ; case '<' : return JsDocToken . LEFT_ANGLE ; case '.' : c = getChar ( ) ; if ( c == '<' ) { return JsDocToken . LEFT_ANGLE ; } else { if ( c == '.' ) { c = getChar ( ) ; if ( c == '.' ) { return JsDocToken . ELLIPSIS ; } else { addToString ( '.' ) ; } } // we may backtrack across line boundary ungetBuffer [ ungetCursor ++ ] = c ; c = '.' ; } // fall through default : { // recognize a JsDoc string but discard last . if it is followed by // a non - JsDoc comment char , e . g . Array . < int c1 = c ; addToString ( c ) ; int c2 = getChar ( ) ; if ( ! isJSDocString ( c2 ) ) { ungetChar ( c2 ) ; this . string = getStringFromBuffer ( ) ; stringBufferTop = 0 ; return JsDocToken . STRING ; } else { do { c1 = c2 ; c2 = getChar ( ) ; if ( c1 == '.' 
&& c2 == '<' ) { ungetChar ( c2 ) ; ungetChar ( c1 ) ; this . string = getStringFromBuffer ( ) ; stringBufferTop = 0 ; return JsDocToken . STRING ; } else { if ( isJSDocString ( c2 ) ) { addToString ( c1 ) ; } else { ungetChar ( c2 ) ; addToString ( c1 ) ; this . string = getStringFromBuffer ( ) ; stringBufferTop = 0 ; return JsDocToken . STRING ; } } } while ( true ) ; } } } }
public class SanitizedContents { /** * Converts a { @ link TrustedResourceUrlProto } into a Soy { @ link SanitizedContent } of kind * TRUSTED _ RESOURCE _ URI . */ public static SanitizedContent fromTrustedResourceUrlProto ( TrustedResourceUrlProto url ) { } }
return SanitizedContent . create ( TrustedResourceUrls . fromProto ( url ) . getTrustedResourceUrlString ( ) , ContentKind . TRUSTED_RESOURCE_URI ) ;
public class LotRoute { /** * Retries the list of comments for a lot / file * @ param id ID of the lot / file * @ return List of comments */ public List < Comment > getComment ( int id ) { } }
ClientResource resource = new ClientResource ( Route . GET_COMMENT . url ( id ) ) ; try { String result = resource . get ( ) . getText ( ) ; return mapper . readValue ( result , new TypeReference < List < Comment > > ( ) { } ) ; } catch ( IOException ex ) { LEX4JLogger . log ( Level . WARNING , "Could not retrieve lot comments correctly!" ) ; return null ; }
public class URIUtils { /** * Helper method for modifying URI authority * @ param uri * @ param newAuthority * @ return */ public static String modifyURIAuthority ( String uri , String newAuthority ) { } }
try { URI uriObj = new URI ( uri ) ; // code below modifies new authority considering also network interface syntax Pattern pattern = Pattern . compile ( NETWORK_INTERFACE_AUTHORITY ) ; Matcher matcher = pattern . matcher ( newAuthority ) ; String matchedToken = MOCK_HOST ; // if newAuthority corresponds to NetworkInterfaceURI syntax if ( matcher . find ( ) ) { matchedToken = matcher . group ( 0 ) ; newAuthority = newAuthority . replace ( matchedToken , MOCK_HOST ) ; } URI modifiedURIAuthority = URLUtils . modifyURIAuthority ( uriObj , newAuthority ) ; String uriWithModifiedAuthority = URIUtils . uriToString ( modifiedURIAuthority ) . replace ( MOCK_HOST , matchedToken ) ; return uriWithModifiedAuthority ; } catch ( URISyntaxException e ) { try { return ( new NetworkInterfaceURI ( uri ) ) . modifyURIAuthority ( newAuthority ) ; } catch ( IllegalArgumentException ne ) { throw new IllegalArgumentException ( ne . getMessage ( ) , ne ) ; } }
public class ConfigManager { /** * will throw exception if key is not found */ public void set ( final String originalKey , final Object object ) { } }
final String key = applicationName + "." + originalKey ; if ( object == null || ( object instanceof String && ( ( String ) object ) . length ( ) == 0 ) ) { // remove application . setConfig ( application . getConfig ( ) . withoutPath ( key ) ) ; } else { // create or update ConfigValue value = ConfigValueFactory . fromAnyRef ( object , "modified at runtime " ) ; application . setConfig ( application . getConfig ( ) . withValue ( key , value ) ) ; } // merge global config ( no reloading ) config = application . getConfig ( ) . withFallback ( user . getConfig ( ) ) . withFallback ( system . getConfig ( ) ) ;
public class JsMessageVisitor { /** * Returns the string value associated with a node representing a JS string or * several JS strings added together ( e . g . { @ code ' str ' } or { @ code ' s ' + ' t ' + * @ param node the node from where we extract the string * @ return String representation of the node * @ throws MalformedException if the parsed message is invalid */ private static String extractStringFromStringExprNode ( Node node ) throws MalformedException { } }
switch ( node . getToken ( ) ) { case STRING : return node . getString ( ) ; case TEMPLATELIT : if ( node . hasOneChild ( ) ) { // Cooked string can be null only for tagged template literals . // A tagged template literal would hit the default case below . return checkNotNull ( node . getFirstChild ( ) . getCookedString ( ) ) ; } else { throw new MalformedException ( "Template literals with substitutions are not allowed." , node ) ; } case ADD : StringBuilder sb = new StringBuilder ( ) ; for ( Node child : node . children ( ) ) { sb . append ( extractStringFromStringExprNode ( child ) ) ; } return sb . toString ( ) ; default : throw new MalformedException ( "STRING or ADD node expected; found: " + node . getToken ( ) , node ) ; }
public class authorizationpolicy_binding { /** * Use this API to fetch authorizationpolicy _ binding resource of given name . */ public static authorizationpolicy_binding get ( nitro_service service , String name ) throws Exception { } }
authorizationpolicy_binding obj = new authorizationpolicy_binding ( ) ; obj . set_name ( name ) ; authorizationpolicy_binding response = ( authorizationpolicy_binding ) obj . get_resource ( service ) ; return response ;
public class Bank { /** * Sets the team id . Can only be called once , otherwise it will throw an exception * @ param newId the team id of the bank */ final public void setTeamId ( int newId ) { } }
if ( teamId != - 1 ) { throw new IllegalStateException ( "Team Id cannot be modified" ) ; } SecurityManager sm = System . getSecurityManager ( ) ; if ( sm != null ) { sm . checkPermission ( new GamePermission ( "setTeamId" ) ) ; } teamId = newId ;
public class Raster { /** * Load raster from media . * @ param media The raster media ( must not be < code > null < / code > ) . * @ return The raster channels data . * @ throws LionEngineException If < code > null < / code > argument or unable to read data . */ public static Raster load ( Media media ) { } }
Check . notNull ( media ) ; final Xml root = new Xml ( media ) ; final RasterData dataRed = RasterData . load ( root , CHANNEL_RED ) ; final RasterData dataGreen = RasterData . load ( root , CHANNEL_GREEN ) ; final RasterData dataBlue = RasterData . load ( root , CHANNEL_BLUE ) ; return new Raster ( dataRed , dataGreen , dataBlue ) ;
public class TarFileInputStream { /** * readBlock ( ) and readNextHeaderBlock are the methods that USERS of this * class should use to read header blocks from the tar file . * readNextHeaderBlock continues working through the Tar File from the * current point until it finds a block with a non - 0 first byte . * @ return True if a header block was read and place at beginning of the * readBuffer array . False if EOF was encountered without finding * any blocks with first byte ! = 0 . If false is returned , we have * automatically closed the this TarFileInputStream too . * @ see # readBlock */ public boolean readNextHeaderBlock ( ) throws IOException , TarMalformatException { } }
// We read a - byte - at - a - time because there should only be 2 empty blocks // between each Tar Entry . try { while ( readStream . available ( ) > 0 ) { readBlock ( ) ; if ( readBuffer [ 0 ] != 0 ) { return true ; } } } catch ( EOFException ee ) { /* This is a work - around . * Sun Java ' s inputStream . available ( ) works like crap . * Reach this point when performing a read of a GZip stream when * . available = = 1 , which according to API Spec , should not happen . * We treat this condition exactly as if readStream . available is 0, * which it should be . */ } close ( ) ; return false ;
public class Range { /** * Sets the range from a string representation . * @ param range the start and end string */ public void setRange ( String range ) { } }
String single = range . trim ( ) ; int hyphenIndex = range . indexOf ( '-' ) ; if ( hyphenIndex > 0 ) { this . start = rangeSingle ( range . substring ( 0 , hyphenIndex ) ) ; this . end = rangeSingle ( range . substring ( hyphenIndex + 1 ) ) ; } else { int number = rangeSingle ( range ) ; if ( number >= 0 ) { // first n attributes this . start = 0 ; this . end = number ; } else { // last n attributes this . start = this . upperLimit + number > 0 ? this . upperLimit + number : 0 ; this . end = this . upperLimit - 1 ; } }
public class AWSRoboMakerClient { /** * Describes a robot application . * @ param describeRobotApplicationRequest * @ return Result of the DescribeRobotApplication operation returned by the service . * @ throws InvalidParameterException * A parameter specified in a request is not valid , is unsupported , or cannot be used . The returned message * provides an explanation of the error value . * @ throws ResourceNotFoundException * The specified resource does not exist . * @ throws ThrottlingException * AWS RoboMaker is temporarily unable to process the request . Try your call again . * @ throws InternalServerException * AWS RoboMaker experienced a service issue . Try your call again . * @ sample AWSRoboMaker . DescribeRobotApplication * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / robomaker - 2018-06-29 / DescribeRobotApplication " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeRobotApplicationResult describeRobotApplication ( DescribeRobotApplicationRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeRobotApplication ( request ) ;
public class Call { static < T1 > Flowable < Notification < CallableResultSet1 < T1 > > > createWithOneResultSet ( Single < Connection > connection , String sql , Flowable < List < Object > > parameterGroups , List < ParameterPlaceholder > parameterPlaceholders , Function < ? super ResultSet , ? extends T1 > f1 , int fetchSize ) { } }
return connection . toFlowable ( ) . flatMap ( con -> createWithOneResultSet ( con , sql , parameterGroups , parameterPlaceholders , f1 , fetchSize ) ) ;
public class CmsPointerLinkValidatorReport { /** * Performs the pointer link validation report , will be called by the JSP page . < p > * @ throws JspException if problems including sub - elements occur */ public void actionReport ( ) throws JspException { } }
// save initialized instance of this class in request attribute for included sub - elements getJsp ( ) . getRequest ( ) . setAttribute ( SESSION_WORKPLACE_CLASS , this ) ; switch ( getAction ( ) ) { case ACTION_REPORT_END : actionCloseDialog ( ) ; break ; case ACTION_REPORT_UPDATE : setParamAction ( REPORT_UPDATE ) ; getJsp ( ) . include ( FILE_REPORT_OUTPUT ) ; break ; case ACTION_REPORT_BEGIN : case ACTION_CONFIRMED : default : CmsExternalLinksValidatorThread thread = new CmsExternalLinksValidatorThread ( getCms ( ) , null ) ; thread . start ( ) ; setParamAction ( REPORT_BEGIN ) ; setParamThread ( thread . getUUID ( ) . toString ( ) ) ; Map params = new HashMap ( 1 ) ; params . put ( PARAM_CLOSELINK , CmsToolManager . linkForToolPath ( getJsp ( ) , "/linkchecking" ) ) ; getJsp ( ) . include ( FILE_REPORT_OUTPUT , null , params ) ; break ; }
public class ByteArraySegment { /** * region ArrayView Implementation */ @ Override public byte get ( int index ) { } }
Preconditions . checkElementIndex ( index , this . length , "index" ) ; return this . array [ index + this . startOffset ] ;
public class AdditiveExpression { /** * Evaluates this additive expression . Either performs numeric addition / subtraction or * String concatination . */ public Object evaluate ( ) { } }
if ( ! isCompileTimeConstant ( ) ) { return super . evaluate ( ) ; } return evaluate ( getType ( ) , getLHS ( ) . evaluate ( ) , getRHS ( ) . evaluate ( ) , getLHS ( ) . getType ( ) , getRHS ( ) . getType ( ) , isAdditive ( ) , false , ! getType ( ) . equals ( JavaTypes . STRING ( ) ) ) ;
public class SAReducerDecorator { /** * Create a new soil filled with aggregated data coming from soils set as * input parameters . * @ param fullCurrentSoil * @ param previousSoil * @ return */ public HashMap < String , String > computeSoil ( Map < String , String > fullCurrentSoil , Map < String , String > previousSoil ) { } }
HashMap < String , String > aggregatedSoil ; String fullCurrentValue ; String previousValue ; Float newValue ; newValue = 0f ; aggregatedSoil = new HashMap < String , String > ( ) ; for ( String p : allParams ) { if ( SLLB . equals ( p ) ) { newValue = ( parseFloat ( fullCurrentSoil . get ( p ) ) + parseFloat ( previousSoil . get ( p ) ) ) ; } else if ( ( ICNH4 . equals ( p ) && fullCurrentSoil . containsKey ( ICNH4 ) && previousSoil . containsKey ( ICNH4 ) ) || ICNO3 . equals ( p ) && fullCurrentSoil . containsKey ( ICNO3 ) && previousSoil . containsKey ( ICNO3 ) ) { newValue = computeInitialConditions ( p , fullCurrentSoil , previousSoil ) ; } else { fullCurrentValue = fullCurrentSoil . get ( p ) == null ? LayerReducerUtil . defaultValue ( p ) : fullCurrentSoil . get ( p ) ; previousValue = previousSoil . get ( p ) == null ? LayerReducerUtil . defaultValue ( p ) : previousSoil . get ( p ) ; newValue = ( parseFloat ( fullCurrentValue ) + parseFloat ( previousValue ) ) / 2f ; } aggregatedSoil . put ( p , newValue . toString ( ) ) ; } return aggregatedSoil ;
public class LayoutMap { /** * Removes the column value mapping for this key from this map if it is present . < p > * Returns the value to which the map previously associated the key , or { @ code null } if the map * contained no mapping for this key . The map will not contain a String mapping for the * specified key once the call returns . * @ param key key whose mapping is to be removed from the map . * @ return previous value associated with specified key , or { @ code null } if there was no mapping * for key . * @ throws NullPointerException if { @ code key } is { @ code null } . * @ see Map # remove ( Object ) */ public String columnRemove ( String key ) { } }
String resolvedKey = resolveColumnKey ( key ) ; columnMapCache . clear ( ) ; return columnMap . remove ( resolvedKey ) ;
public class BenchmarkResult { /** * Adding a dataset to a given meter and adapting the underlaying result model . * @ param meth where the result is corresponding to * @ param meter where the result is corresponding to * @ param data the data itself */ public void addData ( final BenchmarkMethod meth , final AbstractMeter meter , final double data ) { } }
final Class < ? > clazz = meth . getMethodToBench ( ) . getDeclaringClass ( ) ; if ( ! elements . containsKey ( clazz ) ) { elements . put ( clazz , new ClassResult ( clazz ) ) ; } final ClassResult clazzResult = elements . get ( clazz ) ; if ( ! clazzResult . elements . containsKey ( meth ) ) { clazzResult . elements . put ( meth , new MethodResult ( meth ) ) ; } final MethodResult methodResult = clazzResult . elements . get ( meth ) ; methodResult . addData ( meter , data ) ; clazzResult . addData ( meter , data ) ; this . addData ( meter , data ) ; for ( final AbstractOutput output : outputs ) { output . listenToResultSet ( meth , meter , data ) ; }
public class ShibbolethCompatiblePersistentIdGenerator { /** * Digest and encode with salt string . * @ param md the md * @ return the string */ protected String digestAndEncodeWithSalt ( final MessageDigest md ) { } }
val sanitizedSalt = StringUtils . replace ( salt , "\n" , " " ) ; val digested = md . digest ( sanitizedSalt . getBytes ( StandardCharsets . UTF_8 ) ) ; return EncodingUtils . encodeBase64 ( digested , false ) ;
public class AbstractRadial { /** * Enables / disables the pointer shadow * @ param POINTER _ SHADOW _ VISIBLE */ public void setPointerShadowVisible ( final boolean POINTER_SHADOW_VISIBLE ) { } }
getModel ( ) . setPointerShadowVisible ( POINTER_SHADOW_VISIBLE ) ; init ( getInnerBounds ( ) . width , getInnerBounds ( ) . height ) ; repaint ( getInnerBounds ( ) ) ;
public class ParameterFormatter { /** * This method performs a deep toString of the given Object . * Primitive arrays are converted using their respective Arrays . toString methods while * special handling is implemented for " container types " , i . e . Object [ ] , Map and Collection because those could * contain themselves . * It should be noted that neither AbstractMap . toString ( ) nor AbstractCollection . toString ( ) implement such a * behavior . They only check if the container is directly contained in itself , but not if a contained container * contains the original one . Because of that , Arrays . toString ( Object [ ] ) isn ' t safe either . * Confusing ? Just read the last paragraph again and check the respective toString ( ) implementation . * This means , in effect , that logging would produce a usable output even if an ordinary System . out . println ( o ) * would produce a relatively hard - to - debug StackOverflowError . * @ param o The object . * @ return The String representation . */ static String deepToString ( final Object o ) { } }
if ( o == null ) { return null ; } if ( o instanceof String ) { return ( String ) o ; } final StringBuilder str = new StringBuilder ( ) ; final Set < String > dejaVu = new HashSet < > ( ) ; // that ' s actually a neat name ; ) recursiveDeepToString ( o , str , dejaVu ) ; return str . toString ( ) ;
public class CorsConfig { /** * Returns the policy for the specified { @ code origin } . * @ return { @ link CorsPolicy } which allows the { @ code origin } , * { @ code null } if the { @ code origin } is not allowed in any policy . */ @ Nullable public CorsPolicy getPolicy ( String origin , PathMappingContext pathMappingContext ) { } }
requireNonNull ( origin , "origin" ) ; if ( isAnyOriginSupported ( ) ) { return Iterables . getFirst ( policies , null ) ; } final String lowerCaseOrigin = Ascii . toLowerCase ( origin ) ; final boolean isNullOrigin = CorsService . NULL_ORIGIN . equals ( lowerCaseOrigin ) ; for ( final CorsPolicy policy : policies ) { if ( isNullOrigin && policy . isNullOriginAllowed ( ) && isPathMatched ( policy , pathMappingContext ) ) { return policy ; } else if ( ! isNullOrigin && policy . origins ( ) . contains ( lowerCaseOrigin ) && isPathMatched ( policy , pathMappingContext ) ) { return policy ; } } return null ;
public class AlphabeticIndex { /** * Add a record ( name and data ) to the index . The name will be used to sort the items into buckets , and to sort * within the bucket . Two records may have the same name . When they do , the sort order is according to the order added : * the first added comes first . * @ param name * Name , such as a name * @ param data * Data , such as an address or link * @ return this , for chaining */ public AlphabeticIndex < V > addRecord ( CharSequence name , V data ) { } }
// TODO instead of invalidating , just add to unprocessed list . buckets = null ; // invalidate old bucketlist if ( inputList == null ) { inputList = new ArrayList < Record < V > > ( ) ; } inputList . add ( new Record < V > ( name , data ) ) ; return this ;
public class ComponentTag { /** * restores all replaced values and removes all attributes declared in this tag */ protected void restoreAttributes ( ) { } }
if ( replacedAttributes != null ) { for ( int scope = 0 ; scope < replacedAttributes . size ( ) ; scope ++ ) { Map < String , Object > replaced = replacedAttributes . get ( scope ) ; for ( Map . Entry < String , Object > entry : replaced . entrySet ( ) ) { String key = entry . getKey ( ) ; Object value = entry . getValue ( ) ; if ( value != null ) { pageContext . setAttribute ( key , value , scope ) ; } else { pageContext . removeAttribute ( key , scope ) ; } } } }
public class ResourcesAggregatorImpl { /** * Iterate over the list of { @ link BasicInclude } sub - classes using the { @ link AggregatorCallback # willAggregate ( BasicInclude , BasicInclude ) } * and { @ link AggregatorCallback # aggregate ( Deque ) } to generate an aggregated list of { @ link BasicInclude } sub - classes . */ protected < T extends BasicInclude > List < T > aggregateBasicIncludes ( List < T > original , AggregatorCallback < T > callback ) throws IOException { } }
final List < T > result = new LinkedList < T > ( ) ; final Deque < T > currentAggregateList = new LinkedList < T > ( ) ; for ( final T originalElement : original ) { // handle first loop iteration if ( currentAggregateList . isEmpty ( ) ) { currentAggregateList . add ( originalElement ) ; } else { // test if ' originalElement ' will aggregate with head element in currentAggregate final T baseElement = currentAggregateList . getFirst ( ) ; if ( callback . willAggregate ( originalElement , baseElement ) ) { // matches current criteria , add to currentAggregate currentAggregateList . add ( originalElement ) ; } else { // doesn ' t match criteria // generate new single aggregate from currentAggregateList final T aggregate = callback . aggregate ( currentAggregateList ) ; if ( null != aggregate ) { // push result result . add ( aggregate ) ; } else { this . logger . warn ( "Generated 0 byte aggregate from: " + generatePathList ( currentAggregateList ) ) ; } // zero out currentAggregateList currentAggregateList . clear ( ) ; // add originalElement to empty list currentAggregateList . add ( originalElement ) ; } } } // flush the currentAggregateList if ( currentAggregateList . size ( ) > 0 ) { final T aggregate = callback . aggregate ( currentAggregateList ) ; if ( null != aggregate ) { result . add ( aggregate ) ; } else { this . logger . warn ( "Generated 0 byte aggregate from: " + generatePathList ( currentAggregateList ) ) ; } } return result ;
public class PDTWebDateHelper { /** * create a W3C Date Time representation of a date time using UTC date time * zone . * @ param aDateTime * Date to print . May not be < code > null < / code > . * @ return the W3C Date Time represented by the given Date . */ @ Nullable public static String getAsStringW3C ( @ Nullable final ZonedDateTime aDateTime ) { } }
if ( aDateTime == null ) return null ; final DateTimeFormatter aFormatter = PDTFormatter . getForPattern ( FORMAT_W3C , LOCALE_TO_USE ) ; return aFormatter . format ( aDateTime ) ;
public class Index { /** * Search for synonyms * @ param query the query * @ param requestOptions Options to pass to this request */ public JSONObject searchSynonyms ( SynonymQuery query , RequestOptions requestOptions ) throws AlgoliaException , JSONException { } }
JSONObject body = new JSONObject ( ) . put ( "query" , query . getQueryString ( ) ) ; if ( query . hasTypes ( ) ) { StringBuilder type = new StringBuilder ( ) ; boolean first = true ; for ( SynonymQuery . SynonymType t : query . getTypes ( ) ) { if ( ! first ) { type . append ( "," ) ; } type . append ( t . name ) ; first = false ; } body = body . put ( "type" , type . toString ( ) ) ; } if ( query . getPage ( ) != null ) { body = body . put ( "page" , query . getPage ( ) ) ; } if ( query . getHitsPerPage ( ) != null ) { body = body . put ( "hitsPerPage" , query . getHitsPerPage ( ) ) ; } return client . postRequest ( "/1/indexes/" + encodedIndexName + "/synonyms/search" , body . toString ( ) , false , true , requestOptions ) ;
public class StorageSnippets { /** * [ VARIABLE 42] */ public Acl blobToPublicRead ( String bucketName , String blobName , long blobGeneration ) { } }
// [ START storageMakePublic ] BlobId blobId = BlobId . of ( bucketName , blobName , blobGeneration ) ; Acl acl = storage . createAcl ( blobId , Acl . of ( User . ofAllUsers ( ) , Role . READER ) ) ; // [ END storageMakePublic ] return acl ;
public class Calendar { /** * Returns the pseudo - time - stamp for two fields , given their * individual pseudo - time - stamps . If either of the fields * is unset , then the aggregate is unset . Otherwise , the * aggregate is the later of the two stamps . */ private static int aggregateStamp ( int stamp_a , int stamp_b ) { } }
if ( stamp_a == UNSET || stamp_b == UNSET ) { return UNSET ; } return ( stamp_a > stamp_b ) ? stamp_a : stamp_b ;
public class Ifc4PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ @ Override public EClass getIfcStructuralConnectionCondition ( ) { } }
if ( ifcStructuralConnectionConditionEClass == null ) { ifcStructuralConnectionConditionEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc4Package . eNS_URI ) . getEClassifiers ( ) . get ( 632 ) ; } return ifcStructuralConnectionConditionEClass ;
public class SQLUtilityImpl { /** * Get a long string , which could be a TEXT or CLOB type . ( CLOBs require * special handling - - this method normalizes the reading of them ) */ @ Override protected String i_getLongString ( ResultSet rs , int pos ) throws SQLException { } }
String s = rs . getString ( pos ) ; if ( s != null ) { // It ' s a String - based datatype , so just return it . return s ; } else { // It may be a CLOB . If so , return the contents as a String . try { Clob c = rs . getClob ( pos ) ; return c . getSubString ( 1 , ( int ) c . length ( ) ) ; } catch ( Throwable th ) { th . printStackTrace ( ) ; return null ; } }
public class KeyPadPanel { /** * Initialize a button . * @ param button * the button * @ param foreground * the foreground * @ param background * the background */ protected void initializeButton ( final Button button , final Color foreground , final Color background ) { } }
button . setForeground ( foreground ) ; button . setBackground ( background ) ;
public class BaseKdDao { /** * { @ inheritDoc } */ @ Override public void delete ( String spaceId , String key , IDeleteCallback callback ) throws IOException { } }
kdStorage . delete ( spaceId , key , new IDeleteCallback ( ) { @ Override public void onSuccess ( String spaceId , String key ) { // invalidate cache upon successful deletion invalidateCacheEntry ( spaceId , key ) ; // invoke callback callback . onSuccess ( spaceId , key ) ; } @ Override public void onError ( String spaceId , String key , Throwable t ) { // invoke callback callback . onError ( spaceId , key , t ) ; } } ) ;
public class SpdLoadAggregate { /** * Check data type and call super to remove data - synchronized in super */ public boolean remove ( SpdData data ) { } }
if ( data == null ) return false ; if ( data instanceof SpdLoad ) { return super . remove ( data ) ; } else { return false ; }
public class GVRCameraRig { /** * Sets the distance from the origin to the far clipping plane for the * whole camera rig . * @ param far * Distance to the far clipping plane . */ public void setFarClippingDistance ( float far ) { } }
if ( leftCamera instanceof GVRCameraClippingDistanceInterface && centerCamera instanceof GVRCameraClippingDistanceInterface && rightCamera instanceof GVRCameraClippingDistanceInterface ) { ( ( GVRCameraClippingDistanceInterface ) leftCamera ) . setFarClippingDistance ( far ) ; centerCamera . setFarClippingDistance ( far ) ; ( ( GVRCameraClippingDistanceInterface ) rightCamera ) . setFarClippingDistance ( far ) ; }
public class AtomContainerComparator { /** * < p > Compares two IAtomContainers for order with the following criteria * with decreasing priority : < / p > < ul > < li > Compare atom count < li > Compare * molecular weight ( heavy atoms only ) < li > Compare bond count < li > Compare * sum of bond orders ( heavy atoms only ) < / ul > < p > If no difference can be * found with the above criteria , the IAtomContainers are considered * equal . < / p > < p > Returns a negative integer , zero , or a positive integer as * the first argument is less than , equal to , or greater than the * second . < / p > < p > This method is null safe . < / p > * @ param o1 the first IAtomContainer * @ param o2 the second IAtomContainer * @ return a negative integer , zero , or a positive integer as the first * argument is less than , equal to , or greater than the second . */ @ Override public int compare ( IAtomContainer o1 , IAtomContainer o2 ) { } }
// Check for nulls if ( o1 == null && o2 == null ) return 0 ; if ( o1 == null ) return 1 ; if ( o2 == null ) return - 1 ; IAtomContainer atomContainer1 = o1 ; IAtomContainer atomContainer2 = o2 ; // 1 . Compare atom count if ( atomContainer1 . getAtomCount ( ) > atomContainer2 . getAtomCount ( ) ) return 1 ; else if ( atomContainer1 . getAtomCount ( ) < atomContainer2 . getAtomCount ( ) ) return - 1 ; else { // 2 . Atom count equal , compare molecular weight ( heavy atoms only ) double mw1 = 0 ; double mw2 = 0 ; try { mw1 = getMolecularWeight ( atomContainer1 ) ; mw2 = getMolecularWeight ( atomContainer2 ) ; } catch ( CDKException e ) { logger . warn ( "Exception in molecular mass calculation." ) ; return 0 ; } if ( mw1 > mw2 ) return 1 ; else if ( mw1 < mw2 ) return - 1 ; else { // 3 . Molecular weight equal , compare bond count if ( atomContainer1 . getBondCount ( ) > atomContainer2 . getBondCount ( ) ) return 1 ; else if ( atomContainer1 . getBondCount ( ) < atomContainer2 . getBondCount ( ) ) return - 1 ; else { // 4 . Bond count equal , compare sum of bond orders ( heavy atoms only ) double bondOrderSum1 = AtomContainerManipulator . getSingleBondEquivalentSum ( atomContainer1 ) ; double bondOrderSum2 = AtomContainerManipulator . getSingleBondEquivalentSum ( atomContainer2 ) ; if ( bondOrderSum1 > bondOrderSum2 ) return 1 ; else if ( bondOrderSum1 < bondOrderSum2 ) return - 1 ; } } } // AtomContainers are equal in terms of this comparator return 0 ;
public class sslparameter { /** * Use this API to fetch all the sslparameter resources that are configured on netscaler . */ public static sslparameter get ( nitro_service service ) throws Exception { } }
sslparameter obj = new sslparameter ( ) ; sslparameter [ ] response = ( sslparameter [ ] ) obj . get_resources ( service ) ; return response [ 0 ] ;
public class MoreElements { /** * Returns a { @ link Predicate } that can be used to filter elements by { @ link Modifier } . * The predicate returns { @ code true } if the input { @ link Element } has all of the given * { @ code modifiers } , perhaps in addition to others . * < p > Here is an example how one could get a List of static methods of a class : * < pre > { @ code * FluentIterable . from ( ElementFilter . methodsIn ( clazzElement . getEnclosedElements ( ) ) ) * . filter ( MoreElements . hasModifiers ( Modifier . STATIC ) . toList ( ) ; * } < / pre > */ public static < T extends Element > Predicate < T > hasModifiers ( Modifier ... modifiers ) { } }
return hasModifiers ( ImmutableSet . copyOf ( modifiers ) ) ;
public class XMLAssert { /** * Assert that the node lists of two Xpaths in two documents are NOT equal * @ param controlXpath * @ param testXpath * @ param controlDocument * @ param testDocument * @ see XpathEngine */ public static void assertXpathsNotEqual ( String controlXpath , Document controlDocument , String testXpath , Document testDocument ) throws XpathException { } }
assertXpathEquality ( controlXpath , controlDocument , testXpath , testDocument , false ) ;
public class DamerauLevenshteinDistance { /** * ( non - Javadoc ) * @ see mtas . codec . util . distance . LevenshteinDistance # compute ( java . lang . String ) */ @ Override public double compute ( String key ) { } }
double [ ] [ ] state = _start ( ) ; char ch2 = 0x00 ; for ( char ch1 : key . toCharArray ( ) ) { if ( ch1 == 0x00 ) { break ; } state = _step ( state , ch1 , ch2 ) ; ch2 = ch1 ; } return _distance ( state ) ;
public class Math { /** * The Spearman Rank Correlation Coefficient is a form of the Pearson * coefficient with the data converted to rankings ( ie . when variables * are ordinal ) . It can be used when there is non - parametric data and hence * Pearson cannot be used . */ public static double spearman ( float [ ] x , float [ ] y ) { } }
if ( x . length != y . length ) { throw new IllegalArgumentException ( "Input vector sizes are different." ) ; } int n = x . length ; double [ ] wksp1 = new double [ n ] ; double [ ] wksp2 = new double [ n ] ; for ( int j = 0 ; j < n ; j ++ ) { wksp1 [ j ] = x [ j ] ; wksp2 [ j ] = y [ j ] ; } QuickSort . sort ( wksp1 , wksp2 ) ; double sf = crank ( wksp1 ) ; QuickSort . sort ( wksp2 , wksp1 ) ; double sg = crank ( wksp2 ) ; double d = 0.0 ; for ( int j = 0 ; j < n ; j ++ ) { d += Math . sqr ( wksp1 [ j ] - wksp2 [ j ] ) ; } int en = n ; double en3n = en * en * en - en ; double fac = ( 1.0 - sf / en3n ) * ( 1.0 - sg / en3n ) ; double rs = ( 1.0 - ( 6.0 / en3n ) * ( d + ( sf + sg ) / 12.0 ) ) / Math . sqrt ( fac ) ; return rs ;
public class MetaService { /** * Merges the two Meta objects * @ param metaData1 * First metadata object * @ param metaData2 * Second metadata object * @ return Merged metadata */ @ SuppressWarnings ( { } }
"unchecked" } ) public static IMeta mergeMeta ( IMetaData < ? , ? > metaData1 , IMetaData < ? , ? > metaData2 ) { // walk the entries and merge them // 1 . higher number values trump lower ones // 2 . true considered higher than false // 3 . strings are not replaced Map < String , Object > map1 = ( ( Map < String , Object > ) metaData1 ) ; Set < Entry < String , Object > > set1 = map1 . entrySet ( ) ; Map < String , Object > map2 = ( ( Map < String , Object > ) metaData2 ) ; Set < Entry < String , Object > > set2 = map2 . entrySet ( ) ; // map to hold updates / replacements Map < String , Object > rep = new HashMap < String , Object > ( ) ; // loop to update common elements for ( Entry < String , Object > entry1 : set1 ) { String key1 = entry1 . getKey ( ) ; if ( map2 . containsKey ( key1 ) ) { Object value1 = map1 . get ( key1 ) ; Object value2 = map2 . get ( key1 ) ; // we dont replace strings // check numbers if ( value1 instanceof Double ) { if ( Double . valueOf ( value1 . toString ( ) ) . doubleValue ( ) < Double . valueOf ( value2 . toString ( ) ) . doubleValue ( ) ) { rep . put ( key1 , value2 ) ; } } else if ( value1 instanceof Integer ) { if ( Integer . valueOf ( value1 . toString ( ) ) . intValue ( ) < Integer . valueOf ( value2 . toString ( ) ) . intValue ( ) ) { rep . put ( key1 , value2 ) ; } } else if ( value1 instanceof Long ) { if ( Long . valueOf ( value1 . toString ( ) ) . longValue ( ) < Long . valueOf ( value2 . toString ( ) ) . longValue ( ) ) { rep . put ( key1 , value2 ) ; } } // check boolean if ( value1 instanceof Boolean ) { // consider true > false if ( ! Boolean . valueOf ( value1 . toString ( ) ) && Boolean . valueOf ( value2 . toString ( ) ) ) { rep . put ( key1 , value2 ) ; } } } } // remove all changed set1 . removeAll ( rep . entrySet ( ) ) ; // add the updates set1 . addAll ( rep . entrySet ( ) ) ; // perform a union / adds all elements missing from set1 set1 . 
addAll ( set2 ) ; // return the original object with merges return metaData1 ;
public class Problem { /** * see { @ link Train # readProblem ( File , double ) } */ public static Problem readFromFile ( File file , double bias ) throws IOException , InvalidInputDataException { } }
return Train . readProblem ( file , bias ) ;
public class GVRAudioManager { /** * Remove all of the audio sources from the audio manager . * This will stop all sound from playing . */ public void clearSources ( ) { } }
synchronized ( mAudioSources ) { for ( GVRAudioSource source : mAudioSources ) { source . setListener ( null ) ; } mAudioSources . clear ( ) ; }
public class AstaTextFileReader { /** * Reads the file version and configures the expected file format . * @ param token token containing the file version * @ throws MPXJException */ private void processFileType ( String token ) throws MPXJException { } }
String version = token . substring ( 2 ) . split ( " " ) [ 0 ] ; // System . out . println ( version ) ; Class < ? extends AbstractFileFormat > fileFormatClass = FILE_VERSION_MAP . get ( Integer . valueOf ( version ) ) ; if ( fileFormatClass == null ) { throw new MPXJException ( "Unsupported PP file format version " + version ) ; } try { AbstractFileFormat format = fileFormatClass . newInstance ( ) ; m_tableDefinitions = format . tableDefinitions ( ) ; m_epochDateFormat = format . epochDateFormat ( ) ; } catch ( Exception ex ) { throw new MPXJException ( "Failed to configure file format" , ex ) ; }
public class SimpleFormValidator { /** * Allows to add custom validator to field */ public FormInputValidator custom ( VisValidatableTextField field , FormInputValidator customValidator ) { } }
field . addValidator ( customValidator ) ; add ( field ) ; return customValidator ;
public class CmsNavModePropertyEditor { /** * Builds a single form field . < p > * @ param entryId the entry id * @ param defaultFileId the default file id * @ param ownProps the entry ' s own properties * @ param defaultFileProps the default file properties * @ param propName the property name */ private void buildSimpleField ( String entryId , String defaultFileId , Map < String , CmsClientProperty > ownProps , Map < String , CmsClientProperty > defaultFileProps , String propName ) { } }
CmsXmlContentProperty propDef = m_propertyConfig . get ( propName ) ;
CmsClientProperty fileProp = defaultFileProps == null ? null : defaultFileProps . get ( propName ) ;
CmsClientProperty ownProp = ownProps . get ( propName ) ;
// Pick the value to edit: a non-empty default-file property wins, then the
// entry's own property; otherwise start from an empty value on the preferred target.
CmsPathValue pathValue ;
if ( ( fileProp != null ) && ! CmsClientProperty . isPropertyEmpty ( fileProp ) ) {
    pathValue = fileProp . getPathValue ( ) . prepend ( defaultFileId + "/" + propName ) ;
} else if ( ! CmsClientProperty . isPropertyEmpty ( ownProp ) ) {
    pathValue = ownProp . getPathValue ( ) . prepend ( entryId + "/" + propName ) ;
} else {
    String targetId = null ;
    if ( propDef . isPreferFolder ( ) || ( m_handler . getDefaultFileId ( ) == null ) ) {
        targetId = entryId ;
    } else {
        targetId = m_handler . getDefaultFileId ( ) . toString ( ) ;
    }
    pathValue = new CmsPathValue ( "" , targetId + "/" + propName + "/" + CmsClientProperty . PATH_STRUCTURE_VALUE ) ;
}
boolean alwaysAllowEmpty = ! propName . equals ( CmsClientProperty . PROPERTY_NAVTEXT ) ;
// CHECK: should we really generally allow fields other than NavText to be empty?
CmsBasicFormField field = CmsBasicFormField . createField ( propDef , pathValue . getPath ( ) , this , Collections . < String , String > emptyMap ( ) , alwaysAllowEmpty ) ;
// If a value is inherited, show its origin as the field tooltip.
CmsClientProperty inheritedProperty = m_handler . getInheritedProperty ( propName ) ;
String inherited = ( inheritedProperty == null ) ? null : inheritedProperty . getEffectiveValue ( ) ;
if ( inheritedProperty != null ) {
    String message = Messages . get ( ) . key ( Messages . GUI_PROPERTY_ORIGIN_2 , inheritedProperty . getOrigin ( ) , inherited ) ;
    ( ( Widget ) field . getWidget ( ) ) . setTitle ( message ) ;
}
I_CmsFormWidget w = field . getWidget ( ) ;
// model binding not necessary here
String initialValue = pathValue . getValue ( ) ;
boolean ghost = CmsStringUtil . isEmptyOrWhitespaceOnly ( pathValue . getValue ( ) ) ;
// An empty/whitespace-only value is shown as a "ghost" of the inherited value.
if ( w instanceof I_CmsHasGhostValue ) {
    ( ( I_CmsHasGhostValue ) w ) . setGhostValue ( inherited , ghost ) ;
    if ( ghost ) {
        initialValue = null ;
    }
}
if ( ghost && ( inheritedProperty != null ) ) {
    String message = org . opencms . gwt . client . Messages . get ( ) . key ( org . opencms . gwt . client . Messages . GUI_ORIGIN_INHERITED_1 , inheritedProperty . getOrigin ( ) ) ;
    field . getLayoutData ( ) . put ( "info" , message ) ;
}
m_form . addField ( m_form . getWidget ( ) . getDefaultGroup ( ) , field , initialValue ) ;
public class DOMHelper { /** * Search for prefix definitions in element and all children . There still is an issue for * documents that use the same prefix on differen namespaces in disjunct subtrees . This might be * possible but we won ' t support this . Same is with declaring multiple default namespaces . * XMLBeams behaviour will be undefined in that case . There is a workaround by defining a custom * namespace / prefix mapping , so the effort to support this is not justified . * @ param nsMap * @ param element * @ throws DOMException */ private static void fillNSMapWithPrefixesDeclaredInElement ( final Map < String , String > nsMap , final Element element ) throws DOMException { } }
NamedNodeMap attributes = element . getAttributes ( ) ; for ( int i = 0 ; i < attributes . getLength ( ) ; i ++ ) { Node attribute = attributes . item ( i ) ; if ( ( ! XMLConstants . XMLNS_ATTRIBUTE . equals ( attribute . getPrefix ( ) ) ) && ( ! XMLConstants . XMLNS_ATTRIBUTE . equals ( attribute . getLocalName ( ) ) ) ) { continue ; } if ( XMLConstants . XMLNS_ATTRIBUTE . equals ( attribute . getLocalName ( ) ) ) { nsMap . put ( "xbdefaultns" , attribute . getNodeValue ( ) ) ; continue ; } nsMap . put ( attribute . getLocalName ( ) , attribute . getNodeValue ( ) ) ; } NodeList childNodes = element . getChildNodes ( ) ; for ( Node n : nodeListToIterator ( childNodes ) ) { if ( n . getNodeType ( ) != Node . ELEMENT_NODE ) { continue ; } fillNSMapWithPrefixesDeclaredInElement ( nsMap , ( Element ) n ) ; }
public class PDTHelper { /** * Get the start - - week number for the passed year and month . * @ param aDT * The object to use year and month from . * @ param aLocale * Locale to use . May not be < code > null < / code > . * @ return the start week number . */ public static int getStartWeekOfMonth ( @ Nonnull final LocalDateTime aDT , @ Nonnull final Locale aLocale ) { } }
return getWeekOfWeekBasedYear ( aDT . withDayOfMonth ( 1 ) , aLocale ) ;
public class TransactionManager { /** * Called from the tx service every 10 seconds . * This hack is needed because current metrics system is not flexible when it comes to adding new metrics . */ public void logStatistics ( ) { } }
LOG . info ( "Transaction Statistics: write pointer = " + lastWritePointer + ", invalid = " + invalid . size ( ) + ", in progress = " + inProgress . size ( ) + ", committing = " + committingChangeSets . size ( ) + ", committed = " + committedChangeSets . size ( ) ) ;
public class CaptureActivity { /** * Put up our own UI for how to handle the decoded contents . */ private void handleDecodeInternally ( Result rawResult , ResultHandler resultHandler , Bitmap barcode ) { } }
maybeSetClipboard ( resultHandler ) ;
// Auto-open: if the result has a default action and the preference is set,
// trigger it immediately instead of showing the result screen.
SharedPreferences prefs = PreferenceManager . getDefaultSharedPreferences ( this ) ;
if ( resultHandler . getDefaultButtonID ( ) != null && prefs . getBoolean ( PreferencesActivity . KEY_AUTO_OPEN_WEB , false ) ) {
    resultHandler . handleButtonPress ( resultHandler . getDefaultButtonID ( ) ) ;
    return ;
}
// Swap the scanning UI for the result UI.
statusView . setVisibility ( View . GONE ) ;
viewfinderView . setVisibility ( View . GONE ) ;
resultView . setVisibility ( View . VISIBLE ) ;
// Thumbnail of the scanned barcode (falls back to the launcher icon).
ImageView barcodeImageView = ( ImageView ) findViewById ( R . id . barcode_image_view ) ;
if ( barcode == null ) {
    barcodeImageView . setImageBitmap ( BitmapFactory . decodeResource ( getResources ( ) , R . drawable . launcher_icon ) ) ;
} else {
    barcodeImageView . setImageBitmap ( barcode ) ;
}
// Basic result facts: format, parsed type, scan timestamp.
TextView formatTextView = ( TextView ) findViewById ( R . id . format_text_view ) ;
formatTextView . setText ( rawResult . getBarcodeFormat ( ) . toString ( ) ) ;
TextView typeTextView = ( TextView ) findViewById ( R . id . type_text_view ) ;
typeTextView . setText ( resultHandler . getType ( ) . toString ( ) ) ;
DateFormat formatter = DateFormat . getDateTimeInstance ( DateFormat . SHORT , DateFormat . SHORT ) ;
TextView timeTextView = ( TextView ) findViewById ( R . id . time_text_view ) ;
timeTextView . setText ( formatter . format ( rawResult . getTimestamp ( ) ) ) ;
// Metadata block: hidden by default, shown only if displayable entries exist.
TextView metaTextView = ( TextView ) findViewById ( R . id . meta_text_view ) ;
View metaTextViewLabel = findViewById ( R . id . meta_text_view_label ) ;
metaTextView . setVisibility ( View . GONE ) ;
metaTextViewLabel . setVisibility ( View . GONE ) ;
Map < ResultMetadataType , Object > metadata = rawResult . getResultMetadata ( ) ;
if ( metadata != null ) {
    StringBuilder metadataText = new StringBuilder ( 20 ) ;
    for ( Map . Entry < ResultMetadataType , Object > entry : metadata . entrySet ( ) ) {
        if ( DISPLAYABLE_METADATA_TYPES . contains ( entry . getKey ( ) ) ) {
            metadataText . append ( entry . getValue ( ) ) . append ( '\n' ) ;
        }
    }
    if ( metadataText . length ( ) > 0 ) {
        // Trim the trailing newline before displaying.
        metadataText . setLength ( metadataText . length ( ) - 1 ) ;
        metaTextView . setText ( metadataText ) ;
        metaTextView . setVisibility ( View . VISIBLE ) ;
        metaTextViewLabel . setVisibility ( View . VISIBLE ) ;
    }
}
// Main contents, with font size shrinking as the text gets longer (22sp..32sp).
CharSequence displayContents = resultHandler . getDisplayContents ( ) ;
TextView contentsTextView = ( TextView ) findViewById ( R . id . contents_text_view ) ;
contentsTextView . setText ( displayContents ) ;
int scaledSize = Math . max ( 22 , 32 - displayContents . length ( ) / 4 ) ;
contentsTextView . setTextSize ( TypedValue . COMPLEX_UNIT_SP , scaledSize ) ;
// Supplemental info is fetched asynchronously when the preference allows it.
TextView supplementTextView = ( TextView ) findViewById ( R . id . contents_supplement_text_view ) ;
supplementTextView . setText ( "" ) ;
supplementTextView . setOnClickListener ( null ) ;
if ( PreferenceManager . getDefaultSharedPreferences ( this ) . getBoolean ( PreferencesActivity . KEY_SUPPLEMENTAL , true ) ) {
    SupplementalInfoRetriever . maybeInvokeRetrieval ( supplementTextView , resultHandler . getResult ( ) , historyManager , this ) ;
}
// Action buttons: show the first buttonCount slots, hide the rest.
int buttonCount = resultHandler . getButtonCount ( ) ;
ViewGroup buttonView = ( ViewGroup ) findViewById ( R . id . result_button_view ) ;
buttonView . requestFocus ( ) ;
for ( int x = 0 ; x < ResultHandler . MAX_BUTTON_COUNT ; x ++ ) {
    TextView button = ( TextView ) buttonView . getChildAt ( x ) ;
    if ( x < buttonCount ) {
        button . setVisibility ( View . VISIBLE ) ;
        button . setText ( resultHandler . getButtonText ( x ) ) ;
        button . setOnClickListener ( new ResultButtonListener ( resultHandler , x ) ) ;
    } else {
        button . setVisibility ( View . GONE ) ;
    }
}
public class Vector { /** * Returns an array containing all of the elements in this Vector in the * correct order ; the runtime type of the returned array is that of the * specified array . If the Vector fits in the specified array , it is * returned therein . Otherwise , a new array is allocated with the runtime * type of the specified array and the size of this Vector . * < p > If the Vector fits in the specified array with room to spare * ( i . e . , the array has more elements than the Vector ) , * the element in the array immediately following the end of the * Vector is set to null . ( This is useful in determining the length * of the Vector < em > only < / em > if the caller knows that the Vector * does not contain any null elements . ) * @ param a the array into which the elements of the Vector are to * be stored , if it is big enough ; otherwise , a new array of the * same runtime type is allocated for this purpose . * @ return an array containing the elements of the Vector * @ throws ArrayStoreException if the runtime type of a is not a supertype * of the runtime type of every element in this Vector * @ throws NullPointerException if the given array is null * @ since 1.2 */ @ SuppressWarnings ( "unchecked" ) public synchronized < T > T [ ] toArray ( T [ ] a ) { } }
if ( a . length < elementCount ) return ( T [ ] ) Arrays . copyOf ( elementData , elementCount , a . getClass ( ) ) ; System . arraycopy ( elementData , 0 , a , 0 , elementCount ) ; if ( a . length > elementCount ) a [ elementCount ] = null ; return a ;
public class NodeTypes { /** * Determine if at least one of the named primary node type or mixin types is or subtypes the ' mix : lastModified ' mixin type . * @ param primaryType the primary type name ; may not be null * @ param mixinTypes the mixin type names ; may be null or empty * @ return true if any of the named node types is a last - modified type , or false if there are none */ public boolean isLastModified ( Name primaryType , Set < Name > mixinTypes ) { } }
if ( lastModifiedNodeTypeNames . contains ( primaryType ) ) return true ; if ( mixinTypes != null ) { for ( Name mixinType : mixinTypes ) { if ( lastModifiedNodeTypeNames . contains ( mixinType ) ) return true ; } } return false ;
public class VirtualHost { /** * Ensure that ' hostnamePattern ' matches ' defaultHostname ' . */ static void ensureHostnamePatternMatchesDefaultHostname ( String hostnamePattern , String defaultHostname ) { } }
if ( "*" . equals ( hostnamePattern ) ) { return ; } // Pretty convoluted way to validate but it ' s done only once and // we don ' t need to duplicate the pattern matching logic . final DomainNameMapping < Boolean > mapping = new DomainNameMappingBuilder < > ( Boolean . FALSE ) . add ( hostnamePattern , Boolean . TRUE ) . build ( ) ; if ( ! mapping . map ( defaultHostname ) ) { throw new IllegalArgumentException ( "defaultHostname: " + defaultHostname + " (must be matched by hostnamePattern: " + hostnamePattern + ')' ) ; }
public class YarnShuffleServiceMetrics { /** * Get metrics from the source * @ param collector to contain the resulting metrics snapshot * @ param all if true , return all metrics even if unchanged . */ @ Override public void getMetrics ( MetricsCollector collector , boolean all ) { } }
MetricsRecordBuilder metricsRecordBuilder = collector . addRecord ( "sparkShuffleService" ) ; for ( Map . Entry < String , Metric > entry : metricSet . getMetrics ( ) . entrySet ( ) ) { collectMetric ( metricsRecordBuilder , entry . getKey ( ) , entry . getValue ( ) ) ; }
public class MongoServerSelector { /** * Reads a server configuration and replaces the placeholder values with an available host and port as specified in the mongo . properties file * @ param libertyServer The server configuration to assign Mongo servers to * @ throws Exception Can ' t connect to any of the specified Mongo servers */ public static void assignMongoServers ( LibertyServer libertyServer ) throws Exception { } }
MongoServerSelector mongoServerSelector = new MongoServerSelector ( libertyServer ) ; mongoServerSelector . updateLibertyServerMongos ( ) ;
public class KNXNetworkLinkIP { /** * Creates a new network link using the { @ link KNXnetIPRouting } protocol , with the local endpoint specified by a * network interface . * @ param netIf local network interface used to join the multicast group and for sending , use < code > null < / code > for * the host ' s default multicast interface * @ param mcGroup address of the multicast group to join , use { @ link # DefaultMulticast } for the default KNX IP * multicast address * @ param settings medium settings defining device and medium specifics needed for communication * @ return the network link in open state * @ throws KNXException on failure establishing link using the KNXnet / IP connection */ public static KNXNetworkLinkIP newRoutingLink ( final NetworkInterface netIf , final InetAddress mcGroup , final KNXMediumSettings settings ) throws KNXException { } }
return new KNXNetworkLinkIP ( ROUTING , new KNXnetIPRouting ( netIf , mcGroup ) , settings ) ;
public class GetCrawlerRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetCrawlerRequest getCrawlerRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getCrawlerRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getCrawlerRequest . getName ( ) , NAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class BufferedWriter { /** * Flushes the output buffer to the underlying character stream , without * flushing the stream itself . This method is non - private only so that it * may be invoked by PrintStream . */ void flushBuffer ( ) throws IOException { } }
synchronized ( lock ) { ensureOpen ( ) ; if ( nextChar == 0 ) return ; out . write ( cb , 0 , nextChar ) ; nextChar = 0 ; }
public class StencilInterpreter { /** * Bind block values to names based on prepare signature * @ param sig Signature of output block preparation * @ param inv Invocation of output block * @ return Map of Name = > BoundParamBlocks representing Macro blocks */ private Map < String , Object > bindBlocks ( PrepareSignatureContext sig , PrepareInvocationContext inv ) { } }
// No invocation means there are no blocks to bind.
if ( inv == null ) {
    return Collections . emptyMap ( ) ;
}
// Declaration flagged '*': catch-all map of the remaining unbound blocks.
BlockDeclContext allDecl = null ;
// Declaration flagged '+': binds the single unnamed block.
BlockDeclContext unnamedDecl = null ;
// Work on a copy so bound named blocks can be removed as they are consumed.
List < NamedOutputBlockContext > namedBlocks = new ArrayList < > ( inv . namedBlocks ) ;
Map < String , Object > blocks = new HashMap < > ( ) ;
for ( BlockDeclContext blockDecl : sig . blockDecls ) {
    if ( blockDecl . flag != null ) {
        // Flagged declarations are only recorded here; they are bound after the named pass.
        if ( blockDecl . flag . getText ( ) . equals ( "*" ) ) {
            if ( allDecl != null ) {
                throw new ExecutionException ( "only a single parameter can be marked with '*'" , getLocation ( blockDecl ) ) ;
            }
            allDecl = blockDecl ;
        } else if ( blockDecl . flag . getText ( ) . equals ( "+" ) ) {
            if ( unnamedDecl != null ) {
                throw new ExecutionException ( "only a single parameter can be marked with '+'" , getLocation ( blockDecl ) ) ;
            }
            unnamedDecl = blockDecl ;
        } else {
            throw new ExecutionException ( "unknown block declaration flag" , getLocation ( blockDecl ) ) ;
        }
        continue ;
    }
    // Find the block matching this declaration, removing it from the pool
    ParserRuleContext paramBlock = findAndRemoveBlock ( namedBlocks , name ( blockDecl ) ) ;
    // Bind the block
    BoundParamOutputBlock boundBlock = bindBlock ( paramBlock ) ;
    blocks . put ( name ( blockDecl ) , boundBlock ) ;
}
// Bind unnamed block (if requested via '+')
if ( unnamedDecl != null ) {
    UnnamedOutputBlockContext unnamedBlock = inv . unnamedBlock ;
    BoundParamOutputBlock boundUnnamedBlock = bindBlock ( unnamedBlock ) ;
    blocks . put ( unnamedDecl . id . getText ( ) , boundUnnamedBlock ) ;
}
// Bind rest of blocks (if requested via '*')
if ( allDecl != null ) {
    Map < String , Block > otherBlocks = new HashMap < > ( ) ;
    // Add unnamed block under the empty key if it wasn't bound explicitly
    if ( inv . unnamedBlock != null && unnamedDecl == null ) {
        UnnamedOutputBlockContext unnamedBlock = inv . unnamedBlock ;
        BoundParamOutputBlock boundUnnamedBlock = new BoundParamOutputBlock ( unnamedBlock , mode ( unnamedBlock ) , currentScope ) ;
        otherBlocks . put ( "" , boundUnnamedBlock ) ;
    }
    // Add all other unbound named blocks
    for ( NamedOutputBlockContext namedBlock : namedBlocks ) {
        String blockName = nullToEmpty ( name ( namedBlock ) ) ;
        BoundParamOutputBlock boundNamedBlock = new BoundParamOutputBlock ( namedBlock , mode ( namedBlock ) , currentScope ) ;
        otherBlocks . put ( blockName , boundNamedBlock ) ;
    }
    blocks . put ( allDecl . id . getText ( ) , otherBlocks ) ;
}
return blocks ;
public class ScanSpec {
    /**
     * Classifies the given directory path against the whitelist/blacklist
     * configuration. The path should end in "/".
     *
     * @param relativePath the relative path
     * @return the {@link ScanSpecPathMatch} describing whether the path is blacklisted,
     *         whitelisted, a descendant of a whitelisted path, an ancestor of a
     *         whitelisted path, or outside all whitelisted paths
     */
    public ScanSpecPathMatch dirWhitelistMatchStatus ( final String relativePath ) { } }
// Decision ladder — the first matching rule wins:
// 1) blacklist (exact, then prefix) vetoes everything;
// 2) an empty whitelist means the root package is implicitly whitelisted;
// 3) exact whitelist / whitelisted-class-package matches;
// 4) descendant-of-whitelist (prefix) matches;
// 5) ancestor-of-whitelist matches (the scanner must keep recursing);
// 6) otherwise the path is outside all whitelisted paths.
// In blacklisted path
if (pathWhiteBlackList.isBlacklisted(relativePath)) {
    // The directory is blacklisted.
    return ScanSpecPathMatch.HAS_BLACKLISTED_PATH_PREFIX;
}
if (pathPrefixWhiteBlackList.isBlacklisted(relativePath)) {
    // A prefix of this path is blacklisted.
    return ScanSpecPathMatch.HAS_BLACKLISTED_PATH_PREFIX;
}
if (pathWhiteBlackList.whitelistIsEmpty() && classPackagePathWhiteBlackList.whitelistIsEmpty()) {
    // If there are no whitelisted packages, the root package is whitelisted
    return relativePath.isEmpty() || relativePath.equals("/")
            ? ScanSpecPathMatch.AT_WHITELISTED_PATH
            : ScanSpecPathMatch.HAS_WHITELISTED_PATH_PREFIX;
}
// At whitelisted path
if (pathWhiteBlackList.isSpecificallyWhitelistedAndNotBlacklisted(relativePath)) {
    // Reached a whitelisted path
    return ScanSpecPathMatch.AT_WHITELISTED_PATH;
}
if (classPackagePathWhiteBlackList.isSpecificallyWhitelistedAndNotBlacklisted(relativePath)) {
    // Reached a package containing a specifically-whitelisted class
    return ScanSpecPathMatch.AT_WHITELISTED_CLASS_PACKAGE;
}
// Descendant of whitelisted path
if (pathPrefixWhiteBlackList.isSpecificallyWhitelisted(relativePath)) {
    // Path prefix matches one in the whitelist
    return ScanSpecPathMatch.HAS_WHITELISTED_PATH_PREFIX;
}
// Ancestor of whitelisted path
if (relativePath.equals("/")) {
    // The default package is always the ancestor of whitelisted paths (need to keep recursing)
    return ScanSpecPathMatch.ANCESTOR_OF_WHITELISTED_PATH;
}
if (pathWhiteBlackList.whitelistHasPrefix(relativePath)) {
    // relativePath is an ancestor (prefix) of a whitelisted path
    return ScanSpecPathMatch.ANCESTOR_OF_WHITELISTED_PATH;
}
if (classfilePathWhiteBlackList.whitelistHasPrefix(relativePath)) {
    // relativePath is an ancestor (prefix) of a whitelisted class' parent directory
    return ScanSpecPathMatch.ANCESTOR_OF_WHITELISTED_PATH;
}
// Not in whitelisted path
return ScanSpecPathMatch.NOT_WITHIN_WHITELISTED_PATH;
public class DeleteAppLaunchConfigurationRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( DeleteAppLaunchConfigurationRequest deleteAppLaunchConfigurationRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( deleteAppLaunchConfigurationRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteAppLaunchConfigurationRequest . getAppId ( ) , APPID_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class LockedObject { /** * adds a new owner to a lock * @ param owner string that represents the owner * @ return true if the owner was added , false otherwise */ public boolean addLockedObjectOwner ( String owner ) { } }
if ( this . owner == null ) { this . owner = new String [ 1 ] ; } else { int size = this . owner . length ; String [ ] newLockObjectOwner = new String [ size + 1 ] ; // check if the owner is already here ( that should actually not // happen ) for ( int i = 0 ; i < size ; i ++ ) { if ( this . owner [ i ] . equals ( owner ) ) { return false ; } } System . arraycopy ( this . owner , 0 , newLockObjectOwner , 0 , size ) ; this . owner = newLockObjectOwner ; } this . owner [ this . owner . length - 1 ] = owner ; return true ;
public class CPAttachmentFileEntryLocalServiceBaseImpl { /** * Returns the cp attachment file entry matching the UUID and group . * @ param uuid the cp attachment file entry ' s UUID * @ param groupId the primary key of the group * @ return the matching cp attachment file entry , or < code > null < / code > if a matching cp attachment file entry could not be found */ @ Override public CPAttachmentFileEntry fetchCPAttachmentFileEntryByUuidAndGroupId ( String uuid , long groupId ) { } }
return cpAttachmentFileEntryPersistence . fetchByUUID_G ( uuid , groupId ) ;
public class DenseTensor { /** * Implementation of inner product where both tensors are dense and have * the same dimensionality . These properties enable the inner product to * be computed extremely quickly by iterating over both dense arrays of * values . * @ param other * @ return */ private double denseTensorInnerProduct ( DenseTensor other ) { } }
double [ ] otherValues = other . values ; int length = values . length ; Preconditions . checkArgument ( otherValues . length == length ) ; double innerProduct = 0.0 ; for ( int i = 0 ; i < length ; i ++ ) { innerProduct += values [ i ] * otherValues [ i ] ; } return innerProduct ;
public class CacheProviderWrapper { /** * Returns all of the cache IDs in the PushPullTable */ @ Override public List getCacheIdsInPushPullTable ( ) { } }
final String methodName = "getCacheIdsInPushPullTable()" ; List list = new ArrayList ( ) ; if ( this . featureSupport . isReplicationSupported ( ) ) { // TODO write code to support getCacheIdsInPushPullTable function if ( tc . isDebugEnabled ( ) ) { Tr . debug ( tc , methodName + " cacheName=" + cacheName + " ERROR because it is not implemented yet" ) ; } } else { Tr . error ( tc , "DYNA1065E" , new Object [ ] { methodName , cacheName , this . cacheProviderName } ) ; } return list ;
public class HelpDoclet {
    /**
     * High-level function that processes a single DocWorkUnit unit using its handler.
     * Renders the work unit through its Freemarker template into the destination
     * directory, then writes a companion JSON index entry for the same unit.
     *
     * @param cfg the Freemarker configuration used to load templates
     * @param workUnit the work unit to render
     * @param indexByGroupMaps index data grouped per documentation group
     * @param featureMaps feature data passed through to the GSON container
     * @throws DocException wrapping any IO or template failure
     */
    protected void processWorkUnitTemplate ( final Configuration cfg , final DocWorkUnit workUnit , final List < Map < String , String > > indexByGroupMaps , final List < Map < String , String > > featureMaps ) { } }
try {
    // Merge data-model with template
    Template template = cfg.getTemplate(workUnit.getTemplateName());
    File outputPath = new File(getDestinationDir(), workUnit.getTargetFileName());
    try (final Writer out = new OutputStreamWriter(new FileOutputStream(outputPath))) {
        template.process(workUnit.getRootMap(), out);
    }
} catch (IOException e) {
    throw new DocException("IOException during documentation creation", e);
} catch (TemplateException e) {
    throw new DocException("TemplateException during documentation creation", e);
}
// Create GSON-friendly container object
GSONWorkUnit gsonworkunit = createGSONWorkUnit(workUnit, indexByGroupMaps, featureMaps);
// NOTE(review): each getProperty(...).toString() would NPE on a missing property —
// presumably all of these are guaranteed by the doc pipeline; confirm.
gsonworkunit.populate(workUnit.getProperty("summary").toString(), workUnit.getProperty("gson-arguments"), workUnit.getProperty("description").toString(), workUnit.getProperty("name").toString(), workUnit.getProperty("group").toString(), Boolean.valueOf(workUnit.getProperty("beta").toString()), Boolean.valueOf(workUnit.getProperty("experimental").toString()));
// Convert object to JSON and write JSON entry to file
File outputPathForJSON = new File(getDestinationDir(), workUnit.getJSONFileName());
try (final BufferedWriter jsonWriter = new BufferedWriter(new FileWriter(outputPathForJSON))) {
    Gson gson = new GsonBuilder().serializeSpecialFloatingPointValues().setPrettyPrinting().create();
    String json = gson.toJson(gsonworkunit);
    jsonWriter.write(json);
} catch (IOException e) {
    throw new DocException("Failed to create JSON entry", e);
}
public class Ifc2x3tc1PackageImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public EClass getIfcHumidifierType ( ) { } }
if ( ifcHumidifierTypeEClass == null ) { ifcHumidifierTypeEClass = ( EClass ) EPackage . Registry . INSTANCE . getEPackage ( Ifc2x3tc1Package . eNS_URI ) . getEClassifiers ( ) . get ( 277 ) ; } return ifcHumidifierTypeEClass ;
public class MapElementGroup { /** * Replies the shape types that are corresponding to the given elements . * @ param classification is the map which will be filled with the classified elements . * @ param elements the elements to classify . */ public static void classifiesElements ( Map < ShapeElementType , MapElementGroup > classification , Iterator < ? extends MapElement > elements ) { } }
ShapeElementType type ; MapElementGroup group ; MapElement element ; while ( elements . hasNext ( ) ) { element = elements . next ( ) ; type = classifiesElement ( element ) ; if ( type != ShapeElementType . UNSUPPORTED ) { group = classification . get ( type ) ; if ( group == null ) { group = new MapElementGroup ( ) ; classification . put ( type , group ) ; } group . add ( element ) ; } }
public class Datepicker { /** * Converts a java Date format to a jQuery date format * @ param format * Format to be converted * @ return converted format */ public static String convertFormat ( String format ) { } }
if ( format == null ) return null ; else { // day of week format = format . replaceAll ( "EEE" , "D" ) ; // year format = format . replaceAll ( "yy" , "y" ) ; // month if ( format . indexOf ( "MMM" ) != - 1 ) { format = format . replaceAll ( "MMM" , "M" ) ; } else { format = format . replaceAll ( "M" , "m" ) ; } return format ; }
public class ThreadedServer { /** * Kill a job . This method closes IDLE and socket associated with a job * @ param thread * @ param job */ protected void stopJob ( Thread thread , Object job ) { } }
if ( job instanceof Socket ) { try { ( ( Socket ) job ) . close ( ) ; } catch ( Exception e ) { LogSupport . ignore ( log , e ) ; } } super . stopJob ( thread , job ) ;
public class AbstractPool {
    /**
     * {@inheritDoc}
     *
     * Delists the given connection listener from the current transaction. No-op
     * when the pool is configured with NoTransaction support. When an uncommitted
     * transaction is active, the listener is delisted and its entry is removed
     * from the per-transaction map.
     *
     * @throws ResourceException if cl is null or delisting fails
     */
    public void delist ( ConnectionListener cl ) throws ResourceException { } }
if (cm.getTransactionSupport() == TransactionSupportLevel.NoTransaction)
    return;
if (cl != null) {
    try {
        TransactionalConnectionManager txCM = (TransactionalConnectionManager) cm;
        Transaction tx = txCM.getTransactionIntegration().getTransactionManager().getTransaction();
        if (TxUtils.isUncommitted(tx)) {
            try {
                cl.delist();
            } finally {
                // Always drop this listener's map entry, even if delist() threw.
                Object id = txCM.getTransactionIntegration().getTransactionSynchronizationRegistry().getTransactionKey();
                Map<ManagedConnectionPool, ConnectionListener> currentMap = transactionMap.get(id);
                if (currentMap != null) {
                    // NOTE(review): the removed listener is captured but never used, and
                    // put() re-inserts the same map instance under the same key — confirm
                    // whether both can be simplified or are needed by the map semantics.
                    ConnectionListener registered = currentMap.remove(cl.getManagedConnectionPool());
                    transactionMap.put(id, currentMap);
                }
            }
        }
    } catch (ResourceException re) {
        throw re;
    } catch (Exception e) {
        throw new ResourceException(e);
    }
} else {
    throw new ResourceException();
}
public class AbstractLRParser {
    /**
     * {@inheritDoc}
     *
     * Records the start time, installs the token stream, resets any state left
     * over from a previous parse, and then delegates to the internal parse loop.
     */
    @Override public final ParseTreeNode parse ( TokenStream tokenStream ) throws ParserException { } }
// Capture the wall-clock start before any work begins.
startTime = System.currentTimeMillis();
setTokenStream(tokenStream);
// Reset must run after the stream is installed and before parsing starts.
reset();
return parse();
public class Utils {
    /**
     * Checks whether the application identified by the given package name is
     * installed on the device.
     *
     * @param context the context used to obtain the {@link PackageManager}
     * @param packageName the package to look up
     * @return true if the package is installed, false otherwise
     */
    public static boolean packageInstalled ( @ NotNull final Context context , @ NotNull final String packageName ) { } }
final PackageManager packageManager = context.getPackageManager();
boolean result = false;
try {
    packageManager.getPackageInfo(packageName, PackageManager.GET_ACTIVITIES);
    result = true;
} catch (PackageManager.NameNotFoundException ignore) {
    // Not installed — this is the expected negative path, deliberately swallowed.
}
Logger.d("packageInstalled() is ", result, " for ", packageName);
return result;
public class CmsJspDateSeriesBean { /** * Returns the gallery title of the series content . < p > * @ return the gallery title of the series content */ public String getTitle ( ) { } }
CmsGallerySearchResult result ; try { result = CmsGallerySearch . searchById ( m_value . getCmsObject ( ) , m_value . getContentValue ( ) . getDocument ( ) . getFile ( ) . getStructureId ( ) , m_value . getLocale ( ) ) ; return result . getTitle ( ) ; } catch ( CmsException e ) { LOG . warn ( "Could not retrieve title of series content." , e ) ; return "" ; }
public class ComputationGraph { /** * Set all labels for the ComputationGraph network */ public void setLabels ( INDArray ... labels ) { } }
if ( labels != null && labels . length != this . numOutputArrays ) { throw new IllegalArgumentException ( "Invalid output array: network has " + numOutputArrays + " outputs, but array is of length " + labels . length ) ; } this . labels = labels ;
public class ConcurrentCollectorMultiple { /** * Set the performer to be used . This method must be invoked before the * collector can be run . The passed implementation must be rock - solid as this * class will not make any retries . If the passed performer throws and * exception without handling the objects correct the objects will be lost ! * @ param aPerformer * The performer to be used . May not be < code > null < / code > . * @ return this for chaining * @ throws IllegalStateException * If another performer is already present ! */ @ Nonnull public final ConcurrentCollectorMultiple < DATATYPE > setPerformer ( @ Nonnull final IConcurrentPerformer < List < DATATYPE > > aPerformer ) { } }
if ( m_aPerformer != null ) throw new IllegalStateException ( "Another performer is already set!" ) ; m_aPerformer = ValueEnforcer . notNull ( aPerformer , "Performer" ) ; return this ;
public class HmlWriter { /** * Write the specified HML to the specified file . * @ param data HML to write , must not be null * @ param file file to write to , must not be null * @ throws IOException if an I / O error occurs */ public static void write ( final Hml data , final File file ) throws IOException { } }
checkNotNull ( data ) ; checkNotNull ( file ) ; try ( BufferedWriter writer = new BufferedWriter ( new FileWriter ( file ) ) ) { write ( data , writer ) ; }
public class DashboardService { /** * Updates the given dashboard . * @ param dashboard The dashboard to update * @ return The dashboard that was updated */ public Optional < Dashboard > update ( Dashboard dashboard ) { } }
return HTTP . PUT ( String . format ( "/v2/dashboards/%d.json" , dashboard . getId ( ) ) , dashboard , DASHBOARD ) ;
public class ClientUtils { /** * Client connection methods */ public HttpURLConnection buildConnection ( URI baseServiceUri , String tail ) { } }
try { HttpURLConnection client = ( HttpURLConnection ) baseServiceUri . resolve ( tail ) . toURL ( ) . openConnection ( ) ; return client ; } catch ( MalformedURLException mue ) { throw new EFhirClientException ( "Invalid Service URL" , mue ) ; } catch ( IOException ioe ) { throw new EFhirClientException ( "Unable to establish connection to server: " + baseServiceUri . toString ( ) + tail , ioe ) ; }
public class nslimitsessions { /** * Use this API to fetch all the nslimitsessions resources that are configured on netscaler . * This uses nslimitsessions _ args which is a way to provide additional arguments while fetching the resources . */ public static nslimitsessions [ ] get ( nitro_service service , nslimitsessions_args args ) throws Exception { } }
nslimitsessions obj = new nslimitsessions ( ) ; options option = new options ( ) ; option . set_args ( nitro_util . object_to_string_withoutquotes ( args ) ) ; nslimitsessions [ ] response = ( nslimitsessions [ ] ) obj . get_resources ( service , option ) ; return response ;
public class ReflectiveRandomIndexing {
    /**
     * Returns the index vector for the term, creating one if the term-to-index-vector
     * mapping does not yet exist.
     *
     * @param term a word in the semantic space
     * @return the index vector for the provided term
     */
    private TernaryVector getTermIndexVector ( String term ) { } }
TernaryVector iv = termToIndexVector.get(term);
if (iv == null) {
    // Double-checked creation: lock in case multiple threads attempt to add it at once.
    synchronized (this) {
        // recheck in case another thread added it while we were waiting for the lock
        iv = termToIndexVector.get(term);
        if (iv == null) {
            // since this is a new term, also map it to its index for later
            // look-up when the integer documents are processed
            termToIndex.put(term, termIndexCounter++);
            // next, map it to its reflective vector which will be filled in process space
            termToReflectiveSemantics.put(term, createVector());
            // last, create an index vector for the term and publish it into the
            // map only after the other per-term structures are in place
            iv = indexVectorGenerator.generate();
            termToIndexVector.put(term, iv);
        }
    }
}
return iv;
public class GenericBoJdbcDao { /** * Delete an existing BO from storage . * @ param conn * @ param bo * @ return * @ since 0.8.1 */ protected DaoResult delete ( Connection conn , T bo ) { } }
if ( bo == null ) { return new DaoResult ( DaoOperationStatus . NOT_FOUND ) ; } int numRows = execute ( conn , calcSqlDeleteOne ( bo ) , rowMapper . valuesForColumns ( bo , rowMapper . getPrimaryKeyColumns ( ) ) ) ; DaoResult result = numRows > 0 ? new DaoResult ( DaoOperationStatus . SUCCESSFUL , bo ) : new DaoResult ( DaoOperationStatus . NOT_FOUND ) ; if ( numRows > 0 ) { invalidateCache ( bo , CacheInvalidationReason . DELETE ) ; } return result ;
public class CmsCacheViewApp {
    /**
     * Creates the view for the image cache.<p>
     *
     * Builds a horizontal split panel: the left side holds the image cache input
     * form, the right side holds the intro text, the empty-result message and the
     * (initially hidden) image cache table. A filter text field and toolbar
     * buttons are registered on the surrounding app layout.
     *
     * @return a vaadin component with the information about the image cache
     */
    private Component getImageViewComponent ( ) { } }
m_siteTableFilter = new TextField();
HorizontalSplitPanel sp = new HorizontalSplitPanel();
sp.setSizeFull();
VerticalLayout intro = CmsVaadinUtils.getInfoLayout(Messages.GUI_CACHE_IMAGE_INTRODUCTION_0);
VerticalLayout nullResult = CmsVaadinUtils.getInfoLayout(Messages.GUI_CACHE_IMAGE_NO_RESULTS_0);
final CmsImageCacheTable table = new CmsImageCacheTable(intro, nullResult, m_siteTableFilter);
sp.setFirstComponent(new CmsImageCacheInput(table));
// Right-hand side: intro text, empty-result message and the result table.
VerticalLayout secC = new VerticalLayout();
secC.setSizeFull();
secC.addComponent(intro);
secC.addComponent(nullResult);
secC.addComponent(table);
// Configure the filter field: icon, prompt and live filtering of the table.
m_siteTableFilter.setIcon(FontOpenCms.FILTER);
m_siteTableFilter.setInputPrompt(Messages.get().getBundle(UI.getCurrent().getLocale()).key(Messages.GUI_EXPLORER_FILTER_0));
m_siteTableFilter.addStyleName(ValoTheme.TEXTFIELD_INLINE_ICON);
m_siteTableFilter.setWidth("200px");
m_siteTableFilter.addTextChangeListener(new TextChangeListener() {
    private static final long serialVersionUID = 1L;
    public void textChange(TextChangeEvent event) {
        table.filterTable(event.getText());
    }
});
m_infoLayout.addComponent(m_siteTableFilter);
// Toolbar buttons for image statistics and cache flushing.
m_uiContext.addToolbarButton(getImageStatisticButton());
m_uiContext.addToolbarButton(CmsFlushCache.getFlushToolButton());
table.setSizeFull();
sp.setSecondComponent(secC);
sp.setSplitPosition(CmsFileExplorer.LAYOUT_SPLIT_POSITION, Unit.PIXELS);
// Table, empty-result message and filter start hidden until a search runs.
table.setVisible(false);
nullResult.setVisible(false);
m_siteTableFilter.setVisible(false);
return sp;
public class OSHelper {
    /**
     * Waits for the given external process to terminate and collects its exit
     * code, stdout and stderr into a ProcessReturn.
     *
     * @param process the already-started process to wait for
     * @return a ProcessReturn holding exit code, stdout, stderr and exit status
     * @throws OSHelperException if the wait is interrupted
     */
    public static ProcessReturn procWaitWithProcessReturn ( Process process ) throws OSHelperException { } }
ProcessReturn processReturn = new ProcessReturn();
try {
    processReturn.exitCode = process.waitFor();
} catch (InterruptedException ex) {
    throw new OSHelperException("Received an InterruptedException when waiting for an external process to terminate.", ex);
}
// NOTE(review): the streams are drained only after waitFor(); a child producing
// more output than the pipe buffer holds could block before exiting — confirm
// callers only run commands with small output.
InputStream stdoutIs = process.getInputStream();
processReturn.stdout = readInputStreamAsString(stdoutIs);
InputStream stderrIs = process.getErrorStream();
processReturn.stderr = readInputStreamAsString(stderrIs);
// The process ended on its own (we never killed it above).
processReturn.exitStatus = ProcessReturn.osh_PROCESS_EXITED_BY_ITSELF;
return processReturn;
public class ParosTableParam {
    /**
     * Inserts a new parameter record and returns it re-read from the database.
     * Synchronized because the shared prepared statements (psInsert,
     * psGetIdLastInsert) carry per-call state.
     *
     * @see org.parosproxy.paros.db.paros.TableParam#insert(java.lang.String, java.lang.String, java.lang.String, int, java.lang.String, java.lang.String)
     * @throws DatabaseException wrapping any SQLException
     */
    @ Override public synchronized RecordParam insert ( String site , String type , String name , int used , String flags , String values ) throws DatabaseException { } }
try {
    // Bind the columns in declaration order on the shared prepared statement.
    psInsert.setString(1, site);
    psInsert.setString(2, type);
    psInsert.setString(3, name);
    psInsert.setInt(4, used);
    psInsert.setString(5, flags);
    psInsert.setString(6, values);
    psInsert.executeUpdate();
    // Fetch the generated key of the row just inserted.
    long id;
    try (ResultSet rs = psGetIdLastInsert.executeQuery()) {
        rs.next();
        id = rs.getLong(1);
    }
    // Return the freshly persisted record as read back from the table.
    return read(id);
} catch (SQLException e) {
    throw new DatabaseException(e);
}
public class OpenSslSessionStats { /** * Returns the number of times a client did not present a ticket and we issued a new one */ public long ticketKeyNew ( ) { } }
Lock readerLock = context . ctxLock . readLock ( ) ; readerLock . lock ( ) ; try { return SSLContext . sessionTicketKeyNew ( context . ctx ) ; } finally { readerLock . unlock ( ) ; }
public class CmsRfsFileViewer { /** * Sets the additional root folders from which files can be viewed . < p > * @ param roots the list of additional root folders */ public void setAdditionalRoots ( List < String > roots ) { } }
List < String > additionalRoots = new ArrayList < > ( ) ; // making sure all paths end with the path separator CHAR for ( String path : roots ) { if ( path != null ) { if ( ! path . endsWith ( File . separator ) ) { path += File . separator ; } } additionalRoots . add ( path ) ; } m_additionalRoots = additionalRoots ;
public class DefaultServiceRegistry {
    /**
     * --- RECEIVE REQUEST FROM REMOTE SERVICE ---
     *
     * Handles one incoming request packet: validates it (id, sender, optional
     * protocol version), feeds streamed chunks to the matching IncomingStream,
     * resolves the local action endpoint via the invocation strategy, builds a
     * Context and invokes the action, then publishes the response (or error)
     * back to the sender — streaming the result when the action returns a
     * PacketStream.
     *
     * @param message the incoming request packet
     */
    @ Override public void receiveRequest ( Tree message ) { } }
// Get request's unique ID
String id = message.get("id", (String) null);
if (id == null || id.isEmpty()) {
    logger.warn("Missing \"id\" property!");
    return;
}
// Get sender's nodeID
String sender = message.get("sender", (String) null);
if (sender == null || sender.isEmpty()) {
    logger.warn("Missing \"sender\" property!");
    return;
}
// Verify protocol version
if (checkVersion) {
    String ver = message.get("ver", "unknown");
    if (!PROTOCOL_VERSION.equals(ver)) {
        logger.warn("Invalid protocol version (" + ver + ")!");
        transporter.publish(PACKET_RESPONSE, sender, throwableToTree(id, nodeID, new ProtocolVersionMismatchError(nodeID, PROTOCOL_VERSION, ver)));
        return;
    }
}
// Incoming stream handling: look up an already-open stream for this request ID.
IncomingStream requestStream;
requestStreamReadLock.lock();
try {
    requestStream = requestStreams.get(id);
} finally {
    requestStreamReadLock.unlock();
}
if (requestStream != null) {
    // Existing stream: feed it this chunk; drop the stream when it finishes or fails.
    boolean remove = false;
    try {
        if (requestStream.receive(message)) {
            remove = true;
        }
    } catch (Throwable error) {
        remove = true;
        // Send error
        try {
            transporter.publish(PACKET_RESPONSE, sender, throwableToTree(id, nodeID, error));
        } catch (Throwable ignored) {
            logger.debug("Unable to send response!", ignored);
        }
        // Write error to log file
        if (writeErrorsToLog) {
            logger.error("Unexpected error occurred while streaming!", error);
        }
    }
    if (remove) {
        requestStreamWriteLock.lock();
        try {
            requestStreams.remove(id);
        } finally {
            requestStreamWriteLock.unlock();
        }
    }
} else if (message.get("stream", false)) {
    // First chunk of a new stream: create and register the IncomingStream.
    requestStreamWriteLock.lock();
    try {
        requestStream = requestStreams.get(id);
        if (requestStream == null) {
            requestStream = new IncomingStream(nodeID, scheduler, streamTimeout);
            requestStreams.put(id, requestStream);
        }
    } finally {
        requestStreamWriteLock.unlock();
    }
    if (requestStream.receive(message)) {
        requestStreamWriteLock.lock();
        try {
            requestStreams.remove(id);
        } finally {
            requestStreamWriteLock.unlock();
        }
    }
}
// Get action property
String action = message.get("action", (String) null);
if (action == null || action.isEmpty()) {
    if (requestStream == null) {
        logger.warn("Missing \"action\" property!");
        transporter.publish(PACKET_RESPONSE, sender, throwableToTree(id, nodeID, new InvalidPacketDataError(nodeID)));
    }
    return;
}
if (requestStream != null && requestStream.inited()) {
    // Action method invoked (do not invoke twice)
    return;
}
// Get strategy (action endpoint array) by action name — optimistic read first.
Strategy<ActionEndpoint> strategy = null;
long stamp = lock.tryOptimisticRead();
if (stamp != 0) {
    try {
        strategy = strategies.get(action);
    } catch (Exception modified) {
        stamp = 0;
    }
}
if (!lock.validate(stamp) || stamp == 0) {
    // Optimistic read failed; fall back to a full read lock.
    stamp = lock.readLock();
    try {
        strategy = strategies.get(action);
    } finally {
        lock.unlockRead(stamp);
    }
}
if (strategy == null) {
    logger.warn("Invalid action name (" + action + ")!");
    transporter.publish(PACKET_RESPONSE, sender, throwableToTree(id, nodeID, new ServiceNotFoundError(nodeID, action)));
    return;
}
// Get local action endpoint (with cache handling)
ActionEndpoint endpoint = strategy.getEndpoint(nodeID);
if (endpoint == null) {
    logger.warn("Not a local action (" + action + ")!");
    transporter.publish(PACKET_RESPONSE, sender, throwableToTree(id, nodeID, new ServiceNotAvailableError(nodeID, action)));
    return;
}
// Process params and meta
Tree params = message.get("params");
Tree meta = message.get("meta");
if (meta != null && !meta.isEmpty()) {
    if (params == null) {
        params = new Tree();
    }
    // NOTE(review): stores params itself into its own meta container rather than
    // the received "meta" tree — looks like it should be setObject(meta); confirm.
    params.getMeta().setObject(params);
}
// Get timeout
int timeout = message.get("timeout", 0);
CallOptions.Options opts;
if (timeout > 0) {
    opts = CallOptions.timeout(timeout);
} else {
    opts = null;
}
// Get other properties
int level = message.get("level", 1);
String parentID = message.get("parentID", (String) null);
String requestID = message.get("requestID", id);
// Create context
Context ctx = contextFactory.create(action, params, opts, requestStream == null ? null : requestStream.getPacketStream(), id, level, requestID, parentID);
// Invoke action
try {
    new Promise(endpoint.handler(ctx)).then(data -> {
        // Send response
        FastBuildTree msg = new FastBuildTree(7);
        msg.putUnsafe("sender", nodeID);
        msg.putUnsafe("id", id);
        msg.putUnsafe("ver", PROTOCOL_VERSION);
        msg.putUnsafe("success", true);
        PacketStream responseStream = null;
        if (data != null) {
            Object d = data.asObject();
            if (d != null && d instanceof PacketStream) {
                // Streamed result: announce it and send the data in later packets.
                msg.putUnsafe("stream", true);
                msg.putUnsafe("seq", 0);
                responseStream = (PacketStream) d;
            } else {
                msg.putUnsafe("data", d);
            }
            Tree m = data.getMeta(false);
            if (m != null && !m.isEmpty()) {
                msg.putUnsafe("meta", m);
            }
        }
        transporter.publish(PACKET_RESPONSE, sender, msg);
        // Define sender for response stream
        if (responseStream != null) {
            responseStream.onPacket(new PacketListener() {

                // Create sequence counter
                private final AtomicLong sequence = new AtomicLong();

                @Override
                public final void onPacket(byte[] bytes, Throwable cause, boolean close) {
                    if (bytes != null) {
                        transporter.sendDataPacket(PACKET_RESPONSE, sender, ctx, bytes, sequence.incrementAndGet());
                    } else if (cause != null) {
                        if (writeErrorsToLog) {
                            logger.error("Unexpected error occured while streaming!", cause);
                        }
                        transporter.sendErrorPacket(PACKET_RESPONSE, sender, ctx, cause, sequence.incrementAndGet());
                    }
                    if (close) {
                        transporter.sendClosePacket(PACKET_RESPONSE, sender, ctx, sequence.incrementAndGet());
                    }
                }
            });
        }
    }).catchError(error -> {
        // Send error
        transporter.publish(PACKET_RESPONSE, sender, throwableToTree(id, nodeID, error));
        // Write error to log file
        if (writeErrorsToLog) {
            logger.error("Unexpected error occurred while invoking \"" + action + "\" action!", error);
        }
    });
} catch (Throwable error) {
    // Send error
    transporter.publish(PACKET_RESPONSE, sender, throwableToTree(id, nodeID, error));
    // Write error to log file
    if (writeErrorsToLog) {
        logger.error("Unexpected error occurred while invoking \"" + action + "\" action!", error);
    }
}
public class PatternWrapper {
    /**
     * Advance the state of this pattern.
     *
     * Moves the cursor (next) forward through the prefix clause; when the prefix
     * is exhausted (or absent) it switches to the suffix clause and from then on
     * walks the suffix backwards. Entry/exit tracing is emitted when enabled.
     */
    void advance ( ) { } }
if (tc.isEntryEnabled())
    tc.entry(this, cclass, "advance");
if (!prefixDone) {
    Pattern.Clause prefix = pattern.getPrefix();
    if (prefix == null || next >= prefix.items.length) {
        Pattern.Clause suffix = pattern.getSuffix();
        if (suffix != null && suffix != prefix) {
            // SWITCH_TO_SUFFIX
            prefixDone = true;
            next = suffix.items.length - 1;
        }
        // else in one of the FINAL states, so do nothing
    } else
        // somewhere in the body of the prefix, so advance by incrementing
        next++;
} else
    // somewhere in the suffix, so advance by decrementing
    next--;
if (tc.isEntryEnabled())
    tc.exit(this, cclass, "advance");