signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class DoubleStreamEx { /** * Returns an { @ link EntryStream } consisting of the { @ link Entry } objects * which keys and values are results of applying the given functions to the * elements of this stream . * This is an intermediate operation . * @ param < K > The { @ code Entry } key type * @ param < V > The { @ code Entry } value type * @ param keyMapper a non - interfering , stateless function to apply to each * element * @ param valueMapper a non - interfering , stateless function to apply to each * element * @ return the new stream * @ since 0.3.1 */ public < K , V > EntryStream < K , V > mapToEntry ( DoubleFunction < ? extends K > keyMapper , DoubleFunction < ? extends V > valueMapper ) { } }
return new EntryStream < > ( stream ( ) . mapToObj ( t -> new AbstractMap . SimpleImmutableEntry < > ( keyMapper . apply ( t ) , valueMapper . apply ( t ) ) ) , context ) ;
public class Expect4j { /** * TODO * @ param matches TODO * @ return TODO */ protected TimeoutMatch findTimeout ( Match matches [ ] ) { } }
TimeoutMatch ourTimeout = null ; for ( int i = 0 ; i < matches . length ; i ++ ) { if ( matches [ i ] instanceof TimeoutMatch ) ourTimeout = ( TimeoutMatch ) matches [ i ] ; } /* TODO : candidate for removal ? if ( ourTimeout = = null ) { / / Have to create our own ourTimeout = new TimeoutMatch ( null ) ; */ return ourTimeout ;
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - > * < ! - - end - user - doc - - > * @ generated */ public IfcPileConstructionEnum createIfcPileConstructionEnumFromString ( EDataType eDataType , String initialValue ) { } }
IfcPileConstructionEnum result = IfcPileConstructionEnum . get ( initialValue ) ; if ( result == null ) throw new IllegalArgumentException ( "The value '" + initialValue + "' is not a valid enumerator of '" + eDataType . getName ( ) + "'" ) ; return result ;
public class Flowable {
    /**
     * Returns a Single that emits a single HashMap containing values corresponding to items emitted by the
     * finite source Publisher, mapped by the keys returned by a specified {@code keySelector} function.
     * <p>
     * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/toMap.png" alt="">
     * <p>
     * If more than one source item maps to the same key, the HashMap will contain a single entry that
     * corresponds to the latest of those items.
     * <p>
     * Note that this operator requires the upstream to signal {@code onComplete} for the accumulated map to
     * be emitted. Sources that are infinite and never complete will never emit anything through this
     * operator and an infinite source may lead to a fatal {@code OutOfMemoryError}.
     * <dl>
     *  <dt><b>Backpressure:</b></dt>
     *  <dd>The operator honors backpressure from downstream and consumes the source {@code Publisher} in an
     *  unbounded manner (i.e., without applying backpressure to it).</dd>
     *  <dt><b>Scheduler:</b></dt>
     *  <dd>{@code toMap} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <K> the key type of the Map
     * @param <V> the value type of the Map
     * @param keySelector
     *            the function that extracts the key from a source item to be used in the HashMap
     * @param valueSelector
     *            the function that extracts the value from a source item to be used in the HashMap
     * @return a Single that emits a single item: a HashMap containing the mapped items from the source
     *         Publisher
     * @see <a href="http://reactivex.io/documentation/operators/to.html">ReactiveX operators documentation: To</a>
     */
    @CheckReturnValue
    @BackpressureSupport(BackpressureKind.UNBOUNDED_IN)
    @SchedulerSupport(SchedulerSupport.NONE)
    public final <K, V> Single<Map<K, V>> toMap(final Function<? super T, ? extends K> keySelector, final Function<? super T, ? extends V> valueSelector) {
        // Fail fast on null selectors before subscribing to the upstream.
        ObjectHelper.requireNonNull(keySelector, "keySelector is null");
        ObjectHelper.requireNonNull(valueSelector, "valueSelector is null");
        // Accumulate into a HashMap supplied per subscription; later values overwrite
        // earlier ones for duplicate keys (HashMap.put semantics).
        return collect(HashMapSupplier.<K, V>asCallable(), Functions.toMapKeyValueSelector(keySelector, valueSelector));
    }
}
public class JwtBuilder { /** * 校验token是否合法 * @ param token token的值 * @ return true : 合法 , false : 非法 */ @ SuppressWarnings ( "all" ) public JwtAuth getJwtAuth ( String token ) { } }
Claims claims = this . getClaimsFromToken ( token ) ; List < String > roles = ( List < String > ) claims . get ( CLAIM_KEY_ROLE ) ; return new JwtAuth ( ( String ) claims . get ( CLAIM_KEY_ID ) , ( String ) claims . get ( CLAIM_KEY_LOGIN_NAME ) , ( String ) claims . get ( CLAIM_KEY_MICK_NAME ) , ( String ) claims . get ( CLAIM_KEY_AVATAR ) , Lists . iterable ( roles ) ? roles : Lists . newArrayList ( ) ) ;
public class BoxUser { /** * A convenience method to create an empty user with just the id and type fields set . This allows * the ability to interact with the content sdk in a more descriptive and type safe manner * @ param userId the id of user to create * @ return an empty BoxUser object that only contains id and type information */ public static BoxUser createFromId ( String userId ) { } }
JsonObject object = new JsonObject ( ) ; object . add ( BoxCollaborator . FIELD_ID , userId ) ; object . add ( BoxCollaborator . FIELD_TYPE , BoxUser . TYPE ) ; BoxUser user = new BoxUser ( ) ; user . createFromJson ( object ) ; return user ;
public class ChainedAllReduceDriver { @ Override public void setup ( AbstractInvokable parent ) { } }
@ SuppressWarnings ( "unchecked" ) final ReduceFunction < IT > red = BatchTask . instantiateUserCode ( this . config , userCodeClassLoader , ReduceFunction . class ) ; this . reducer = red ; FunctionUtils . setFunctionRuntimeContext ( red , getUdfRuntimeContext ( ) ) ; TypeSerializerFactory < IT > serializerFactory = this . config . getInputSerializer ( 0 , userCodeClassLoader ) ; this . serializer = serializerFactory . getSerializer ( ) ; if ( LOG . isDebugEnabled ( ) ) { LOG . debug ( "ChainedAllReduceDriver object reuse: " + ( this . objectReuseEnabled ? "ENABLED" : "DISABLED" ) + "." ) ; }
public class EnglishAndChineseHeadRules { /** * Head finder using English NP rules and the Chinese head table as defined in * Honglin Sun and Daniel Jurafsky . 2004 . Shallow Semantic Parsing of Chinese . In North American * Chapter of the ACL : Human Language Technologies ( NAACL - HLT ) , pages 249256 , Boston , MA . */ public static < NodeT extends ConstituentNode < NodeT , ? > > HeadFinder < NodeT > createChinesePTBFromResources ( ) throws IOException { } }
final boolean headInitial = true ; final CharSource resource = Resources . asCharSource ( EnglishAndChineseHeadRules . class . getResource ( "ch_heads.sun.txt" ) , Charsets . UTF_8 ) ; final ImmutableMap < Symbol , HeadRule < NodeT > > headRules = headRulesFromResources ( headInitial , resource ) ; return MapHeadFinder . create ( headRules ) ;
public class HelperFunctions { /** * Determine the type , old or young , based on the name of the collector . */ static GcType getGcType ( String name ) { } }
GcType t = KNOWN_COLLECTOR_NAMES . get ( name ) ; return ( t == null ) ? GcType . UNKNOWN : t ;
public class XAttributeUtils { /** * Composes the appropriate attribute type from the string - based information * found , e . g . , in XML serializations . * @ param factory * Factory to use for creating the attribute . * @ param key * Key of the attribute . * @ param value * Value of the attribute . * @ param type * Type string of the attribute . * @ param extension * Extension of the attribute ( can be < code > null < / code > ) . * @ return An appropriate attribute . */ public static XAttribute composeAttribute ( XFactory factory , String key , String value , String type , XExtension extension ) { } }
type = type . trim ( ) ; if ( type . equalsIgnoreCase ( "LIST" ) ) { XAttributeList attr = factory . createAttributeList ( key , extension ) ; return attr ; } else if ( type . equalsIgnoreCase ( "CONTAINER" ) ) { XAttributeContainer attr = factory . createAttributeContainer ( key , extension ) ; return attr ; } else if ( type . equalsIgnoreCase ( "LITERAL" ) ) { XAttributeLiteral attr = factory . createAttributeLiteral ( key , value , extension ) ; return attr ; } else if ( type . equalsIgnoreCase ( "BOOLEAN" ) ) { XAttributeBoolean attr = factory . createAttributeBoolean ( key , Boolean . parseBoolean ( value ) , extension ) ; return attr ; } else if ( type . equalsIgnoreCase ( "CONTINUOUS" ) ) { XAttributeContinuous attr = factory . createAttributeContinuous ( key , Double . parseDouble ( value ) , extension ) ; return attr ; } else if ( type . equalsIgnoreCase ( "DISCRETE" ) ) { XAttributeDiscrete attr = factory . createAttributeDiscrete ( key , Long . parseLong ( value ) , extension ) ; return attr ; } else if ( type . equalsIgnoreCase ( "TIMESTAMP" ) ) { XAttributeTimestamp attr ; try { synchronized ( XAttributeTimestamp . FORMATTER ) { attr = factory . createAttributeTimestamp ( key , XAttributeTimestamp . FORMATTER . parseObject ( value ) , extension ) ; } } catch ( ParseException e ) { throw new AssertionError ( "OpenXES: could not parse date-time attribute. Value: " + value ) ; } return attr ; } else if ( type . equalsIgnoreCase ( "ID" ) ) { XAttributeID attr = factory . createAttributeID ( key , XID . parse ( value ) , extension ) ; return attr ; } else { throw new AssertionError ( "OpenXES: could not parse attribute type!" ) ; }
public class TargetValidator { /** * Parses the target properties for a given component . * @ param projectDirectory the project ' s directory * @ param c a component * @ return a non - null list of errors */ public static List < ModelError > parseTargetProperties ( File projectDirectory , Component c ) { } }
List < ModelError > errors ; File dir = ResourceUtils . findInstanceResourcesDirectory ( projectDirectory , c ) ; if ( dir . isDirectory ( ) && ! Utils . listAllFiles ( dir , Constants . FILE_EXT_PROPERTIES ) . isEmpty ( ) ) errors = parseDirectory ( dir , c ) ; else errors = new ArrayList < > ( 0 ) ; return errors ;
public class FileOutputCommitter { /** * Mark the output dir of the job for which the context is passed . */ private void markOutputDirSuccessful ( JobContext context ) throws IOException { } }
if ( outputPath != null ) { FileSystem fileSys = outputPath . getFileSystem ( context . getConfiguration ( ) ) ; if ( fileSys . exists ( outputPath ) ) { // create a file in the folder to mark it Path filePath = new Path ( outputPath , SUCCEEDED_FILE_NAME ) ; fileSys . create ( filePath ) . close ( ) ; } }
public class Identifier { /** * Converts identifier to a byte array * @ param bigEndian true if bytes are MSB first * @ return a new byte array with a copy of the value */ @ TargetApi ( Build . VERSION_CODES . GINGERBREAD ) public byte [ ] toByteArrayOfSpecifiedEndianness ( boolean bigEndian ) { } }
byte [ ] copy = Arrays . copyOf ( mValue , mValue . length ) ; if ( ! bigEndian ) { reverseArray ( copy ) ; } return copy ;
public class BindTypeBuilder { /** * Generate parser on xml end element . * @ param context * the context * @ param methodBuilder * the method builder * @ param instanceName * the instance name * @ param parserName * the parser name * @ param entity * the entity */ private static void generateParserOnXmlEndElement ( BindTypeContext context , MethodSpec . Builder methodBuilder , String instanceName , String parserName , BindEntity entity ) { } }
methodBuilder . beginControlFlow ( "if (elementName.equals($L.getName()))" , parserName ) ; methodBuilder . addStatement ( "currentTag = elementName" ) ; methodBuilder . addStatement ( "elementName = null" ) ; methodBuilder . endControlFlow ( ) ;
public class PubSubInputHandler { /** * ( non - Javadoc ) * @ see com . ibm . ws . sib . processor . impl . interfaces . MessageDeliverer # checkAbleToAcceptMessage */ @ Override public int checkAbleToAcceptMessage ( JsDestinationAddress addr ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "checkAbleToAcceptMessage" , addr ) ; int blockingReason = DestinationHandler . OUTPUT_HANDLER_FOUND ; // we check the remoteQueueHighLimit in this case . // See defect 281311 boolean canAccept = ! _itemStream . isRemoteQueueHighLimit ( ) ; if ( ! canAccept ) { blockingReason = DestinationHandler . OUTPUT_HANDLER_ALL_HIGH_LIMIT ; // 117505 long destHighMsg = _itemStream . getDestHighMsgs ( ) ; SibTr . info ( tc , "NOTIFY_DEPTH_THRESHOLD_REACHED_CWSIP0553" , new Object [ ] { _destination . getName ( ) , _messageProcessor . getMessagingEngineName ( ) , destHighMsg } ) ; } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "checkAbleToAcceptMessage" , Integer . valueOf ( blockingReason ) ) ; return blockingReason ;
public class LocalHostAddressFunction { /** * { @ inheritDoc } */ public String execute ( List < String > parameterList , TestContext context ) { } }
if ( ! parameterList . isEmpty ( ) ) { throw new InvalidFunctionUsageException ( "Unexpected parameter for function." ) ; } try { return InetAddress . getLocalHost ( ) . getHostName ( ) ; } catch ( UnknownHostException e ) { throw new CitrusRuntimeException ( "Unable to locate local host address" , e ) ; }
public class JQMCommon { /** * Expensive , based on jQuery , realistic visibility check . */ public static boolean isRealHidden ( Widget widget ) { } }
if ( widget == null || ! widget . isAttached ( ) ) return true ; Element elt = widget . getElement ( ) ; return ! UIObject . isVisible ( elt ) || Mobile . isHidden ( elt ) ;
public class VdmLaunchConfigurationDelegate {
    /**
     * Generates the command-line arguments for the debugger and creates the
     * debug target attached to the launch.
     *
     * @param launch the launch to attach the debug target to
     * @param configuration the launch configuration supplying all settings
     * @param mode {@code ILaunchManager.DEBUG_MODE} or {@code RUN_MODE}
     * @param monitor progress monitor used while type-checking the project
     * @return the full command list ("java", engine class, VM args, then debugger args)
     * @throws CoreException if the project is missing/has type errors or the DBGP service is unavailable
     */
    private List<String> initializeLaunch(ILaunch launch, ILaunchConfiguration configuration, String mode, IProgressMonitor monitor) throws CoreException {
        List<String> commandList = null;
        Integer debugSessionId = Integer.valueOf(getSessionId());
        // Remote debugging always uses the fixed session id 1.
        if (useRemoteDebug(configuration)) {
            debugSessionId = 1;
        }
        commandList = new ArrayList<String>();
        IVdmProject vdmProject = getVdmProject(configuration);
        Assert.isNotNull(vdmProject, " Project not found: " + configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_PROJECT, ""));
        // NOTE(review): the vdmProject == null arm is unreachable — Assert.isNotNull
        // above already throws for null. Only the type-check failure can trigger abort.
        if (vdmProject == null || !VdmTypeCheckerUi.typeCheck(vdmProject, monitor)) {
            abort("Cannot launch a project (" + vdmProject + ") with type errors, please check the problems view", null);
        }
        String charSet = getProject(configuration).getDefaultCharset();
        // Debugger connection endpoint: host and port.
        commandList.add("-h");
        commandList.add("localhost");
        commandList.add("-p");
        int port = VdmDebugPlugin.getDefault().getDbgpService().getPort();
        // Hook for external tools to direct the debugger to listen on a specific port
        int overridePort = configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_OVERRIDE_PORT, IDebugPreferenceConstants.DBGP_AVAILABLE_PORT);
        if (overridePort != IDebugPreferenceConstants.DBGP_AVAILABLE_PORT) {
            port = VdmDebugPlugin.getDefault().getDbgpService(overridePort).getPort();
        }
        commandList.add(Integer.valueOf(port).toString());
        // Session key, warnings/quiet flags, dialect and language version.
        commandList.add("-k");
        commandList.add(debugSessionId.toString());
        commandList.add("-w");
        commandList.add("-q");
        commandList.add(vdmProject.getDialect().getArgstring());
        commandList.add("-r");
        commandList.add(vdmProject.getLanguageVersionName());
        // Disable-interpreter-check flags: each flag is emitted when the
        // corresponding check is switched OFF in the configuration.
        if (!configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_PRE_CHECKS, true)) // vdmProject.hasPrechecks())
        {
            commandList.add("-pre");
        }
        if (!configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_POST_CHECKS, true)) // vdmProject.hasPostchecks())
        {
            commandList.add("-post");
        }
        if (!configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_INV_CHECKS, true)) // vdmProject.hasInvchecks())
        {
            commandList.add("-inv");
        }
        if (!configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_DTC_CHECKS, true)) // vdmProject.hasDynamictypechecks())
        {
            commandList.add("-dtc");
        }
        if (!configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_MEASURE_CHECKS, true)) // vdmProject.hasMeasurechecks())
        {
            commandList.add("-measures");
        }
        commandList.add("-c");
        commandList.add(charSet);
        // Expression to evaluate: base64-encoded when run locally; a dummy
        // value is passed when a remote controller drives the session.
        if (!isRemoteControllerEnabled(configuration)) {
            commandList.add("-e64");
            commandList.add(getExpressionBase64(configuration, charSet));
            String default64 = getDefaultBase64(configuration, charSet);
            if (default64.trim().length() > 0) {
                commandList.add("-default64");
                commandList.add(getDefaultBase64(configuration, charSet));
            }
        } else {
            // temp fix for commanline args of dbgreader
            commandList.add("-e64");
            commandList.add(Base64.encode("A".getBytes()).toString());
        }
        if (isRemoteControllerEnabled(configuration)) {
            commandList.add("-remote");
            commandList.add(getRemoteControllerName(configuration));
        }
        if (hasTrace(configuration)) {
            commandList.add("-t");
        }
        commandList.add("-consoleName");
        commandList.add("LaunchConfigurationExpression");
        commandList.addAll(getExtendedCommands(vdmProject, configuration));
        commandList.add("-baseDir");
        commandList.add(getProject(configuration).getLocationURI().toASCIIString());
        commandList.addAll(getSpecFiles(vdmProject));
        if (useRemoteDebug(configuration)) {
            System.out.println("Debugger Arguments:\n" + getArgumentString(commandList));
        }
        // Prepend the JVM invocation: "java" <vm args> <engine class> <debugger args>.
        commandList.add(0, "java");
        commandList.add(1, IDebugConstants.DEBUG_ENGINE_CLASS);
        if (configuration.getAttribute(IDebugConstants.VDM_LAUNCH_CONFIG_SHOW_VM_SETTINGS, false)) {
            commandList.addAll(1, Arrays.asList(new String[] { "-XshowSettings:all" }));
        }
        commandList.addAll(1, getVmArguments(configuration));
        if (useRemoteDebug(configuration)) {
            System.out.println("Full Debugger Arguments:\n" + getArgumentString(commandList));
        }
        VdmDebugTarget target = null;
        // NOTE(review): the debug-mode and run-mode branches below are identical
        // except for the breakpoint-manager enabled flag — a candidate for a
        // shared helper. Unknown modes silently create no target.
        // Debug mode
        if (mode.equals(ILaunchManager.DEBUG_MODE)) {
            IDbgpService service = VdmDebugPlugin.getDefault().getDbgpService();
            if (!service.available()) {
                abort("Could not create DBGP Service", null);
            }
            DebugPlugin.getDefault().getBreakpointManager().setEnabled(true);
            target = new VdmDebugTarget(IDebugConstants.ID_VDM_DEBUG_MODEL, service, debugSessionId.toString(), launch, null);
            target.setVdmProject(vdmProject);
            launch.addDebugTarget(target);
            target.toggleClassVariables(true);
            target.toggleGlobalVariables(true);
            target.toggleLocalVariables(true);
        }
        // Run mode
        else if (mode.equals(ILaunchManager.RUN_MODE)) {
            IDbgpService service = VdmDebugPlugin.getDefault().getDbgpService();
            if (!service.available()) {
                abort("Could not create DBGP Service", null);
            }
            DebugPlugin.getDefault().getBreakpointManager().setEnabled(false);
            target = new VdmDebugTarget(IDebugConstants.ID_VDM_DEBUG_MODEL, service, debugSessionId.toString(), launch, null);
            target.setVdmProject(vdmProject);
            launch.addDebugTarget(target);
            target.toggleClassVariables(true);
            target.toggleGlobalVariables(true);
            target.toggleLocalVariables(true);
        }
        return commandList;
    }
}
public class SortaServiceImpl {
    /**
     * A helper function to calculate the best NGram score from a list of
     * ontology-term synonyms.
     *
     * @param ontologyIri IRI of the ontology the term belongs to (used for the
     *            information-content based score redistribution)
     * @param queryString the user query to match against the synonyms
     * @param ontologyTermEntity the term whose synonyms are scored
     * @return the best-matching synonym entity carrying SCORE and
     *         COMBINED_SCORE attributes, or {@code null} when the term has no
     *         synonyms
     */
    private Entity findSynonymWithHighestNgramScore(String ontologyIri, String queryString, Entity ontologyTermEntity) {
        Iterable<Entity> entities = ontologyTermEntity.getEntities(OntologyTermMetadata.ONTOLOGY_TERM_SYNONYM);
        if (Iterables.size(entities) > 0) {
            String cleanedQueryString = removeIllegalCharWithSingleWhiteSpace(queryString);
            // Calculate the n-gram similarity score for every synonym and sort
            // them in descending score order.
            List<Entity> synonymEntities = FluentIterable.from(entities).transform(ontologyTermSynonymEntity -> {
                Entity mapEntity = ontologyTermSynonymFactory.create();
                mapEntity.set(ontologyTermSynonymEntity);
                String ontologyTermSynonym = removeIllegalCharWithSingleWhiteSpace(ontologyTermSynonymEntity.getString(OntologyTermSynonymMetadata.ONTOLOGY_TERM_SYNONYM_ATTR));
                mapEntity.set(SCORE, NGramDistanceAlgorithm.stringMatching(cleanedQueryString, ontologyTermSynonym));
                return mapEntity;
            }).toSortedList((entity1, entity2) -> entity2.getDouble(SCORE).compareTo(entity1.getDouble(SCORE)));
            Entity firstMatchedSynonymEntity = Iterables.getFirst(synonymEntities, ontologyTermSynonymFactory.create());
            double topNgramScore = firstMatchedSynonymEntity.getDouble(SCORE);
            String topMatchedSynonym = firstMatchedSynonymEntity.getString(OntologyTermSynonymMetadata.ONTOLOGY_TERM_SYNONYM_ATTR);
            // Greedy synonym-combination pass: the query may contain words from
            // several synonyms of the same term (e.g. "propotosis, protruding
            // eyeball, Exophthalmos" holds three synonyms of one term), in which
            // case each individual synonym scores low (~25%). Starting from the
            // best synonym, append each further synonym in turn and keep the
            // concatenation whenever it raises the score:
            //   if (score(best + next, query) > score(best)) combine
            //   else move on to the next synonym
            for (Entity nextMatchedSynonymEntity : Iterables.skip(synonymEntities, 1)) {
                String nextMatchedSynonym = nextMatchedSynonymEntity.getString(OntologyTermSynonymMetadata.ONTOLOGY_TERM_SYNONYM_ATTR);
                StringBuilder tempCombinedSynonym = new StringBuilder();
                tempCombinedSynonym.append(topMatchedSynonym).append(SINGLE_WHITESPACE).append(nextMatchedSynonym);
                double newScore = NGramDistanceAlgorithm.stringMatching(cleanedQueryString, removeIllegalCharWithSingleWhiteSpace(tempCombinedSynonym.toString()));
                if (newScore > topNgramScore) {
                    topNgramScore = newScore;
                    topMatchedSynonym = tempCombinedSynonym.toString();
                }
            }
            firstMatchedSynonymEntity.set(OntologyTermSynonymMetadata.ONTOLOGY_TERM_SYNONYM_ATTR, topMatchedSynonym);
            firstMatchedSynonymEntity.set(SCORE, topNgramScore);
            firstMatchedSynonymEntity.set(COMBINED_SCORE, topNgramScore);
            // Adjust the similarity score by the inverse document frequency of
            // the query words: rare (informative) words weigh more than common
            // ones, so COMBINED_SCORE is boosted for each stemmed query word
            // that also occurs in the winning synonym.
            Map<String, Double> weightedWordSimilarity = informationContentService.redistributedNGramScore(cleanedQueryString, ontologyIri);
            Set<String> synonymStemmedWords = informationContentService.createStemmedWordSet(topMatchedSynonym);
            Set<String> createStemmedWordSet = informationContentService.createStemmedWordSet(cleanedQueryString);
            createStemmedWordSet.stream().filter(originalWord -> Iterables.contains(synonymStemmedWords, originalWord) && weightedWordSimilarity.containsKey(originalWord)).forEach(word -> firstMatchedSynonymEntity.set(COMBINED_SCORE, (firstMatchedSynonymEntity.getDouble(COMBINED_SCORE) + weightedWordSimilarity.get(word))));
            return firstMatchedSynonymEntity;
        }
        return null;
    }
}
public class AbstractLinear { /** * Returns the background image with the currently active backgroundcolor * with the given width and height . * @ param WIDTH * @ param HEIGHT * @ param image * @ return buffered image containing the background with the selected background design */ protected BufferedImage create_BACKGROUND_Image ( final int WIDTH , final int HEIGHT , BufferedImage image ) { } }
if ( WIDTH <= 0 || HEIGHT <= 0 ) { return UTIL . createImage ( 1 , 1 , Transparency . TRANSLUCENT ) ; } if ( image == null ) { image = UTIL . createImage ( WIDTH , HEIGHT , Transparency . TRANSLUCENT ) ; } final Graphics2D G2 = image . createGraphics ( ) ; G2 . setRenderingHint ( RenderingHints . KEY_ANTIALIASING , RenderingHints . VALUE_ANTIALIAS_ON ) ; G2 . setRenderingHint ( RenderingHints . KEY_STROKE_CONTROL , RenderingHints . VALUE_STROKE_NORMALIZE ) ; final int IMAGE_WIDTH = image . getWidth ( ) ; final int IMAGE_HEIGHT = image . getHeight ( ) ; // Draw the background image BACKGROUND_FACTORY . createLinearBackground ( WIDTH , HEIGHT , getBackgroundColor ( ) , getModel ( ) . getCustomBackground ( ) , getModel ( ) . getTextureColor ( ) , image ) ; // Draw the custom layer if selected if ( isCustomLayerVisible ( ) ) { G2 . drawImage ( UTIL . getScaledInstance ( getCustomLayer ( ) , IMAGE_WIDTH , IMAGE_HEIGHT , RenderingHints . VALUE_INTERPOLATION_BICUBIC ) , 0 , 0 , null ) ; } G2 . dispose ( ) ; return image ;
public class StringValue { /** * Sets the contents of this string to the contents of the given < tt > CharBuffer < / tt > . * The characters between the buffer ' s current position ( inclusive ) and the buffer ' s * limit ( exclusive ) will be stored in this string . * @ param buffer The character buffer to read the characters from . */ public void setValue ( CharBuffer buffer ) { } }
checkNotNull ( buffer ) ; final int len = buffer . length ( ) ; ensureSize ( len ) ; buffer . get ( this . value , 0 , len ) ; this . len = len ; this . hashCode = 0 ;
public class CmsAvailabilityDialog { /** * Initializes the values for the notification widgets . < p > */ public void initNotification ( ) { } }
if ( m_dialogContext . getResources ( ) . size ( ) == 1 ) { CmsResource resource = m_dialogContext . getResources ( ) . get ( 0 ) ; try { m_availabilityInfo = getAvailabilityInfo ( A_CmsUI . getCmsObject ( ) , resource ) ; m_initialNotificationInterval = "" + m_availabilityInfo . getNotificationInterval ( ) ; m_initialNotificationEnabled = Boolean . valueOf ( m_availabilityInfo . isNotificationEnabled ( ) ) ; } catch ( CmsLoaderException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } catch ( CmsException e ) { LOG . error ( e . getLocalizedMessage ( ) , e ) ; } }
public class ViewMetadata { /** * < p class = " changed _ added _ 2_2 " > Utility method to determine if the * the provided { @ link UIViewRoot } has metadata . The default implementation will * return true if the provided { @ code UIViewRoot } has a facet * named { @ link UIViewRoot # METADATA _ FACET _ NAME } and that facet has children . * It will return false otherwise . < / p > * @ param root the { @ link UIViewRoot } from which the metadata will * be extracted from * @ return true if the view has metadata , false otherwise . */ public static boolean hasMetadata ( UIViewRoot root ) { } }
boolean result = false ; UIComponent metadataFacet = root . getFacet ( UIViewRoot . METADATA_FACET_NAME ) ; if ( null != metadataFacet ) { result = 0 < metadataFacet . getChildCount ( ) ; } return result ;
public class LdapTemplate {
    /**
     * {@inheritDoc}
     */
    @Override
    public <T> List<T> search(String base, String filter, ContextMapper<T> mapper) {
        // Delegate to the scope-aware overload using the template's default search scope.
        return search(base, filter, defaultSearchScope, mapper);
    }
}
public class JSTypeRegistry {
    /**
     * Creates a templatized instance of the specified type. Only ObjectTypes
     * can currently be templatized; extend the logic in this function when
     * more types can be templatized.
     *
     * @param baseType the type to be templatized.
     * @param templatizedTypes a list of the template JSTypes. Will be matched by
     *     list order to the template keys on the base type.
     */
    public TemplatizedType createTemplatizedType(ObjectType baseType, JSType... templatizedTypes) {
        // Snapshot the varargs into an immutable list and delegate to the list overload.
        return createTemplatizedType(baseType, ImmutableList.copyOf(templatizedTypes));
    }
}
public class SimpleQueryEsSink { /** * ( non - Javadoc ) * @ see com . sematext . ag . sink . Sink # write ( com . sematext . ag . Event ) */ @ Override public boolean write ( SimpleSearchEvent event ) { } }
HttpGet httpGet = new HttpGet ( esBaseUrl + ES_QUERY_TEMPLATE . replace ( "${INDEX_NAME}" , indexName ) . replace ( "${QUERY_STRING}" , event . getQueryString ( ) . replace ( " " , "+" ) ) ) ; LOG . info ( "Sending ES search event " + httpGet . getRequestLine ( ) ) ; return execute ( httpGet ) ;
public class Converter {
    /**
     * Transforms different objects (e.g. {@link BigDecimal}, {@link Boolean},
     * {@link Integer}) to an {@link Integer}.
     *
     * <p>Note: the previous javadoc claimed a double was returned; the result
     * has always been an {@code Integer}.
     *
     * @param value the object to transform; must not be {@code null}
     * @return the integer value; {@code true} maps to 1 and {@code false} to 0,
     *         other numbers are truncated to their int value
     * @throws ClassCastException if the value is of an unsupported type
     * @throws NullPointerException if the value is {@code null}
     */
    public static Integer asInteger(Object value) {
        // Preserve the historical NPE on null (the old code dereferenced
        // value.getClass()).
        Objects.requireNonNull(value, "value");
        if (value instanceof Boolean) {
            return ((Boolean) value) ? 1 : 0;
        }
        // Covers BigDecimal and Integer as before, and generalizes to any
        // other Number (previously a ClassCastException) via truncation.
        if (value instanceof Number) {
            return ((Number) value).intValue();
        }
        // Anything else fails with a ClassCastException, as before.
        return (Integer) value;
    }
}
public class PasswordPolicyService { /** * Returns whether the given password matches any of the user ' s previous * passwords . Regardless of the value specified here , the maximum number of * passwords involved in this check depends on how many previous passwords * were actually recorded , which depends on the password policy . * @ param password * The password to check . * @ param username * The username of the user whose history should be compared against * the given password . * @ param historySize * The maximum number of history records to compare the password * against . * @ return * true if the given password matches any of the user ' s previous * passwords , up to the specified limit , false otherwise . */ private boolean matchesPreviousPasswords ( String password , String username , int historySize ) { } }
// No need to compare if no history is relevant if ( historySize <= 0 ) return false ; // Check password against all recorded hashes List < PasswordRecordModel > history = passwordRecordMapper . select ( username , historySize ) ; for ( PasswordRecordModel record : history ) { byte [ ] hash = encryptionService . createPasswordHash ( password , record . getPasswordSalt ( ) ) ; if ( Arrays . equals ( hash , record . getPasswordHash ( ) ) ) return true ; } // No passwords match return false ;
public class DnsClient {
    /**
     * Resolves a domain name to its IP addresses.
     *
     * <p>Resolution order: the local hosts table (when {@code hostsFirst}),
     * then the record cache, then each configured resolver in rotation, and
     * finally the hosts table again as a fallback.
     *
     * @param domain the domain to resolve
     * @return the list of IPs for the domain
     * @throws IOException on network failure or when the name cannot be resolved
     */
    private String[] queryInternal(Domain domain) throws IOException {
        Record[] records = null;
        // Optionally consult the local hosts table before anything else.
        if (domain.hostsFirst) {
            String[] ret = hosts.query(domain.domain);
            if (ret != null && ret.length != 0) {
                return ret;
            }
        }
        synchronized (cache) {
            if (Network.isNetworkChanged()) {
                // Network changed: cached answers and the preferred-resolver
                // index are both stale.
                cache.clear();
                synchronized (resolvers) {
                    index = 0;
                }
            } else {
                records = cache.get(domain.domain);
                if (records != null && records.length != 0) {
                    if (!records[0].isExpired()) {
                        // Fresh cache hit: rotate for round-robin answers.
                        rotate(records);
                        return records2Ip(records);
                    } else {
                        records = null;
                    }
                }
            }
        }
        IOException lastE = null;
        // Try resolvers starting from the last known-good one.
        int firstOk = index;
        for (int i = 0; i < resolvers.length; i++) {
            int pos = (firstOk + i) % resolvers.length;
            // Snapshot the local IP so a mid-query network switch does not
            // unfairly demote the resolver.
            String ip = Network.getIp();
            try {
                records = resolvers[pos].resolve(domain);
            } catch (DomainNotOwn e) {
                // This resolver is not authoritative for the domain; try the next.
                continue;
            } catch (IOException e) {
                // NOTE(review): printStackTrace here — consider routing through a logger.
                lastE = e;
                e.printStackTrace();
            } catch (Exception e) {
                lastE = new IOException(e);
                e.printStackTrace();
            }
            String ip2 = Network.getIp();
            if ((records == null || records.length == 0) && ip.equals(ip2)) {
                // Resolver failed while the network stayed stable: advance the
                // shared preferred-resolver index (only if nobody else already did).
                synchronized (resolvers) {
                    if (index == firstOk) {
                        index++;
                        if (index == resolvers.length) {
                            index = 0;
                        }
                    }
                }
            } else {
                break;
            }
        }
        if (records == null || records.length == 0) {
            // Fall back to the hosts table when it was not tried up front.
            if (!domain.hostsFirst) {
                String[] rs = hosts.query(domain.domain);
                if (rs != null && rs.length != 0) {
                    return rs;
                }
            }
            // Surface the last resolver error, or an UnknownHostException when
            // every resolver simply returned nothing.
            if (lastE != null) {
                throw lastE;
            }
            throw new UnknownHostException(domain.domain);
        }
        // Strip CNAME indirections; only address records remain.
        records = trimCname(records);
        if (records.length == 0) {
            throw new UnknownHostException("no A records");
        }
        synchronized (cache) {
            cache.put(domain.domain, records);
        }
        return records2Ip(records);
    }
}
public class FutureConverter {
    /**
     * Converts a Spring 4 {@link org.springframework.util.concurrent.ListenableFuture}
     * to a Guava {@link com.google.common.util.concurrent.ListenableFuture}.
     *
     * @param springListenableFuture the Spring future to adapt
     * @param <T> the result type carried by the future
     * @return an equivalent Guava listenable future
     */
    public static <T> com.google.common.util.concurrent.ListenableFuture<T> toGuavaListenableFuture(ListenableFuture<T> springListenableFuture) {
        // Two-step adaptation: Spring future -> neutral value source -> Guava future.
        return GuavaFutureUtils.createListenableFuture(SpringFutureUtils.createValueSourceFuture(springListenableFuture));
    }
}
public class CommerceTaxFixedRatePersistenceImpl { /** * Returns the last commerce tax fixed rate in the ordered set where CPTaxCategoryId = & # 63 ; . * @ param CPTaxCategoryId the cp tax category ID * @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > ) * @ return the last matching commerce tax fixed rate * @ throws NoSuchTaxFixedRateException if a matching commerce tax fixed rate could not be found */ @ Override public CommerceTaxFixedRate findByCPTaxCategoryId_Last ( long CPTaxCategoryId , OrderByComparator < CommerceTaxFixedRate > orderByComparator ) throws NoSuchTaxFixedRateException { } }
CommerceTaxFixedRate commerceTaxFixedRate = fetchByCPTaxCategoryId_Last ( CPTaxCategoryId , orderByComparator ) ; if ( commerceTaxFixedRate != null ) { return commerceTaxFixedRate ; } StringBundler msg = new StringBundler ( 4 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "CPTaxCategoryId=" ) ; msg . append ( CPTaxCategoryId ) ; msg . append ( "}" ) ; throw new NoSuchTaxFixedRateException ( msg . toString ( ) ) ;
public class FBOGraphics {
    /**
     * Binds the previously created FBO so subsequent GL operations target it,
     * and selects its first color attachment as the read buffer.
     */
    private void bind() {
        EXTFramebufferObject.glBindFramebufferEXT(EXTFramebufferObject.GL_FRAMEBUFFER_EXT, FBO);
        GL11.glReadBuffer(EXTFramebufferObject.GL_COLOR_ATTACHMENT0_EXT);
    }
}
public class KeyVaultClientBaseImpl { /** * Gets the specified deleted storage account . * The Get Deleted Storage Account operation returns the specified deleted storage account along with its attributes . This operation requires the storage / get permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param storageAccountName The name of the storage account . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the DeletedStorageBundle object */ public Observable < DeletedStorageBundle > getDeletedStorageAccountAsync ( String vaultBaseUrl , String storageAccountName ) { } }
return getDeletedStorageAccountWithServiceResponseAsync ( vaultBaseUrl , storageAccountName ) . map ( new Func1 < ServiceResponse < DeletedStorageBundle > , DeletedStorageBundle > ( ) { @ Override public DeletedStorageBundle call ( ServiceResponse < DeletedStorageBundle > response ) { return response . body ( ) ; } } ) ;
public class HandlebarsHelper { /** * Handle invalid helper data without exception details or because none was thrown . * @ param message message to log and return * @ return a message which will be used as content */ protected String handleError ( final String message ) { } }
notifier ( ) . error ( formatMessage ( message ) ) ; return formatMessage ( message ) ;
public class SoyFileSet {
    /**
     * Compiles this Soy file set into JS source code files and returns these JS
     * files as a list of strings, one per file. The generated JS files correspond
     * one-to-one to the original Soy source files.
     *
     * @param jsSrcOptions the compilation options for the JS Src output target
     * @param msgBundle the bundle of translated messages, or null to use the
     *     messages from the Soy source
     * @return a list of strings, each the JS source code for one file
     * @throws SoyCompilationException if compilation fails
     */
    @SuppressWarnings("deprecation")
    public List<String> compileToJsSrc(SoyJsSrcOptions jsSrcOptions, @Nullable SoyMsgBundle msgBundle) {
        resetErrorReporter();
        // JS has traditionally allowed unknown globals, as a way for soy to
        // reference normal js enums and constants. For consistency/reusability
        // of templates it would be nice to not allow that but the cat is out of
        // the bag. HTML nodes are left undesugared for the JS backend.
        PassManager.Builder builder = passManagerBuilder().allowUnknownGlobals().allowV1Expression().desugarHtmlNodes(false);
        ParseResult result = parse(builder);
        // Abort on parse errors before code generation.
        throwIfErrorsPresent();
        TemplateRegistry registry = result.registry();
        SoyFileSetNode fileSet = result.fileSet();
        List<String> generatedSrcs = new JsSrcMain(scopedData.enterable(), typeRegistry).genJsSrc(fileSet, registry, jsSrcOptions, msgBundle, errorReporter);
        // Abort again if code generation itself reported errors.
        throwIfErrorsPresent();
        reportWarnings();
        return generatedSrcs;
    }
}
public class InApplicationMonitor { /** * This method was intended to register module names with their * current version identifier . * This could / should actually be generalized into an non numeric * state value * @ param name name of the versionized " thing " ( class , module etc . ) * @ param version identifier of the version */ public void registerVersion ( String name , String version ) { } }
Version versionToAdd = new Version ( keyHandler . handle ( name ) , version ) ; getCorePlugin ( ) . registerVersion ( versionToAdd ) ;
public class DirectQuickSelectSketchR {
    /**
     * Copies the sketch's hash-table cache out of off-heap memory into a fresh
     * on-heap long array.
     *
     * @return the cache as a new long array of size 2^lgArrLongs
     */
    @Override
    long[] getCache() {
        // Array size exponent and preamble length are read from the preamble bytes.
        final long lgArrLongs = mem_.getByte(LG_ARR_LONGS_BYTE) & 0XFF;
        final int preambleLongs = mem_.getByte(PREAMBLE_LONGS_BYTE) & 0X3F;
        final long[] cacheArr = new long[1 << lgArrLongs];
        final WritableMemory mem = WritableMemory.wrap(cacheArr);
        // Copy starts after the preamble (preambleLongs << 3 bytes) and spans
        // 8 << lgArrLongs bytes, i.e. the whole cache.
        mem_.copyTo(preambleLongs << 3, mem, 0, 8 << lgArrLongs);
        return cacheArr;
    }
}
public class EXXAdapters { public static String convertByType ( OrderType orderType ) { } }
return OrderType . BID . equals ( orderType ) ? IConstants . BUY : IConstants . SELL ;
public class DescribeInterconnectsResult { /** * The interconnects . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setInterconnects ( java . util . Collection ) } or { @ link # withInterconnects ( java . util . Collection ) } if you want * to override the existing values . * @ param interconnects * The interconnects . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeInterconnectsResult withInterconnects ( Interconnect ... interconnects ) { } }
if ( this . interconnects == null ) { setInterconnects ( new com . amazonaws . internal . SdkInternalList < Interconnect > ( interconnects . length ) ) ; } for ( Interconnect ele : interconnects ) { this . interconnects . add ( ele ) ; } return this ;
public class SetCache {
    /**
     * Returns the cached {@link UnsortedGrouping} for the given ID.
     *
     * @param id Set ID
     * @param <T> UnsortedGrouping type
     * @return cached UnsortedGrouping
     * @throws IllegalStateException if the cached set is not an UnsortedGrouping
     */
    @SuppressWarnings("unchecked")
    public <T> UnsortedGrouping<T> getUnsortedGrouping(int id) {
        // verifyType throws if the cached entry is of a different set type.
        return verifyType(id, unsortedGroupings.get(id), SetType.UNSORTED_GROUPING);
    }
}
public class DefaultImportationLinker {
    /**
     * Binds the {@link ImportDeclaration} matching the ImportDeclarationFilter.
     * Checks whether the metadata of the ImportDeclaration matches the filter
     * exposed by the bound {@link ImporterService}s; if it does, links them together.
     */
    @Bind(id = "importDeclarations", specification = ImportDeclaration.class, aggregate = true, optional = true)
    void bindImportDeclaration(ServiceReference<ImportDeclaration> importDeclarationSRef) {
        // All manager mutations happen under the shared lock.
        synchronized (lock) {
            declarationsManager.add(importDeclarationSRef);
            // Declarations that match no importer filter are tracked but not linked.
            if (!declarationsManager.matched(importDeclarationSRef)) {
                LOG.debug("No service matching was found, ignoring service reference.");
                return;
            }
            LOG.debug(linkerName + " : Bind the ImportDeclaration " + declarationsManager.getDeclaration(importDeclarationSRef));
            declarationsManager.createLinks(importDeclarationSRef);
        }
    }
}
public class CollectionResource {
    /**
     * {@inheritDoc}
     *
     * Collects the inherited WebDAV properties, then adds one property per JCR
     * property of the underlying node (skipping those in COLLECTION_SKIP).
     * Individual property failures are logged at debug level and skipped so one
     * bad property does not hide the rest.
     */
    @Override
    public Set<HierarchicalProperty> getProperties(boolean namesOnly) throws PathNotFoundException, AccessDeniedException, RepositoryException {
        Set<HierarchicalProperty> props = super.getProperties(namesOnly);
        PropertyIterator jcrProps = node.getProperties();
        while (jcrProps.hasNext()) {
            Property property = jcrProps.nextProperty();
            if (!COLLECTION_SKIP.contains(property.getName())) {
                QName name = namespaceContext.createQName(property.getName());
                try {
                    // namesOnly: report only the property name, not its value.
                    props.add((namesOnly) ? new HierarchicalProperty(name) : getProperty(name));
                } catch (UnsupportedOperationException exc) {
                    if (LOG.isDebugEnabled()) {
                        LOG.error(exc.getMessage(), exc);
                    }
                } catch (ClassCastException exc) {
                    if (LOG.isDebugEnabled()) {
                        LOG.error(exc.getMessage(), exc);
                    }
                } catch (IllegalArgumentException exc) {
                    if (LOG.isDebugEnabled()) {
                        LOG.error(exc.getMessage(), exc);
                    }
                } catch (PathNotFoundException exc) {
                    if (LOG.isDebugEnabled()) {
                        LOG.error(exc.getMessage(), exc);
                    }
                }
            }
        }
        return props;
    }
}
public class DefaultCommandManager {
    /**
     * Creates a command group which holds all the given members.
     *
     * @param groupId the id to configure the group
     * @param members members to add to the group
     * @param configurer the configurer to use
     * @return a {@link CommandGroup} which contains all the members
     */
    @Override
    public CommandGroup createCommandGroup(String groupId, Object[] members, CommandConfigurer configurer) {
        // Delegates to the full overload with exclusive = false.
        return createCommandGroup(groupId, members, false, configurer);
    }
}
public class X509CertInfo {
    /**
     * Unmarshals the certificate information from the given DER-encoded
     * TBSCertificate SEQUENCE. Field order is fixed by the X.509 ASN.1
     * definition, so the reads below must not be reordered.
     *
     * @param val the DER value holding the signed certificate fields
     * @throws CertificateParsingException if the encoding violates X.509 rules
     * @throws IOException on malformed DER data
     */
    private void parse(DerValue val) throws CertificateParsingException, IOException {
        DerInputStream in;
        DerValue tmp;
        if (val.tag != DerValue.tag_Sequence) {
            throw new CertificateParsingException("signed fields invalid");
        }
        rawCertInfo = val.toByteArray();
        in = val.data;
        // Version (context tag [0], optional; defaults when absent)
        tmp = in.getDerValue();
        if (tmp.isContextSpecific((byte) 0)) {
            version = new CertificateVersion(tmp);
            tmp = in.getDerValue();
        }
        // Serial number ... an integer
        serialNum = new CertificateSerialNumber(tmp);
        // Algorithm Identifier
        algId = new CertificateAlgorithmId(in);
        // Issuer name
        issuer = new X500Name(in);
        if (issuer.isEmpty()) {
            throw new CertificateParsingException("Empty issuer DN not allowed in X509Certificates");
        }
        // validity: SEQUENCE { start date, end date }
        interval = new CertificateValidity(in);
        // subject name
        subject = new X500Name(in);
        if ((version.compare(CertificateVersion.V1) == 0) && subject.isEmpty()) {
            throw new CertificateParsingException("Empty subject DN not allowed in v1 certificate");
        }
        // public key
        pubKey = new CertificateX509Key(in);
        // If more data available, make sure version is not v1.
        if (in.available() != 0) {
            if (version.compare(CertificateVersion.V1) == 0) {
                throw new CertificateParsingException("no more data allowed for version 1 certificate");
            }
        } else {
            return;
        }
        // Get the issuerUniqueId if present (context tag [1])
        tmp = in.getDerValue();
        if (tmp.isContextSpecific((byte) 1)) {
            issuerUniqueId = new UniqueIdentity(tmp);
            if (in.available() == 0)
                return;
            tmp = in.getDerValue();
        }
        // Get the subjectUniqueId if present (context tag [2]).
        if (tmp.isContextSpecific((byte) 2)) {
            subjectUniqueId = new UniqueIdentity(tmp);
            if (in.available() == 0)
                return;
            tmp = in.getDerValue();
        }
        // Get the extensions (context tag [3]) — only legal in v3.
        if (version.compare(CertificateVersion.V3) != 0) {
            throw new CertificateParsingException("Extensions not allowed in v2 certificate");
        }
        if (tmp.isConstructed() && tmp.isContextSpecific((byte) 3)) {
            extensions = new CertificateExtensions(tmp.data);
        }
        // verify X.509 V3 Certificate
        verifyCert(subject, extensions);
    }
}
public class Broadcaster {
    /**
     * An S3 .m3u8 upload completed. Called on a background thread.
     *
     * Optionally deletes the uploaded manifest (and, once the final vod.m3u8
     * lands, the whole output directory), then announces the broadcast as live
     * on the first successful manifest upload.
     */
    private void onManifestUploaded(S3UploadEvent uploadEvent) {
        if (mDeleteAfterUploading) {
            if (VERBOSE) Log.i(TAG, "Deleting " + uploadEvent.getFile().getAbsolutePath());
            uploadEvent.getFile().delete();
            String uploadUrl = uploadEvent.getDestinationUrl();
            // The vod.m3u8 manifest is the last upload of the session, so the
            // local output directory is no longer needed after it.
            if (uploadUrl.substring(uploadUrl.lastIndexOf(File.separator) + 1).equals("vod.m3u8")) {
                if (VERBOSE) Log.i(TAG, "Deleting " + mConfig.getOutputDirectory());
                mFileObserver.stopWatching();
                FileUtils.deleteDirectory(mConfig.getOutputDirectory());
            }
        }
        // Fire the "live" event exactly once, on the first uploaded manifest.
        if (!mSentBroadcastLiveEvent) {
            mEventBus.post(new BroadcastIsLiveEvent(((HlsStream) mStream).getKickflipUrl()));
            mSentBroadcastLiveEvent = true;
            if (mBroadcastListener != null) mBroadcastListener.onBroadcastLive(mStream);
        }
    }
}
public class Range { /** * Checks whether this range is before the specified element . * @ param element the element to check for , null returns false * @ return true if this range is entirely before the specified element */ public boolean isBefore ( T element ) { } }
if ( element == null ) { return false ; } return comparator . compare ( element , max ) > 0 ;
public class ConfigurationsInner { /** * Configures the HTTP settings on the specified cluster . This API is deprecated , please use UpdateGatewaySettings in cluster endpoint instead . * @ param resourceGroupName The name of the resource group . * @ param clusterName The name of the cluster . * @ param configurationName The name of the cluster configuration . * @ param parameters The cluster configurations . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the { @ link ServiceResponse } object if successful . */ public Observable < Void > beginUpdateAsync ( String resourceGroupName , String clusterName , String configurationName , Map < String , String > parameters ) { } }
return beginUpdateWithServiceResponseAsync ( resourceGroupName , clusterName , configurationName , parameters ) . map ( new Func1 < ServiceResponse < Void > , Void > ( ) { @ Override public Void call ( ServiceResponse < Void > response ) { return response . body ( ) ; } } ) ;
public class PiwikRequest { /** * Set a stored parameter . * @ param key the parameter ' s key * @ param value the parameter ' s value . Removes the parameter if null */ private void setParameter ( String key , Object value ) { } }
if ( value == null ) { parameters . remove ( key ) ; } else { parameters . put ( key , value ) ; }
public class TimeoutManager {
    /**
     * Starts the timeout-management daemon thread if it is not already running.
     * Synchronized so concurrent callers cannot start two threads.
     */
    public synchronized void start() {
        if (thread == null) {
            thread = new Thread(this, "Lasta_Di-TimeoutManager");
            // Daemon thread: must not keep the JVM alive on shutdown.
            thread.setDaemon(true);
            thread.start();
        }
    }
}
public class RoseFilter {
    /**
     * Implements {@link GenericFilterBean#initFilterBean()}: initializes Rose.
     *
     * Builds the root application context, discovers the Rose modules, and
     * constructs the URL mapping tree. Any failure is logged and rethrown as a
     * {@link NestedServletException} tagged with the Rose and Spring versions.
     */
    @Override
    protected final void initFilterBean() throws ServletException {
        try {
            long startTime = System.currentTimeMillis();
            if (logger.isInfoEnabled()) {
                logger.info("[init] call 'init/rootContext'");
            }
            // Dump all filter init parameters at debug level for diagnostics.
            if (logger.isDebugEnabled()) {
                StringBuilder sb = new StringBuilder();
                @SuppressWarnings("unchecked")
                Enumeration<String> iter = getFilterConfig().getInitParameterNames();
                while (iter.hasMoreElements()) {
                    String name = (String) iter.nextElement();
                    sb.append(name).append("='").append(getFilterConfig().getInitParameter(name)).append("'\n");
                }
                logger.debug("[init] parameters: " + sb);
            }
            WebApplicationContext rootContext = prepareRootApplicationContext();
            if (logger.isInfoEnabled()) {
                logger.info("[init] exits from 'init/rootContext'");
                logger.info("[init] call 'init/module'");
            }
            // Discover the Rose application modules.
            this.modules = prepareModules(rootContext);
            if (logger.isInfoEnabled()) {
                logger.info("[init] exits from 'init/module'");
                logger.info("[init] call 'init/mappingTree'");
            }
            // Build the mapping tree and the execution logic (Engine) on each node.
            this.mappingTree = prepareMappingTree(modules);
            if (logger.isInfoEnabled()) {
                logger.info("[init] exits from 'init/mappingTree'");
                logger.info("[init] exits from 'init'");
            }
            long endTime = System.currentTimeMillis();
            // Print startup information, including elapsed init time.
            printRoseInfos(endTime - startTime);
        } catch (final Throwable e) {
            // Wrap any failure with the Rose/Spring version banner for easier triage.
            StringBuilder sb = new StringBuilder(1024);
            sb.append("[Rose-").append(RoseVersion.getVersion());
            sb.append("@Spring-").append(SpringVersion.getVersion()).append("]:");
            sb.append(e.getMessage());
            logger.error(sb.toString(), e);
            throw new NestedServletException(sb.toString(), e);
        }
    }
}
public class AbstractFuture { /** * Subclasses should invoke this method to set the result of the computation * to an error , { @ code throwable } . This will set the state of the future to * { @ link AbstractFuture . Sync # COMPLETED } and invoke the listeners if the * state was successfully changed . * @ param throwable the exception that the task failed with . * @ return true if the state was successfully changed . */ protected boolean setException ( Throwable throwable ) { } }
boolean result = sync . setException ( checkNotNull ( throwable ) ) ; if ( result ) { executionList . execute ( ) ; } return result ;
public class DatePropertyParser {
    /**
     * Tries to parse the source string as an ISO 8601 date.
     *
     * Candidate patterns are tried in order; the first successful parse wins.
     * Failed attempts are deliberately swallowed (best-effort parsing).
     *
     * @param source the date string; may be null
     * @return the parsed {@link Date}, or null if the input is null or matches
     *     none of the supported formats
     */
    public static Date parseISO8601DateString(String source) {
        final String[] supportedFormats = new String[] {
            "yyyy-MM-dd'T'HH:mm:ss.SSSXXX",
            "yyyy-MM-dd'T'HH:mm:ssXXX",
            "yyyy-MM-dd'T'HH:mm:ssZ",
            "yyyy-MM-dd'T'HH:mm:ss.SSSZ" };
        // Fix: the original fell through to SimpleDateFormat.parse(null) and
        // threw NullPointerException on null input; return null instead.
        if (source == null) {
            return null;
        }
        // SimpleDateFormat's RFC-822 'Z' pattern does not accept the literal
        // 'Z' zone designator, so normalize it to "+0000". Plain String methods
        // replace the commons-lang StringUtils calls (behavior is identical for
        // non-null input, and null is handled above).
        if (source.contains("Z")) {
            source = source.replace("Z", "+0000");
        }
        for (final String format : supportedFormats) {
            try {
                return new SimpleDateFormat(format).parse(source);
            } catch (ParseException ignored) {
                // Not this format; try the next candidate.
            }
        }
        return null;
    }
}
public class RetriableStream { /** * Adds grpc - previous - rpc - attempts in the headers of a retry / hedging RPC . */ @ VisibleForTesting final Metadata updateHeaders ( Metadata originalHeaders , int previousAttemptCount ) { } }
Metadata newHeaders = new Metadata ( ) ; newHeaders . merge ( originalHeaders ) ; if ( previousAttemptCount > 0 ) { newHeaders . put ( GRPC_PREVIOUS_RPC_ATTEMPTS , String . valueOf ( previousAttemptCount ) ) ; } return newHeaders ;
public class ViewUtils { /** * Convert the pixels to dips , based on density scale * @ param windowManager the window manager of the display to use the scale density of . * @ param pixel to be converted value . * @ return converted value ( dip ) . */ public static float pixelToDip ( WindowManager windowManager , int pixel ) { } }
DisplayMetrics metrics = new DisplayMetrics ( ) ; windowManager . getDefaultDisplay ( ) . getMetrics ( metrics ) ; return metrics . scaledDensity * pixel ;
public class ProximityTracker {
    /**
     * Computes the geometric (Euclidean) distance between the supplied two
     * points, truncated to an int.
     *
     * @param x1 x of the first point
     * @param y1 y of the first point
     * @param x2 x of the second point
     * @param y2 y of the second point
     * @return the truncated Euclidean distance between the points
     */
    public static int distance(int x1, int y1, int x2, int y2) {
        // Fix: compute the deltas and their squares in double. The original
        // squared int deltas, which silently overflows for deltas larger than
        // ~46340 and produced wrong (even negative-sqrt NaN-cast) distances.
        double dx = (double) x1 - x2, dy = (double) y1 - y2;
        return (int) Math.sqrt(dx * dx + dy * dy);
    }
}
public class DumpProcessingController { /** * Stores a registered processor object in a map of processors . Used * internally to keep { @ link EntityDocumentProcessor } and * { @ link MwRevisionProcessor } objects . * @ param processor * the processor object to register * @ param model * the content model that the processor is registered for ; it * will only be notified of revisions in that model ; if null is * given , all revisions will be processed whatever their model * @ param onlyCurrentRevisions * if true , then the subscriber is only notified of the most * current revisions ; if false , then it will receive all * revisions , current or not * @ param processors * the map of lists of processors to store the processor in */ private < T > void registerProcessor ( T processor , String model , boolean onlyCurrentRevisions , Map < ListenerRegistration , List < T > > processors ) { } }
this . preferCurrent = this . preferCurrent && onlyCurrentRevisions ; ListenerRegistration listenerRegistration = new ListenerRegistration ( model , onlyCurrentRevisions ) ; if ( ! processors . containsKey ( listenerRegistration ) ) { processors . put ( listenerRegistration , new ArrayList < > ( ) ) ; } processors . get ( listenerRegistration ) . add ( processor ) ;
public class SetUtils { /** * Determines the intersection of a collection of sets . * @ param < T > * @ param sets * Basic collection of sets . * @ return The set of common elements of all given sets . */ public static < T > Set < T > intersection ( Set < T > ... sets ) { } }
return intersection ( Arrays . asList ( sets ) ) ;
public class Link {
    /**
     * Creates a Builder instance and initializes it from a prototype Link.
     *
     * @param prototype the prototype link
     * @return a Builder pre-populated with every attribute of the prototype
     * @since 0.1.0
     */
    public static Builder copyOf(final Link prototype) {
        // rel and href are mandatory (constructor arguments); all other
        // attributes are copied through the fluent with* methods.
        return new Builder(prototype.rel, prototype.href).withType(prototype.type).withProfile(prototype.profile).withTitle(prototype.title).withName(prototype.name).withDeprecation(prototype.deprecation).withHrefLang(prototype.hreflang);
    }
}
public class ClassAccessor {
    /**
     * Returns an {@link ObjectAccessor} for an instance of T where all the
     * fields are initialized to their default values. I.e., 0 for ints, and
     * null for objects (except when the field is marked with a NonNull
     * annotation).
     *
     * @param enclosingType describes the type that contains this object as a
     *     field, to determine any generic parameters it may contain
     * @param nonnullFields fields which are not allowed to be set to null
     * @param annotationCache to check for any NonNull annotations
     * @return an {@link ObjectAccessor} with all fields at their default values
     */
    public ObjectAccessor<T> getDefaultValuesAccessor(TypeTag enclosingType, Set<String> nonnullFields, AnnotationCache annotationCache) {
        ObjectAccessor<T> result = buildObjectAccessor();
        for (Field field : FieldIterable.of(type)) {
            // Fields that must stay non-null get a prefab value instead of
            // keeping the JVM default of null.
            if (NonnullAnnotationVerifier.fieldIsNonnull(field, annotationCache) || nonnullFields.contains(field.getName())) {
                FieldAccessor accessor = result.fieldAccessorFor(field);
                accessor.changeField(prefabValues, enclosingType);
            }
        }
        return result;
    }
}
public class SparkCommandExample {
    /**
     * An example of submitting a Spark command as a Scala program.
     * Similarly, we can submit a Spark command as a SQL query, R program
     * or Java program.
     *
     * Submits the command, blocks until the result is available, then prints
     * the result to stdout and the command logs to stderr.
     */
    private static void submitScalaProgram(QdsClient client) throws Exception {
        String sampleProgram = "println(\"hello world\")";
        SparkCommandBuilder sparkBuilder = client.command().spark();
        // Give a name to the command. (Optional)
        sparkBuilder.name("spark-scala-test");
        // Setting the program here
        sparkBuilder.program(sampleProgram);
        // Setting the language here
        sparkBuilder.language("scala");
        CommandResponse commandResponse = sparkBuilder.invoke().get();
        // Poll until the command completes, then fetch its results.
        ResultLatch resultLatch = new ResultLatch(client, commandResponse.getId());
        ResultValue resultValue = resultLatch.awaitResult();
        System.out.println(resultValue.getResults());
        String s = client.command().logs("" + commandResponse.getId()).invoke().get();
        System.err.println(s);
    }
}
public class JSRepeated { /** * Set the bounds ( default is { 0 , unbounded } ) . Use maxOccurs = - 1 to indicate * " unbounded . " */ public void setBounds ( int minOccurs , int maxOccurs ) { } }
if ( minOccurs < 0 || maxOccurs < - 1 ) throw new IllegalArgumentException ( "Bounds cannot be negative" ) ; else if ( maxOccurs > 0 && minOccurs > maxOccurs ) throw new IllegalArgumentException ( "Minimum bounds less than maximum bounds" ) ; limits = new int [ ] { minOccurs , maxOccurs } ;
public class BinaryAnnotationMappingDeriver {
    /**
     * Creates a mapping result from the supplied binary annotations.
     *
     * For each annotation: explicitly ignored mappings are skipped; annotations
     * without a property mapping are stored verbatim as properties; mapped
     * annotations may contribute a component type, an endpoint type, and/or a
     * (possibly renamed) property. Only the first component/endpoint type found
     * is kept.
     *
     * @param binaryAnnotations binary annotations of a span; may be null
     * @return the derived mapping result (empty when input is null)
     */
    public MappingResult mappingResult(List<BinaryAnnotation> binaryAnnotations) {
        if (binaryAnnotations == null) {
            return new MappingResult();
        }
        List<String> componentTypes = new ArrayList<>();
        List<String> endpointTypes = new ArrayList<>();
        MappingResult.Builder mappingBuilder = MappingResult.builder();
        for (BinaryAnnotation binaryAnnotation : binaryAnnotations) {
            // Keyless annotations cannot be mapped or stored.
            if (binaryAnnotation.getKey() == null) {
                continue;
            }
            BinaryAnnotationMapping mapping = mappingStorage.getKeyBasedMappings().get(binaryAnnotation.getKey());
            if (mapping != null && mapping.isIgnore()) {
                continue;
            }
            if (mapping == null || mapping.getProperty() == null) {
                // If no mapping, then just store property
                mappingBuilder.addProperty(new Property(binaryAnnotation.getKey(), binaryAnnotation.getValue(), AnnotationTypeUtil.toPropertyType(binaryAnnotation.getType())));
            }
            if (mapping != null) {
                if (mapping.getComponentType() != null) {
                    componentTypes.add(mapping.getComponentType());
                }
                if (mapping.getEndpointType() != null) {
                    endpointTypes.add(mapping.getEndpointType());
                }
                if (mapping.getProperty() != null && !mapping.getProperty().isExclude()) {
                    // Mapped properties may be renamed; fall back to the raw key.
                    String key = mapping.getProperty().getKey() != null ? mapping.getProperty().getKey() : binaryAnnotation.getKey();
                    mappingBuilder.addProperty(new Property(key, binaryAnnotation.getValue(), AnnotationTypeUtil.toPropertyType(binaryAnnotation.getType())));
                }
            }
        }
        // Only the first discovered component/endpoint type wins.
        if (!componentTypes.isEmpty()) {
            mappingBuilder.withComponentType(componentTypes.get(0));
        }
        if (!endpointTypes.isEmpty()) {
            mappingBuilder.withEndpointType(endpointTypes.get(0));
        }
        return mappingBuilder.build();
    }
}
public class InjectionPointImpl { /** * / * ( non - Javadoc ) * @ see org . jboss . arquillian . api . InjectionPoint # set ( org . jboss . arquillian . api . Instance ) */ @ Override public void set ( Instance < ? > value ) throws InvocationException { } }
try { if ( ! field . isAccessible ( ) ) { field . setAccessible ( true ) ; } field . set ( target , value ) ; } catch ( Exception e ) { throw new InvocationException ( e . getCause ( ) ) ; }
public class BackupLongTermRetentionPoliciesInner { /** * Creates or updates a database backup long term retention policy . * @ param resourceGroupName The name of the resource group that contains the resource . You can obtain this value from the Azure Resource Manager API or the portal . * @ param serverName The name of the server . * @ param databaseName The name of the database * @ param parameters The required parameters to update a backup long term retention policy * @ throws IllegalArgumentException thrown if parameters fail the validation * @ return the observable to the BackupLongTermRetentionPolicyInner object */ public Observable < BackupLongTermRetentionPolicyInner > beginCreateOrUpdateAsync ( String resourceGroupName , String serverName , String databaseName , BackupLongTermRetentionPolicyInner parameters ) { } }
return beginCreateOrUpdateWithServiceResponseAsync ( resourceGroupName , serverName , databaseName , parameters ) . map ( new Func1 < ServiceResponse < BackupLongTermRetentionPolicyInner > , BackupLongTermRetentionPolicyInner > ( ) { @ Override public BackupLongTermRetentionPolicyInner call ( ServiceResponse < BackupLongTermRetentionPolicyInner > response ) { return response . body ( ) ; } } ) ;
public class JsMessageFactoryImpl {
    /**
     * Extracts the class name from the buffer containing the (first part of) a
     * restored value.
     *
     * @param buffer the buffer
     * @param offset the offset of the classname's encoded bytes in the buffer
     * @param length the length of the classname's encoded bytes
     * @return the class name of the message to restore
     * @exception IllegalArgumentException (and other runtime exceptions) if the
     *     length is 0, the buffer isn't long enough, etc. The caller will catch
     *     and FFDC it, as the data to be FFDC'd depends on the caller.
     */
    private final static String getClassName(byte[] buffer, int offset, int length) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "getClassName", new Object[] { offset, length });
        // If the classname has a length of 0 then we've been given rubbish so pack it in now
        if (length == 0) {
            throw new IllegalArgumentException("Invalid buffer: classname length = 0");
        }
        // The classname should be in UTF8; if that fails FFDC and default in the hope of carrying on.
        String className;
        try {
            className = new String(buffer, offset, length, "UTF8"); // the class name itself
        } catch (UnsupportedEncodingException e) {
            FFDCFilter.processException(e, "com.ibm.ws.sib.mfp.impl.JsMessageFactoryImpl.getClassName", "644");
            // Fall back to the platform default charset as a best effort.
            className = new String(buffer, offset, length);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "getClassName", className);
        return className;
    }
}
public class BlockBox { /** * Returns true if the element displays at least something */ @ Override public boolean affectsDisplay ( ) { } }
boolean ret = containsFlow ( ) ; // non - zero top or left border if ( border . top > 0 || border . bottom > 0 ) ret = true ; // the same with padding if ( padding . top > 0 || padding . bottom > 0 ) ret = true ; return ret ;
public class StorePackageImpl {
    /**
     * Returns the EClass for DensityCollection, lazily resolving it from the
     * registered package metadata on first access.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EClass getDensityCollection() {
        if (densityCollectionEClass == null) {
            // Classifier index 120 is fixed by the generated package layout.
            densityCollectionEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(120);
        }
        return densityCollectionEClass;
    }
}
public class BccClient {
    /**
     * Authorizes a security group rule for the specified security group.
     *
     * Validates the request, generates a client token when absent (for
     * idempotent retries), and issues a PUT with the {@code authorizeRule}
     * marker parameter.
     *
     * @param request the request containing all options for authorizing a
     *     security group rule; must carry a security group id and a rule
     */
    public void authorizeSecurityGroupRule(SecurityGroupRuleOperateRequest request) {
        checkNotNull(request, "request should not be null.");
        checkStringNotEmpty(request.getSecurityGroupId(), "securityGroupId should not be empty.");
        // Auto-generate the idempotency token when the caller did not set one.
        if (Strings.isNullOrEmpty(request.getClientToken())) {
            request.setClientToken(this.generateClientToken());
        }
        if (null == request.getRule()) {
            throw new IllegalArgumentException("request rule should not be null");
        }
        InternalRequest internalRequest = this.createRequest(request, HttpMethodName.PUT, SECURITYGROUP_PREFIX, request.getSecurityGroupId());
        // "authorizeRule" is a value-less marker parameter selecting the action.
        internalRequest.addParameter("authorizeRule", null);
        internalRequest.addParameter("clientToken", request.getClientToken());
        fillPayload(internalRequest, request);
        invokeHttpClient(internalRequest, AbstractBceResponse.class);
    }
}
public class CPDefinitionVirtualSettingUtil {
    /**
     * Returns the first cp definition virtual setting in the ordered set where
     * uuid = &#63; and companyId = &#63;.
     * Thin static facade: delegates straight to the persistence layer.
     * @param uuid the uuid
     * @param companyId the company ID
     * @param orderByComparator the comparator to order the set by (optionally <code>null</code>)
     * @return the first matching cp definition virtual setting, or <code>null</code> if a matching cp definition virtual setting could not be found
     */
    public static CPDefinitionVirtualSetting fetchByUuid_C_First(String uuid, long companyId, OrderByComparator<CPDefinitionVirtualSetting> orderByComparator) {
        return getPersistence().fetchByUuid_C_First(uuid, companyId, orderByComparator);
    }
}
public class BasicOperationsBenchmark {
    /**
     * Encode a span using binary format.
     * Benchmarks the binary-format serialization of a span context; the result
     * is returned so JMH treats it as consumed (prevents dead-code elimination).
     */
    @Benchmark
    @BenchmarkMode(Mode.AverageTime)
    @OutputTimeUnit(TimeUnit.NANOSECONDS)
    public byte[] encodeSpanBinary(Data data) {
        return data.propagation.getBinaryFormat().toByteArray(data.spanToEncode.getContext());
    }
}
public class BatchStore {
    /**
     * Loads the batch subsystem configuration in one composite DMR operation
     * (batch root, thread pool, job repository, thread factories) and caches
     * the results in this store's fields.
     * @param channel acked on success, nacked with the failure cause otherwise
     */
    @Process(actionType = InitBatch.class)
    public void init(final Dispatcher.Channel channel) {
        // One composite operation: step-1..step-4 below map to these four reads, in order.
        List<ModelNode> steps = new ArrayList<>();
        steps.add(readResourceOp(BATCH_ADDRESS));
        steps.add(readResourceOp(THREAD_POOL_ADDRESS));
        steps.add(readResourceOp(JOB_REPOSITORY_ADDRESS));
        steps.add(readThreadFactoriesOp());
        final ModelNode comp = new ModelNode();
        comp.get(ADDRESS).setEmptyList();
        comp.get(OP).set(COMPOSITE);
        comp.get(STEPS).set(steps);
        dispatcher.execute(new DMRAction(comp), new AsyncCallback<DMRResponse>() {
            @Override
            public void onFailure(Throwable caught) {
                channel.nack(caught);
            }
            @Override
            public void onSuccess(DMRResponse dmrResponse) {
                ModelNode response = dmrResponse.get();
                if (response.isFailure()) {
                    channel.nack(new RuntimeException("Failed to initialize batch store using " + comp + ": " + response.getFailureDescription()));
                } else {
                    ModelNode result = response.get(RESULT);
                    // step-1: batch subsystem root resource
                    ModelNode stepResult = result.get("step-1");
                    if (stepResult.get(RESULT).isDefined()) {
                        batch = stepResult.get(RESULT);
                    }
                    // step-2: thread pool resource
                    stepResult = result.get("step-2");
                    if (stepResult.get(RESULT).isDefined()) {
                        threadPool = stepResult.get(RESULT);
                    }
                    // step-3: job repository resource
                    stepResult = result.get("step-3");
                    if (stepResult.get(RESULT).isDefined()) {
                        jobRepository = stepResult.get(RESULT);
                    }
                    // step-4: thread factories; replace the cached list wholesale
                    stepResult = result.get("step-4");
                    if (stepResult.get(RESULT).isDefined()) {
                        threadFactories.clear();
                        threadFactories.addAll(stepResult.get(RESULT).asPropertyList());
                    }
                    channel.ack();
                }
            }
        });
    }
}
public class ListPrincipalsResult { /** * The principals . * @ param principals * The principals . */ public void setPrincipals ( java . util . Collection < Principal > principals ) { } }
if ( principals == null ) { this . principals = null ; return ; } this . principals = new java . util . ArrayList < Principal > ( principals ) ;
public class SpanOperationsBenchmark { /** * Add an annotation as description only . */ @ Benchmark @ BenchmarkMode ( Mode . AverageTime ) @ OutputTimeUnit ( TimeUnit . NANOSECONDS ) public Span addAnnotationEmpty ( Data data ) { } }
Span span = data . annotationSpanEmpty ; span . addAnnotation ( ANNOTATION_DESCRIPTION ) ; return span ;
public class ListWidget { /** * Get all views from the list content * @ return list of views currently visible */ public List < Widget > getAllViews ( ) { } }
List < Widget > views = new ArrayList < > ( ) ; for ( Widget child : mContent . getChildren ( ) ) { Widget item = ( ( ListItemHostWidget ) child ) . getGuest ( ) ; if ( item != null ) { views . add ( item ) ; } } return views ;
public class Group { /** * command _ inout _ reply */ public GroupCmdReplyList command_inout_reply ( final int rid , final int tmo ) throws DevFailed { } }
final Integer rid_obj = new Integer ( rid ) ; final Boolean fwd = ( Boolean ) arp . get ( rid_obj ) ; if ( fwd == null ) { Except . throw_exception ( "API_BadAsynPollId" , "Invalid asynch. request identifier specified" , "Group.command_inout_reply" ) ; } arp . remove ( rid_obj ) ; return command_inout_reply_i ( rid , tmo , fwd . booleanValue ( ) ) ;
public class QrPose3DUtils { /** * Specifies transform from pixel to normalize image coordinates */ public void setLensDistortion ( Point2Transform2_F64 pixelToNorm , Point2Transform2_F64 undistToDist ) { } }
if ( pixelToNorm == null ) { this . pixelToNorm = new DoNothing2Transform2_F64 ( ) ; this . undistToDist = new DoNothing2Transform2_F64 ( ) ; } else { this . pixelToNorm = pixelToNorm ; this . undistToDist = undistToDist ; }
public class HadoopSecurityManager_H_2_0 {
    /**
     * Function to fetch an hcat token as per the specified hive configuration
     * and decode it into a {@link Token}.
     * @param userToProxy String value indicating the name of the user the token will be fetched for.
     * @param hiveConf the configuration based off which the hive client will be initialized.
     * @param tokenSignatureOverwrite optional value written into the token's
     *        service field when the metastore left it empty; may be null/blank.
     * @param logger the logger instance which writes the logging content to the job logs.
     * @return the decoded hive metastore delegation token
     */
    private Token<DelegationTokenIdentifier> fetchHcatToken(final String userToProxy, final HiveConf hiveConf, final String tokenSignatureOverwrite, final Logger logger) throws IOException, MetaException, TException {
        // Log the metastore connection settings actually in effect, for debugging.
        logger.info(HiveConf.ConfVars.METASTOREURIS.varname + ": " + hiveConf.get(HiveConf.ConfVars.METASTOREURIS.varname));
        logger.info(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname + ": " + hiveConf.get(HiveConf.ConfVars.METASTORE_USE_THRIFT_SASL.varname));
        logger.info(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname + ": " + hiveConf.get(HiveConf.ConfVars.METASTORE_KERBEROS_PRINCIPAL.varname));
        final IMetaStoreClient hiveClient = createRetryingMetaStoreClient(hiveConf);
        // Token is returned URL-encoded and must be decoded into a Token object.
        final String hcatTokenStr = hiveClient.getDelegationToken(userToProxy, UserGroupInformation.getLoginUser().getShortUserName());
        final Token<DelegationTokenIdentifier> hcatToken = new Token<>();
        hcatToken.decodeFromUrlString(hcatTokenStr);
        // overwrite the value of the service property of the token if the signature
        // override is specified.
        // If the service field is set, do not overwrite that
        if (hcatToken.getService().getLength() <= 0 && tokenSignatureOverwrite != null
                && tokenSignatureOverwrite.trim().length() > 0) {
            hcatToken.setService(new Text(tokenSignatureOverwrite.trim().toLowerCase()));
            // tokenSignatureOverwrite is non-null inside this branch; the null
            // guard in the message is redundant but harmless.
            logger.info(HIVE_TOKEN_SIGNATURE_KEY + ":" + (tokenSignatureOverwrite == null ? "" : tokenSignatureOverwrite));
        }
        logger.info("Created hive metastore token.");
        logger.info("Token kind: " + hcatToken.getKind());
        logger.info("Token service: " + hcatToken.getService());
        return hcatToken;
    }
}
public class OnePhaseResourceImpl {
    /**
     * Prepare a transaction.
     * <p>This is the first phase of the two-phase commit protocol. A one-phase
     * resource can never vote to commit in phase one, so this method always
     * ends in an {@link XAException}: either one raised by the wrapped
     * resource itself, or the XA_RBPROTO thrown unconditionally at the end.
     * @return never returns normally
     * @exception XAException always (XA_RBPROTO unless the resource threw first)
     * @exception SystemException
     */
    public final int prepare() throws XAException {
        if (tc.isEntryEnabled()) Tr.entry(tc, "prepare", _resource);
        // The underlying adapter OnePhaseXAResource will throw an
        // exception with FFDC specific to the wrappered RM.
        try {
            _resource.prepare(_xid);
        } finally {
            // Trace exit fires whether or not the resource threw.
            if (tc.isEntryEnabled()) Tr.exit(tc, "prepare");
        }
        // If the resource hasn't thrown its own
        // exception throw one here.
        throw new XAException(XAException.XA_RBPROTO);
    }
}
public class MediaEndpoint {
    /**
     * Creates the endpoint (RTP or WebRTC) and any other additional elements (if
     * needed). Construction is asynchronous; the latch is counted down exactly
     * once on either success or failure so callers can await completion.
     * @param endpointLatch released when the async build finishes (either way)
     */
    protected void internalEndpointInitialization(final CountDownLatch endpointLatch) {
        if (this.isWeb()) {
            WebRtcEndpoint.Builder builder = new WebRtcEndpoint.Builder(pipeline);
            /*
             * if ( this . dataChannels ) { builder . useDataChannels ( ) ; }
             */
            builder.buildAsync(new Continuation<WebRtcEndpoint>() {
                @Override
                public void onSuccess(WebRtcEndpoint result) throws Exception {
                    webEndpoint = result;
                    // Apply bandwidth limits before anything uses the endpoint.
                    webEndpoint.setMaxVideoRecvBandwidth(maxRecvKbps);
                    webEndpoint.setMinVideoRecvBandwidth(minRecvKbps);
                    webEndpoint.setMaxVideoSendBandwidth(maxSendKbps);
                    webEndpoint.setMinVideoSendBandwidth(minSendKbps);
                    endpointLatch.countDown();
                    log.trace("EP {}: Created a new WebRtcEndpoint", endpointName);
                    endpointSubscription = registerElemErrListener(webEndpoint);
                }
                @Override
                public void onError(Throwable cause) throws Exception {
                    // Release waiters even on failure; the endpoint stays null.
                    endpointLatch.countDown();
                    log.error("EP {}: Failed to create a new WebRtcEndpoint", endpointName, cause);
                }
            });
        } else {
            new RtpEndpoint.Builder(pipeline).buildAsync(new Continuation<RtpEndpoint>() {
                @Override
                public void onSuccess(RtpEndpoint result) throws Exception {
                    endpoint = result;
                    endpointLatch.countDown();
                    log.trace("EP {}: Created a new RtpEndpoint", endpointName);
                    endpointSubscription = registerElemErrListener(endpoint);
                }
                @Override
                public void onError(Throwable cause) throws Exception {
                    endpointLatch.countDown();
                    log.error("EP {}: Failed to create a new RtpEndpoint", endpointName, cause);
                }
            });
        }
    }
}
public class MtasTokenCollection { /** * Prints the . * @ throws MtasParserException the mtas parser exception */ public void print ( ) throws MtasParserException { } }
Iterator < MtasToken > it = this . iterator ( ) ; while ( it . hasNext ( ) ) { MtasToken token = it . next ( ) ; System . out . println ( token ) ; }
public class DescribeScheduledInstancesRequest { /** * The Scheduled Instance IDs . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setScheduledInstanceIds ( java . util . Collection ) } or { @ link # withScheduledInstanceIds ( java . util . Collection ) } * if you want to override the existing values . * @ param scheduledInstanceIds * The Scheduled Instance IDs . * @ return Returns a reference to this object so that method calls can be chained together . */ public DescribeScheduledInstancesRequest withScheduledInstanceIds ( String ... scheduledInstanceIds ) { } }
if ( this . scheduledInstanceIds == null ) { setScheduledInstanceIds ( new com . amazonaws . internal . SdkInternalList < String > ( scheduledInstanceIds . length ) ) ; } for ( String ele : scheduledInstanceIds ) { this . scheduledInstanceIds . add ( ele ) ; } return this ;
public class ServerBuilder {
    /**
     * Adds a new {@link ServerPort} that listens to the specified {@code port} of all available network
     * interfaces using the specified protocol. The protocol string is parsed
     * via {@code SessionProtocol.of}; a null protocol is rejected up front.
     * @deprecated Use {@link #http(int)} or {@link #https(int)}.
     * @see <a href="#no_port_specified">What happens if no HTTP(S) port is specified?</a>
     */
    @Deprecated
    public ServerBuilder port(int port, String protocol) {
        return port(port, SessionProtocol.of(requireNonNull(protocol, "protocol")));
    }
}
public class HazardCurve { /** * Create a discount curve from given times and given zero rates using default interpolation and extrapolation methods . * The discount factor is determined by * < code > * givenSurvivalProbabilities [ timeIndex ] = givenSurvivalProbabilities [ timeIndex - 1 ] * Math . exp ( - givenHazardRates [ timeIndex ] * ( times [ timeIndex ] - times [ timeIndex - 1 ] ) ) ; * < / code > * @ param name The name of this discount curve . * @ param times Array of times as doubles . * @ param givenHazardRates Array of corresponding zero rates . * @ return A new discount factor object . */ public static HazardCurve createHazardCurveFromHazardRate ( String name , double [ ] times , double [ ] givenHazardRates ) { } }
double [ ] givenSurvivalProbabilities = new double [ givenHazardRates . length ] ; if ( givenHazardRates [ 0 ] < 0 ) { throw new IllegalArgumentException ( "First hazard rate is not positive" ) ; } // initialize the term structure givenSurvivalProbabilities [ 0 ] = Math . exp ( - givenHazardRates [ 0 ] * times [ 0 ] ) ; /* * Construct the hazard curve by numerically integrating the hazard rates . * At each step check if the input hazard rate is positive . */ for ( int timeIndex = 1 ; timeIndex < times . length ; timeIndex ++ ) { if ( givenHazardRates [ timeIndex ] < 0 ) { throw new IllegalArgumentException ( "The " + timeIndex + "-th hazard rate is not positive" ) ; } givenSurvivalProbabilities [ timeIndex ] = givenSurvivalProbabilities [ timeIndex - 1 ] * Math . exp ( - givenHazardRates [ timeIndex ] * ( times [ timeIndex ] - times [ timeIndex - 1 ] ) ) ; } return createHazardCurveFromSurvivalProbabilities ( name , times , givenSurvivalProbabilities ) ;
public class Seconds { /** * Obtains a { @ code Seconds } representing the number of seconds * equivalent to a number of hours . * The resulting amount will be second - based , with the number of seconds * equal to the number of hours multiplied by 3600. * @ param hours the number of hours , positive or negative * @ return the amount with the input hours converted to seconds , not null * @ throws ArithmeticException if numeric overflow occurs */ public static Seconds ofHours ( int hours ) { } }
if ( hours == 0 ) { return ZERO ; } return new Seconds ( Math . multiplyExact ( hours , SECONDS_PER_HOUR ) ) ;
public class FileInfo {
    /**
     * Converts the given path to look like a directory path.
     * If the path already looks like a directory path then
     * this call is a no-op. Only storage-object paths are rewritten;
     * other resource kinds (e.g. plain buckets) pass through unchanged.
     * @param pathCodec codec used to parse and rebuild the path.
     * @param path Path to convert.
     * @return Directory path for the given path.
     */
    public static URI convertToDirectoryPath(PathCodec pathCodec, URI path) {
        StorageResourceId resourceId = pathCodec.validatePathAndGetId(path, true);
        if (resourceId.isStorageObject()) {
            if (!objectHasDirectoryPath(resourceId.getObjectName())) {
                // Rewrite the object name into directory form, then rebuild the URI.
                resourceId = convertToDirectoryPath(resourceId);
                path = pathCodec.getPath(resourceId.getBucketName(), resourceId.getObjectName(), false /* allow empty name */);
            }
        }
        return path;
    }
}
public class ControlBrowseStatusImpl { /** * Get summary trace line for this message * Javadoc description supplied by ControlMessage interface . */ public void getTraceSummaryLine ( StringBuilder buff ) { } }
// Get the common fields for control messages super . getTraceSummaryLine ( buff ) ; buff . append ( ",browseID=" ) ; buff . append ( getBrowseID ( ) ) ; buff . append ( ",status=" ) ; buff . append ( getStatus ( ) ) ;
public class TimeUtil {
    /**
     * Formats a date using the pattern associated with the given time type.
     * <p>Function: formatDate</p>
     * <p>Description: delegates to the cached/looked-up format for the type.</p>
     * @param timeType the format pattern selector
     * @param date the date to format
     * @return the formatted date string
     * @author acexy@thankjava.com
     * @date 2015年6月18日 上午10:01:09
     * @version 1.0
     */
    public static String formatDate(TimeType timeType, Date date) {
        // NOTE(review): if getDateFormat returns a shared SimpleDateFormat this
        // is not thread-safe — confirm how getDateFormat manages instances.
        return getDateFormat(timeType).format(date);
    }
}
public class Transform1D {
    /**
     * Translate.
     * <p>If the given <var>path</var> contains only one segment,
     * the transformation will follow the segment's direction.
     * Convenience overload: delegates to the three-argument form with a
     * null segment argument, as declared by the {@code @Inline} annotation.
     * @param thePath the path to follow.
     * @param move where <code>x</code> is the curviline coordinate and <code>y</code> is the shift coordinate.
     */
    @Inline(value = "translate($1, null, $2)")
    public void translate(List<? extends S> thePath, Tuple2D<?> move) {
        translate(thePath, null, move);
    }
}
public class LinearEquationSystem {
    /**
     * Solves the linear system with the chosen pivot search method.
     * First reduces the system to reduced row echelon form (if not already),
     * bails out if unsolvable, then computes one special solution {@code x_0}
     * and a basis {@code u} of the homogeneous solution space.
     * @param method the pivot search method
     */
    private void solve(int method) throws NullPointerException {
        // solution exists
        if (solved) {
            return;
        }
        // bring in reduced row echelon form
        if (!reducedRowEchelonForm) {
            reducedRowEchelonForm(method);
        }
        if (!isSolvable(method)) {
            if (LOG.isDebugging()) {
                LOG.debugFine("Equation system is not solvable!");
            }
            return;
        }
        // compute one special solution
        final int cols = coeff[0].length;
        int numbound = 0, numfree = 0;
        int[] boundIndices = new int[cols], freeIndices = new int[cols];
        x_0 = new double[cols];
        // Classify each row's pivot column as bound (pivot == 1 found) or free.
        // NOTE(review): row[] / col[] appear to be permutations from the pivoting
        // step — confirm against reducedRowEchelonForm's bookkeeping.
        outer: for (int i = 0; i < coeff.length; i++) {
            for (int j = i; j < coeff[row[i]].length; j++) {
                if (coeff[row[i]][col[j]] == 1) {
                    x_0[col[i]] = rhs[row[i]];
                    boundIndices[numbound++] = col[i];
                    continue outer;
                }
            }
            freeIndices[numfree++] = i;
        }
        StringBuilder msg = new StringBuilder();
        if (LOG.isDebugging()) {
            msg.append("\nSpecial solution x_0 = [").append(FormatUtil.format(x_0, ",", FormatUtil.NF4)).append(']').append("\nbound Indices ").append(FormatUtil.format(boundIndices, ",")).append("\nfree Indices ").append(FormatUtil.format(freeIndices, ","));
        }
        // compute solution space of homogeneous linear equation system
        Arrays.sort(boundIndices, 0, numbound);
        int freeIndex = 0;
        int boundIndex = 0;
        u = new double[cols][numfree];
        // One basis vector per free variable: 1 in the free position, negated
        // coefficients in the bound positions.
        for (int j = 0; j < u[0].length; j++) {
            for (int i = 0; i < u.length; i++) {
                if (freeIndex < numfree && i == freeIndices[freeIndex]) {
                    u[i][j] = 1;
                } else if (boundIndex < numbound && i == boundIndices[boundIndex]) {
                    u[i][j] = -coeff[row[boundIndex]][freeIndices[freeIndex]];
                    boundIndex++;
                }
            }
            freeIndex++;
            boundIndex = 0; // Restart
        }
        if (LOG.isDebugging()) {
            msg.append("\nU");
            for (double[] anU : u) {
                msg.append('\n').append(FormatUtil.format(anU, ",", FormatUtil.NF4));
            }
            LOG.debugFine(msg.toString());
        }
        solved = true;
    }
}
public class AvatarNodeZkUtil {
    /**
     * This method tries to update the information in ZooKeeper. For every address
     * of the NameNode it is being run for (fs.default.name,
     * dfs.namenode.dn-address, dfs.namenode.http.address) if they are present. It
     * also creates information for aliases in ZooKeeper for lists of strings in
     * fs.default.name.aliases, dfs.namenode.dn-address.aliases and
     * dfs.namenode.http.address.aliases.
     * Each address is transformed to the address of the zNode to be created by
     * substituting all . and : characters to /. The slash is also added in the
     * front to make it a valid zNode address. So dfs.domain.com:9000 will be
     * /dfs/domain/com/9000.
     * If any part of the path does not exist it is created automatically.
     * Silently returns when no ZooKeeper quorum is configured.
     */
    public static void updateZooKeeper(Configuration originalConf, Configuration conf, boolean toOverwrite, String serviceName, String primaryInstance) throws IOException {
        String connection = conf.get(FSConstants.FS_HA_ZOOKEEPER_QUORUM);
        // No quorum configured: nothing to register.
        if (connection == null) return;
        // NOTE(review): zk is never explicitly shut down on any path here —
        // confirm AvatarZooKeeperClient does not need closing.
        AvatarZooKeeperClient zk = new AvatarZooKeeperClient(conf, null);
        // If the client-protocol registration short-circuits, skip the rest.
        if (registerClientProtocolAddress(zk, originalConf, conf, toOverwrite)) {
            return;
        }
        registerDnProtocolAddress(zk, originalConf, conf, toOverwrite);
        registerHttpAddress(zk, originalConf, conf, toOverwrite);
        // Register every key under the ACTIVE znode for the current instance.
        for (ZookeeperKey key : ZookeeperKey.values()) {
            zk.registerPrimary(getZnodeName(conf, serviceName, Avatar.ACTIVE, key), key.getIpPortString(conf), true);
        }
        // Flip to the other node instance (zero <-> one) to register STANDBY entries.
        if (primaryInstance.equalsIgnoreCase(StartupOption.NODEZERO.getName())) {
            primaryInstance = StartupOption.NODEONE.getName();
        } else {
            primaryInstance = StartupOption.NODEZERO.getName();
        }
        Configuration tempConf = AvatarZKShell.updateConf(primaryInstance, originalConf);
        for (ZookeeperKey key : ZookeeperKey.values()) {
            zk.registerPrimary(getZnodeName(tempConf, serviceName, Avatar.STANDBY, key), key.getIpPortString(tempConf), true);
        }
    }
}
public class Ifc4PackageImpl {
    /**
     * Returns the {@link EEnum} for IfcPileTypeEnum, resolving it lazily from
     * the global package registry and caching it.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public EEnum getIfcPileTypeEnum() {
        if (ifcPileTypeEnumEEnum == null) {
            // Index 1031 is the generated position of IfcPileTypeEnum in this
            // package's classifier list.
            ifcPileTypeEnumEEnum = (EEnum) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers().get(1031);
        }
        return ifcPileTypeEnumEEnum;
    }
}
public class KunderaMetadataManager { /** * Gets the metamodel . * @ param persistenceUnits * the persistence units * @ return the metamodel */ public static MetamodelImpl getMetamodel ( final KunderaMetadata kunderaMetadata , String ... persistenceUnits ) { } }
MetamodelImpl metamodel = null ; for ( String pu : persistenceUnits ) { metamodel = ( MetamodelImpl ) kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( pu ) ; if ( metamodel != null ) { return metamodel ; } } // FIXME : I need to verify this why we need common entity metadata now ! // if ( metamodel = = null ) // metamodel = ( MetamodelImpl ) // kunderaMetadata . getApplicationMetadata ( ) . getMetamodel ( // Constants . COMMON _ ENTITY _ METADATAS ) ; return metamodel ;
public class JKObjectUtil { /** * Creates the instance for generic class . * @ param < T > the generic type * @ param parent the parent * @ return the t */ public static < T > T createInstanceForGenericClass ( Object parent ) { } }
try { Object instance = getGenericClassFromParent ( parent ) . newInstance ( ) ; return ( T ) instance ; } catch ( InstantiationException | IllegalAccessException e ) { JK . throww ( e ) ; } return null ;
public class ArrayMask {
    /**
     * Writes this mask to the specified output stream.
     * Format: a two-byte length prefix followed by the raw mask bytes.
     * @param out the stream to write to
     * @throws IOException if the underlying stream fails
     */
    public void writeTo(ObjectOutputStream out) throws IOException {
        // Length first so the reader knows how many bytes follow.
        // NOTE(review): writeShort truncates lengths above Short.MAX_VALUE —
        // assumes masks are always short; confirm against callers.
        out.writeShort(_mask.length);
        out.write(_mask);
    }
}
public class ZooKeeperMasterModel {
    /**
     * Returns a list of the hosts/agents that have been registered.
     * A missing config-hosts node means no hosts are registered yet and yields
     * an empty list; any other ZooKeeper failure is wrapped and rethrown.
     */
    @Override
    public List<String> listHosts() {
        try {
            // TODO (dano): only return hosts whose agents completed registration (i.e. has id nodes)
            return provider.get("listHosts").getChildren(Paths.configHosts());
        } catch (KeeperException.NoNodeException e) {
            // The hosts root does not exist yet: nothing registered.
            return emptyList();
        } catch (KeeperException e) {
            throw new HeliosRuntimeException("listing hosts failed", e);
        }
    }
}
public class AptType { /** * Checks a MethodDeclaration for a ' private ' modifier . * @ param md MethodDeclaration to check . * @ return true if private modifier is present . */ protected boolean isPrivateMethod ( MethodDeclaration md ) { } }
Collection < Modifier > modifiers = md . getModifiers ( ) ; for ( Modifier m : modifiers ) { if ( m . compareTo ( Modifier . PRIVATE ) == 0 ) return true ; } return false ;
public class KatharsisClient {
    /**
     * Sets the factory to use to create action stubs (like JAX-RS annotated
     * repository methods). A non-null factory is immediately initialized with
     * a context exposing this client's URL provider and HTTP adapter.
     * @param actionStubFactory
     *        to use; may be null to clear the factory
     */
    public void setActionStubFactory(ActionStubFactory actionStubFactory) {
        this.actionStubFactory = actionStubFactory;
        if (actionStubFactory != null) {
            actionStubFactory.init(new ActionStubFactoryContext() {
                @Override
                public ServiceUrlProvider getServiceUrlProvider() {
                    // Resolved lazily through the registry on each call.
                    return moduleRegistry.getResourceRegistry().getServiceUrlProvider();
                }
                @Override
                public HttpAdapter getHttpAdapter() {
                    return httpAdapter;
                }
            });
        }
    }
}
public class Redwood {
    /**
     * Various informal tests of Redwood functionality.
     * This is a manual demo/stress driver, not an automated test: it exercises
     * tracks, channels, record collapsing, multithreading, stream capture and
     * exit behavior, relying on a human to inspect the console output.
     * @param args Unused
     */
    public static void main(String[] args) {
        // -- STRESS TEST THREADS --
        Runnable[] tasks = new Runnable[1000];
        for (int i = 0; i < tasks.length; i++) {
            final int fI = i;
            tasks[i] = new Runnable() {
                public void run() {
                    startTrack("Runnable " + fI);
                    log(Thread.currentThread().getId());
                    log("message " + fI + ".1");
                    log("message " + fI + ".2");
                    log("message " + fI + ".3");
                    log(FORCE, "message " + fI + ".4");
                    log("message " + fI + ".5");
                    forceTrack("Runnable " + fI + ".1");
                    endTrack("Runnable " + fI + ".1");
                    forceTrack("Runnable " + fI + ".2");
                    log("a message");
                    endTrack("Runnable " + fI + ".2");
                    forceTrack("Runnable " + fI + ".3");
                    log("a message");
                    log(FORCE, "A forced message");
                    endTrack("Runnable " + fI + ".3");
                    endTrack("Runnable " + fI);
                }
            };
        }
        startTrack("Wrapper");
        for (int i = 0; i < 100; i++) {
            Util.threadAndRun(new ArrayIterable(tasks), 100);
        }
        endTrack("Wrapper");
        // NOTE(review): this exit makes everything below unreachable at runtime;
        // presumably a leftover toggle to stop after the stress test — confirm.
        System.exit(1);
        forceTrack("Track 1");
        log("tag", ERR, "hello world");
        startTrack("Hidden");
        startTrack("Subhidden");
        endTrack("Subhidden");
        endTrack("Hidden");
        startTrack(FORCE, "Shown");
        startTrack(FORCE, "Subshown");
        endTrack("Subshown");
        endTrack("Shown");
        log("^shown should have appeared above");
        startTrack("Track 1.1");
        log(WARN, "some", "something in 1.1");
        log("some", ERR, "something in 1.1");
        log(FORCE, "some", WARN, "something in 1.1");
        log(WARN, FORCE, "some", "something in 1.1");
        logf("format string %s then int %d", "hello", 7);
        endTrack("Track 1.1");
        startTrack();
        log("In an anonymous track");
        endTrack();
        endTrack("Track 1");
        log("outside of a track");
        log("these", "channels", "should", "be", "in", DBG, "alphabetical", "order", "a log item with lots of channels");
        log("these", "channels", "should", "be", "in", DBG, "alphabetical", "order", "a log item\nthat spans\nmultiple\nlines");
        log(DBG, "a last log item");
        log(ERR, null);
        // -- Repeated Records
        RedwoodConfiguration.current().collapseExact().apply();
        // (simple case)
        forceTrack("Strict Equality");
        for (int i = 0; i < 100; i++) {
            log("this is a message");
        }
        endTrack("Strict Equality");
        // (in-track change)
        forceTrack("Change");
        for (int i = 0; i < 10; i++) {
            log("this is a message");
        }
        for (int i = 0; i < 10; i++) {
            log("this is a another message");
        }
        for (int i = 0; i < 10; i++) {
            log("this is a third message");
        }
        for (int i = 0; i < 5; i++) {
            log("this is a fourth message");
        }
        log(FORCE, "this is a fourth message");
        for (int i = 0; i < 5; i++) {
            log("this is a fourth message");
        }
        log("^middle 'fourth message' was forced");
        endTrack("Change");
        // (suppress tracks)
        forceTrack("Repeated Tracks");
        for (int i = 0; i < 100; i++) {
            startTrack("Track type 1");
            log("a message");
            endTrack("Track type 1");
        }
        for (int i = 0; i < 100; i++) {
            startTrack("Track type 2");
            log("a message");
            endTrack("Track type 2");
        }
        for (int i = 0; i < 100; i++) {
            startTrack("Track type 3");
            log("a message");
            endTrack("Track type 3");
        }
        startTrack("Track type 3");
        startTrack("nested");
        log(FORCE, "this should show up");
        endTrack("nested");
        endTrack("Track type 3");
        for (int i = 0; i < 5; i++) {
            startTrack("Track type 3");
            log(FORCE, "this should show up");
            endTrack("Track type 3");
        }
        log(WARN, "The log message 'this should show up' should show up 6 (5+1) times above");
        endTrack("Repeated Tracks");
        // (tracks with invisible things)
        Redwood.hideOnlyChannels(DBG);
        forceTrack("Hidden Subtracks");
        for (int i = 0; i < 100; i++) {
            startTrack("Only has debug messages");
            log(DBG, "You shouldn't see me");
            endTrack("Only has debug messages");
        }
        log("You shouldn't see any other messages or 'skipped tracks' here");
        endTrack("Hidden Subtracks");
        // (fuzzy repeats)
        RedwoodConfiguration.standard().apply();
        RedwoodConfiguration.current().collapseApproximate().apply();
        forceTrack("Fuzzy Equality");
        for (int i = 0; i < 100; i++) {
            log("iter " + i + " ended with value " + (-34587292534.0 + Math.sqrt(i) * 3000000000.0));
        }
        endTrack("Fuzzy Equality");
        forceTrack("Fuzzy Equality (timing)");
        for (int i = 0; i < 100; i++) {
            log("iter " + i + " ended with value " + (-34587292534.0 + Math.sqrt(i) * 3000000000.0));
            try {
                Thread.sleep(50);
            } catch (InterruptedException e) {
            }
        }
        endTrack("Fuzzy Equality (timing)");
        // -- Util Helper
        Util.log("hello world");
        Util.log(DBG, "hello world");
        Util.debug("hello world");
        Util.debug("atag", "hello world");
        // -- Show Name at Track Finish
        Redwood.getHandler(ConsoleHandler.class).minLineCountForTrackNameReminder = 5;
        startTrack("Long Track");
        for (int i = 0; i < 10; i++) {
            log(FORCE, "contents of long track");
        }
        // NOTE(review): "Long TracK" (capital K) does not match the start label
        // "Long Track" — presumably a typo; confirm whether endTrack matches by name.
        endTrack("Long TracK");
        startTrack("Long Track");
        startTrack("But really this is the long one");
        try {
            Thread.sleep(3000);
        } catch (InterruptedException e) {
        }
        for (int i = 0; i < 10; i++) {
            log(FORCE, "contents of long track");
        }
        endTrack("But really this is the long one");
        endTrack("Long TracK");
        Redwood.getHandler(ConsoleHandler.class).minLineCountForTrackNameReminder = 50;
        // -- Multithreading
        ExecutorService exec = Executors.newFixedThreadPool(10);
        startThreads("name");
        for (int i = 0; i < 50; i++) {
            final int theI = i;
            exec.execute(new Runnable() {
                public void run() {
                    startTrack("Thread " + theI + " (" + Thread.currentThread().getId() + ")");
                    for (int time = 0; time < 5; time++) {
                        log("tick " + time + " from " + theI + " (" + Thread.currentThread().getId() + ")");
                        try {
                            Thread.sleep(50);
                        } catch (Exception e) {
                        }
                    }
                    endTrack("Thread " + theI + " (" + Thread.currentThread().getId() + ")");
                    finishThread();
                }
            });
        }
        exec.shutdown();
        try {
            exec.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
        }
        endThreads("name");
        // -- System Streams
        Redwood.captureSystemStreams(true, true);
        System.out.println("Hello World");
        System.err.println("This is an error!");
        // -- Neat Exit
        RedwoodConfiguration.standard().collapseExact().apply();
        // (on close)
        for (int i = 0; i < 100; i++) {
            // startTrack ( ) ;
            log("stuff!");
            // endTrack ( ) ;
        }
        Util.exit(0);
        // (on exception)
        System.out.println("I'm going to exception soon (on purpose)");
        RedwoodConfiguration.current().neatExit().apply();
        startTrack("I should close");
        log(FORCE, "so I'm nonempty...");
        try {
            Thread.sleep(3000);
        } catch (InterruptedException e) {
        }
        // Deliberate crash to demonstrate neatExit handling.
        throw new IllegalArgumentException();
    }
}
public class SAML2AuthnResponseValidator { /** * Searches the sessionIndex in the assertion * @ param subjectAssertion assertion from the response * @ return the sessionIndex if found in the assertion */ protected String getSessionIndex ( final Assertion subjectAssertion ) { } }
List < AuthnStatement > authnStatements = subjectAssertion . getAuthnStatements ( ) ; if ( authnStatements != null && authnStatements . size ( ) > 0 ) { AuthnStatement statement = authnStatements . get ( 0 ) ; if ( statement != null ) { return statement . getSessionIndex ( ) ; } } return null ;
public class MediumOrientationImpl {
    /**
     * Reflective feature accessor: returns the value of the feature identified
     * by {@code featureID}, delegating unknown ids to the superclass.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case AfplibPackage.MEDIUM_ORIENTATION__MED_ORIENT:
                return getMedOrient();
        }
        // Not a feature of this class: let the inherited implementation handle it.
        return super.eGet(featureID, resolve, coreType);
    }
}