signature stringlengths 43 39.1k | implementation stringlengths 0 450k |
|---|---|
public class EstimateKeywordTraffic { /** * Runs the example .
* @ param adWordsServices the services factory .
* @ param session the session .
* @ throws ApiException if the API request failed with one or more service errors .
* @ throws RemoteException if the API request failed due to other errors . */
public static void runExample ( AdWordsServicesInterface adWordsServices , AdWordsSession session ) throws RemoteException { } } | // Get the TrafficEstimatorService .
TrafficEstimatorServiceInterface trafficEstimatorService = adWordsServices . get ( session , TrafficEstimatorServiceInterface . class ) ; // Create keywords . Refer to the TrafficEstimatorService documentation for the maximum
// number of keywords that can be passed in a single request .
// https : / / developers . google . com / adwords / api / docs / reference / latest / TrafficEstimatorService
List < Keyword > keywords = new ArrayList < Keyword > ( ) ; Keyword marsCruiseKeyword = new Keyword ( ) ; marsCruiseKeyword . setText ( "mars cruise" ) ; marsCruiseKeyword . setMatchType ( KeywordMatchType . BROAD ) ; keywords . add ( marsCruiseKeyword ) ; Keyword cheapCruiseKeyword = new Keyword ( ) ; cheapCruiseKeyword . setText ( "cheap cruise" ) ; cheapCruiseKeyword . setMatchType ( KeywordMatchType . PHRASE ) ; keywords . add ( cheapCruiseKeyword ) ; Keyword cruiseKeyword = new Keyword ( ) ; cruiseKeyword . setText ( "cruise" ) ; cruiseKeyword . setMatchType ( KeywordMatchType . EXACT ) ; keywords . add ( cruiseKeyword ) ; // Create a keyword estimate request for each keyword .
List < KeywordEstimateRequest > keywordEstimateRequests = keywords . stream ( ) . map ( keyword -> { KeywordEstimateRequest keywordEstimateRequest = new KeywordEstimateRequest ( ) ; keywordEstimateRequest . setKeyword ( keyword ) ; return keywordEstimateRequest ; } ) . collect ( Collectors . toList ( ) ) ; // Add a negative keyword to the traffic estimate .
KeywordEstimateRequest negativeKeywordEstimateRequest = new KeywordEstimateRequest ( ) ; negativeKeywordEstimateRequest . setKeyword ( new Keyword ( null , null , null , "hiking tour" , KeywordMatchType . BROAD ) ) ; negativeKeywordEstimateRequest . setIsNegative ( true ) ; keywordEstimateRequests . add ( negativeKeywordEstimateRequest ) ; // Create ad group estimate requests .
List < AdGroupEstimateRequest > adGroupEstimateRequests = new ArrayList < AdGroupEstimateRequest > ( ) ; AdGroupEstimateRequest adGroupEstimateRequest = new AdGroupEstimateRequest ( ) ; adGroupEstimateRequest . setKeywordEstimateRequests ( keywordEstimateRequests . toArray ( new KeywordEstimateRequest [ ] { } ) ) ; adGroupEstimateRequest . setMaxCpc ( new Money ( null , 1000000L ) ) ; adGroupEstimateRequests . add ( adGroupEstimateRequest ) ; // Create campaign estimate requests .
List < CampaignEstimateRequest > campaignEstimateRequests = new ArrayList < CampaignEstimateRequest > ( ) ; CampaignEstimateRequest campaignEstimateRequest = new CampaignEstimateRequest ( ) ; campaignEstimateRequest . setAdGroupEstimateRequests ( adGroupEstimateRequests . toArray ( new AdGroupEstimateRequest [ ] { } ) ) ; Location unitedStates = new Location ( ) ; unitedStates . setId ( 2840L ) ; Language english = new Language ( ) ; english . setId ( 1000L ) ; campaignEstimateRequest . setCriteria ( new Criterion [ ] { unitedStates , english } ) ; campaignEstimateRequests . add ( campaignEstimateRequest ) ; // Create selector .
TrafficEstimatorSelector selector = new TrafficEstimatorSelector ( ) ; selector . setCampaignEstimateRequests ( campaignEstimateRequests . toArray ( new CampaignEstimateRequest [ ] { } ) ) ; // Optional : Request a list of campaign level estimates segmented by platform .
selector . setPlatformEstimateRequested ( true ) ; // Get traffic estimates .
TrafficEstimatorResult result = trafficEstimatorService . get ( selector ) ; // Display traffic estimates .
if ( result != null && result . getCampaignEstimates ( ) != null && result . getCampaignEstimates ( ) . length > 0 ) { CampaignEstimate campaignEstimate = result . getCampaignEstimates ( ) [ 0 ] ; // Display the campaign level estimates segmented by platform .
if ( campaignEstimate . getPlatformEstimates ( ) != null ) { for ( PlatformCampaignEstimate platformEstimate : campaignEstimate . getPlatformEstimates ( ) ) { String platformMessage = String . format ( "Results for the platform with ID %d and name '%s':%n" , platformEstimate . getPlatform ( ) . getId ( ) , platformEstimate . getPlatform ( ) . getPlatformName ( ) ) ; displayMeanEstimates ( platformMessage , platformEstimate . getMinEstimate ( ) , platformEstimate . getMaxEstimate ( ) ) ; } } // Display the keyword estimates .
KeywordEstimate [ ] keywordEstimates = campaignEstimate . getAdGroupEstimates ( ) [ 0 ] . getKeywordEstimates ( ) ; for ( int i = 0 ; i < keywordEstimates . length ; i ++ ) { if ( Boolean . TRUE . equals ( keywordEstimateRequests . get ( i ) . getIsNegative ( ) ) ) { continue ; } Keyword keyword = keywordEstimateRequests . get ( i ) . getKeyword ( ) ; KeywordEstimate keywordEstimate = keywordEstimates [ i ] ; String keywordMessage = String . format ( "Results for the keyword with text '%s' and match type '%s':%n" , keyword . getText ( ) , keyword . getMatchType ( ) ) ; displayMeanEstimates ( keywordMessage , keywordEstimate . getMin ( ) , keywordEstimate . getMax ( ) ) ; } } else { System . out . println ( "No traffic estimates were returned." ) ; } |
public class TableRef {
  /**
   * Attaches a listener that runs every time the given event type occurs.
   *
   * <pre>
   * StorageRef storage = new StorageRef("your_app_key", "your_token");
   * TableRef tableRef = storage.table("your_table");
   * // Add an update listener
   * tableRef.on(StorageRef.StorageEvent.UPDATE, new OnItemSnapshot() {
   *   &#064;Override
   *   public void run(ItemSnapshot itemSnapshot) {
   *     if (itemSnapshot != null) {
   *       Log.d("TableRef", "Item updated: " + itemSnapshot.val());
   *     }
   *   }
   * });
   * </pre>
   *
   * @param eventType the type of the event to listen for. Possible values: put, update, delete.
   * @param onItemSnapshot the function to run whenever the event occurs, called with the snapshot
   *     of the affected item. If the event type is "put", it will immediately trigger a "getItems"
   *     to fetch the initial data and run the callback with each item snapshot. Note: if you are
   *     using GCM the received ItemSnapshot can be null.
   * @return the current table reference
   */
  public TableRef on(StorageEvent eventType, final OnItemSnapshot onItemSnapshot) {
    // Delegate to the three-argument overload with no error callback.
    return on(eventType, onItemSnapshot, null);
  }
}
public class AbstractSQLInsertClause {
  /**
   * Populates the INSERT clause with the properties of the given bean using
   * the given Mapper.
   *
   * @param obj object to use for population
   * @param mapper mapper to use; produces the path-to-value map for {@code obj}
   * @return the current object (fluent API)
   */
  @SuppressWarnings("rawtypes")
  @WithBridgeMethods(value = SQLInsertClause.class, castRequired = true)
  public <T> C populate(T obj, Mapper<T> mapper) {
    // Let the mapper extract column paths and values from the bean, then bind
    // each pair. The raw Path cast is required because the map's wildcard path
    // type cannot be matched to set()'s signature.
    Map<Path<?>, Object> values = mapper.createMap(entity, obj);
    for (Map.Entry<Path<?>, Object> entry : values.entrySet()) {
      set((Path) entry.getKey(), entry.getValue());
    }
    return (C) this;
  }
}
public class PoolDeleteOptions { /** * Set a timestamp indicating the last modified time of the resource known to the client . The operation will be performed only if the resource on the service has been modified since the specified time .
* @ param ifModifiedSince the ifModifiedSince value to set
* @ return the PoolDeleteOptions object itself . */
public PoolDeleteOptions withIfModifiedSince ( DateTime ifModifiedSince ) { } } | if ( ifModifiedSince == null ) { this . ifModifiedSince = null ; } else { this . ifModifiedSince = new DateTimeRfc1123 ( ifModifiedSince ) ; } return this ; |
public class UpdateSubscriptionDefinitionRequestMarshaller {
  /**
   * Marshalls the given parameter object into its wire representation.
   *
   * @param updateSubscriptionDefinitionRequest the request to marshall; must not be null
   * @param protocolMarshaller the protocol marshaller that receives each bound member
   * @throws SdkClientException if the request is null or marshalling fails
   */
  public void marshall(UpdateSubscriptionDefinitionRequest updateSubscriptionDefinitionRequest,
      ProtocolMarshaller protocolMarshaller) {
    if (updateSubscriptionDefinitionRequest == null) {
      throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
      // Marshall each bound member of the request.
      protocolMarshaller.marshall(updateSubscriptionDefinitionRequest.getName(), NAME_BINDING);
      protocolMarshaller.marshall(
          updateSubscriptionDefinitionRequest.getSubscriptionDefinitionId(),
          SUBSCRIPTIONDEFINITIONID_BINDING);
    } catch (Exception e) {
      // Wrap any failure, preserving the original cause.
      throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
  }
}
public class GitRepo {
  /**
   * Walks commits from {@code from} towards {@code to}, assigning commits to tags.
   * This could be done recursively but would cause a {@link StackOverflowError}
   * for large repos, so it is implemented as an iterative worklist instead.
   *
   * @param from the starting commit
   * @param to the target object id
   * @param tagPerCommitHash tag lookup by commit hash
   * @param tagPerCommitsHash accumulated tag names per commit hash
   * @param commitsPerTag accumulated commits grouped by tag name
   * @param datePerTag accumulated dates per tag name
   * @param startingTagName the tag name in effect at the start of traversal
   * @throws Exception if the underlying traversal fails
   */
  private void populateComitPerTag(final RevCommit from, final ObjectId to,
      final Map<String, Ref> tagPerCommitHash, final Map<String, String> tagPerCommitsHash,
      final Map<String, Set<GitCommit>> commitsPerTag, final Map<String, Date> datePerTag,
      final String startingTagName) throws Exception {
    // Seed the worklist with the first traversal step.
    final Set<TraversalWork> moreWork = populateCommitPerTag(from, to, commitsPerTag,
        tagPerCommitHash, tagPerCommitsHash, datePerTag, startingTagName);
    do {
      final Set<TraversalWork> evenMoreWork = newTreeSet();
      // Iterate over a snapshot so items can be removed from the live set.
      for (final TraversalWork tw : newArrayList(moreWork)) {
        moreWork.remove(tw);
        final Set<TraversalWork> newWork = populateCommitPerTag(from, tw.getTo(), commitsPerTag,
            tagPerCommitHash, tagPerCommitsHash, datePerTag, tw.getCurrentTagName());
        evenMoreWork.addAll(newWork);
      }
      // Newly discovered work feeds the next round.
      moreWork.addAll(evenMoreWork);
      LOG.debug("Work left: " + moreWork.size());
    } while (!moreWork.isEmpty());
  }
}
public class From { /** * Creates and chains a Joins object for specifying the JOIN clause of the query .
* @ param joins The Join objects .
* @ return The Joins object that represents the JOIN clause of the query . */
@ NonNull @ Override public Joins join ( @ NonNull Join ... joins ) { } } | if ( joins == null ) { throw new IllegalArgumentException ( "joins cannot be null." ) ; } return new Joins ( this , Arrays . asList ( joins ) ) ; |
public class ClasspathUtility { /** * Search all files that match the given Regex pattern .
* @ param classpathEntryPath the root folder used for search
* @ param searchPattern the regex pattern used as a filter
* @ param cachedJar is a jar cached by Java Webstart without any extension
* @ return list of resources that match the pattern */
private static List < String > getResources ( final String classpathEntryPath , final Pattern searchPattern , final boolean cachedJar ) { } } | final List < String > resources = new ArrayList < > ( ) ; // System . out . println ( " Search into " + classpathEntryPath ) ;
final File classpathEntryFile = new File ( classpathEntryPath ) ; // The classpath entry could be a jar or a folder
if ( classpathEntryFile . isDirectory ( ) ) { // Browse the folder content
resources . addAll ( getResourcesFromDirectory ( classpathEntryFile , searchPattern ) ) ; } else if ( classpathEntryFile . getName ( ) . endsWith ( ".jar" ) || classpathEntryFile . getName ( ) . endsWith ( ".zip" ) || cachedJar ) { // Explode and browse jar | zip content
resources . addAll ( getResourcesFromJarOrZipFile ( classpathEntryFile , searchPattern ) ) ; } else { LOGGER . log ( RESOURCE_IGNORED , classpathEntryFile . getAbsolutePath ( ) ) ; } return resources ; |
public class Workbench {
  /**
   * Adds a nonempty, not acquired workbench entry to the workbench.
   *
   * @param entry a nonempty, not acquired workbench entry. Preconditions are
   *     enforced with assertions only (no-ops unless -ea is set).
   */
  public void add(WorkbenchEntry entry) {
    assert !entry.isEmpty() : entry;
    assert !entry.acquired : entry;
    entries.add(entry);
    // Keep the approximate size counter in sync with the entry set.
    approximatedSize.incrementAndGet();
  }
}
public class HelloSignClient { /** * Refreshes the OauthData for this client with the provided refresh token .
* @ param refreshToken String
* @ return OauthData new OAuthData returned from HelloSign
* @ throws HelloSignException thrown if there ' s a problem processing the
* HTTP request or the JSON response . */
public OauthData refreshOauthData ( String refreshToken ) throws HelloSignException { } } | OauthData data = new OauthData ( httpClient . withAuth ( auth ) . withPostField ( OAUTH_GRANT_TYPE , OAUTH_GRANT_TYPE_REFRESH_TOKEN ) . withPostField ( OAUTH_REFRESH_TOKEN , refreshToken ) . post ( OAUTH_TOKEN_URL ) . asJson ( ) ) ; if ( data != null ) { setAccessToken ( data . getAccessToken ( ) , data . getTokenType ( ) ) ; } return data ; |
public class HttpDateFormatImpl { /** * Get access to the format wrapper class that is local to this particular
* worker thread .
* < br >
* @ return HttpLocalFormat */
private HttpLocalFormat getFormat ( ) { } } | HttpLocalFormat format = threadStorage . get ( ) ; if ( null == format ) { format = new HttpLocalFormat ( ) ; threadStorage . set ( format ) ; } return format ; |
public class UserApi { /** * Creates a GitLabApiForm instance that will optionally include the
* with _ custom _ attributes query param if enabled .
* @ return a GitLabApiForm instance that will optionally include the
* with _ custom _ attributes query param if enabled */
private GitLabApiForm createGitLabApiForm ( ) { } } | GitLabApiForm formData = new GitLabApiForm ( ) ; return ( customAttributesEnabled ? formData . withParam ( "with_custom_attributes" , true ) : formData ) ; |
public class BruteForceInferencer { /** * Gets the product of all the factors in the factor graph . If working in
* the log - domain , this will do factor addition .
* @ return The product of all the factors . */
private static VarTensor getProductOfAllFactors ( FactorGraph fg , Algebra s ) { } } | VarTensor joint = new VarTensor ( s , new VarSet ( ) , s . one ( ) ) ; for ( int a = 0 ; a < fg . getNumFactors ( ) ; a ++ ) { Factor f = fg . getFactor ( a ) ; VarTensor factor = safeNewVarTensor ( s , f ) ; assert ! factor . containsBadValues ( ) : factor ; joint . prod ( factor ) ; } return joint ; |
public class Trigger {
  /**
   * Sets the inertia associated with the trigger in milliseconds.
   *
   * @param inertiaMillis the inertia associated with the trigger in milliseconds.
   *     Cannot be null or negative — validated only when an alert is attached.
   */
  public void setInertia(Long inertiaMillis) {
    if (this.alert == null) { // Only during deserialization: no alert yet, skip validation.
      this.inertia = inertiaMillis;
    } else {
      requireArgument(inertiaMillis != null && inertiaMillis >= 0, "Inertia cannot be negative.");
      // Inertia may not exceed the widest interval in the alert expression.
      // NOTE(review): if getMaximumIntervalLength() can return null, the
      // unboxing comparison below would NPE — confirm its contract.
      Long longestIntervalLength = AlertUtils.getMaximumIntervalLength(this.alert.getExpression());
      if (inertiaMillis > longestIntervalLength)
        throw new IllegalArgumentException(String.format(
            "Inertia %d cannot be more than width of the longest interval %d.",
            inertiaMillis, longestIntervalLength));
      this.inertia = inertiaMillis;
    }
  }
}
public class CPMeasurementUnitPersistenceImpl {
  /**
   * Creates a new cp measurement unit with the primary key. Does not add the
   * cp measurement unit to the database.
   *
   * @param CPMeasurementUnitId the primary key for the new cp measurement unit
   * @return the new cp measurement unit (marked new, with a generated UUID and
   *     the current company id)
   */
  @Override
  public CPMeasurementUnit create(long CPMeasurementUnitId) {
    CPMeasurementUnit cpMeasurementUnit = new CPMeasurementUnitImpl();
    // Mark as transient so the persistence layer inserts rather than updates.
    cpMeasurementUnit.setNew(true);
    cpMeasurementUnit.setPrimaryKey(CPMeasurementUnitId);
    String uuid = PortalUUIDUtil.generate();
    cpMeasurementUnit.setUuid(uuid);
    cpMeasurementUnit.setCompanyId(companyProvider.getCompanyId());
    return cpMeasurementUnit;
  }
}
public class LinkPredicates { /** * Returns a Predicate that is matching links having the specified name { @ link Link # getName ( ) name }
* @ param name the expected name of the link
* @ return Predicate used to select links */
public static Predicate < Link > havingName ( final String name ) { } } | return link -> name . equals ( link . getName ( ) ) ; |
public class AjcHelper { /** * Constructs AspectJ compiler classpath string
* @ param project the Maven Project
* @ param pluginArtifacts the plugin Artifacts
* @ param outDirs the outputDirectories
* @ return a os spesific classpath string */
@ SuppressWarnings ( "unchecked" ) public static String createClassPath ( MavenProject project , List < Artifact > pluginArtifacts , List < String > outDirs ) { } } | String cp = new String ( ) ; Set < Artifact > classPathElements = Collections . synchronizedSet ( new LinkedHashSet < Artifact > ( ) ) ; Set < Artifact > dependencyArtifacts = project . getDependencyArtifacts ( ) ; classPathElements . addAll ( dependencyArtifacts == null ? Collections . < Artifact > emptySet ( ) : dependencyArtifacts ) ; classPathElements . addAll ( project . getArtifacts ( ) ) ; classPathElements . addAll ( pluginArtifacts == null ? Collections . < Artifact > emptySet ( ) : pluginArtifacts ) ; for ( Artifact classPathElement : classPathElements ) { File artifact = classPathElement . getFile ( ) ; if ( null != artifact ) { String type = classPathElement . getType ( ) ; if ( ! type . equals ( "pom" ) ) { cp += classPathElement . getFile ( ) . getAbsolutePath ( ) ; cp += File . pathSeparatorChar ; } } } Iterator < String > outIter = outDirs . iterator ( ) ; while ( outIter . hasNext ( ) ) { cp += outIter . next ( ) ; cp += File . pathSeparatorChar ; } if ( cp . endsWith ( "" + File . pathSeparatorChar ) ) { cp = cp . substring ( 0 , cp . length ( ) - 1 ) ; } cp = StringUtils . replace ( cp , "//" , "/" ) ; return cp ; |
public class Database {
  /**
   * Executes a query and converts the results to instances of the given class
   * using default mechanisms.
   *
   * @param cl the class each result row is converted to
   * @param query the SQL query to execute
   * @return the list of converted results (never null)
   */
  public @NotNull <T> List<T> findAll(@NotNull Class<T> cl, @NotNull SqlQuery query) {
    // Delegate to the generic executor with the default row processor for cl.
    return executeQuery(resultProcessorForClass(cl), query);
  }
}
public class LUDecompositionBase_ZDRM {
  /**
   * Solves using the lower triangular matrix in LU via forward substitution
   * with the recorded row pivots. Diagonal elements are assumed to be 1.
   * Values are complex, stored interleaved as (real, imaginary) pairs in vv.
   *
   * @param vv right-hand side on input, overwritten with the solution of L*y = b
   */
  protected void solveL(double[] vv) {
    // ii tracks (1 + index of) the first nonzero element encountered, so the
    // inner accumulation loop can be skipped while the prefix is all zeros.
    int ii = 0;
    for (int i = 0; i < n; i++) {
      // Apply the row pivot: swap vv[i] into position ip.
      int ip = indx[i];
      double sumReal = vv[ip * 2];
      double sumImg = vv[ip * 2 + 1];
      vv[ip * 2] = vv[i * 2];
      vv[ip * 2 + 1] = vv[i * 2 + 1];
      if (ii != 0) {
        // Equivalent real-valued code:
        // for (int j = ii - 1; j < i; j++)
        //   sum -= dataLU[i * n + j] * vv[j];
        int index = i * stride + (ii - 1) * 2;
        for (int j = ii - 1; j < i; j++) {
          double luReal = dataLU[index++];
          double luImg = dataLU[index++];
          double vvReal = vv[j * 2];
          double vvImg = vv[j * 2 + 1];
          // Complex multiply-subtract: sum -= lu * vv.
          sumReal -= luReal * vvReal - luImg * vvImg;
          sumImg -= luReal * vvImg + luImg * vvReal;
        }
      } else if (sumReal * sumReal + sumImg * sumImg != 0.0) {
        // First nonzero RHS element found; start accumulating from here on.
        ii = i + 1;
      }
      vv[i * 2] = sumReal;
      vv[i * 2 + 1] = sumImg;
    }
  }
}
public class TextInputFormat { @ Override public void configure ( Configuration parameters ) { } } | super . configure ( parameters ) ; if ( charsetName == null || ! Charset . isSupported ( charsetName ) ) { throw new RuntimeException ( "Unsupported charset: " + charsetName ) ; } |
public class MMTImpl {
  /**
   * Sets the value of the feature identified by {@code featureID}.
   * EMF-generated reflective setter; unknown features fall through to super.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue) {
    switch (featureID) {
      case AfplibPackage.MMT__RG:
        // Replace the RG list contents with the new collection.
        getRG().clear();
        getRG().addAll((Collection<? extends MMTRG>) newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }
}
public class LssClient {
  /**
   * Resumes a domain's stream in the live stream service.
   * (The previous doc said "pause", which contradicted the method's behavior:
   * it issues the RESUME action.)
   *
   * @param request the request object containing all options for resuming a
   *     domain's stream; domain, app and stream must be non-empty
   * @return the response
   */
  public ResumeDomainStreamResponse resumeDomainStream(ResumeDomainStreamRequest request) {
    // Validate required identifiers before building the request.
    checkNotNull(request, "The parameter request should NOT be null.");
    checkStringNotEmpty(request.getDomain(), "Domain should NOT be empty.");
    checkStringNotEmpty(request.getApp(), "App should NOT be empty.");
    checkStringNotEmpty(request.getStream(), "Stream should NOT be empty.");
    InternalRequest internalRequest = createRequest(HttpMethodName.PUT, request, LIVE_DOMAIN,
        request.getDomain(), LIVE_APP, request.getApp(), LIVE_STREAM, request.getStream());
    // The resume action is expressed as a value-less query parameter.
    internalRequest.addParameter(RESUME, null);
    return invokeHttpClient(internalRequest, ResumeDomainStreamResponse.class);
  }
}
public class PorterStemmer { /** * cvc ( i ) is true < = > i - 2 , i - 1 , i has the form consonant - vowel - consonant
* and also if the second c is not w , x or y . this is used when trying to
* restore an e at the end of a short word . e . g .
* cav ( e ) , lov ( e ) , hop ( e ) , crim ( e ) , but snow , box , tray . */
private final boolean cvc ( int i ) { } } | if ( i < 2 || ! isConsonant ( i ) || isConsonant ( i - 1 ) || ! isConsonant ( i - 2 ) ) { return false ; } { int ch = b [ i ] ; if ( ch == 'w' || ch == 'x' || ch == 'y' ) { return false ; } } return true ; |
public class TagProjectRequestMarshaller {
  /**
   * Marshalls the given parameter object into its wire representation.
   *
   * @param tagProjectRequest the request to marshall; must not be null
   * @param protocolMarshaller the protocol marshaller that receives each bound member
   * @throws SdkClientException if the request is null or marshalling fails
   */
  public void marshall(TagProjectRequest tagProjectRequest, ProtocolMarshaller protocolMarshaller) {
    if (tagProjectRequest == null) {
      throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
      // Marshall each bound member of the request.
      protocolMarshaller.marshall(tagProjectRequest.getId(), ID_BINDING);
      protocolMarshaller.marshall(tagProjectRequest.getTags(), TAGS_BINDING);
    } catch (Exception e) {
      // Wrap any failure, preserving the original cause.
      throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
  }
}
public class WNumberField { /** * { @ inheritDoc } */
@ Override public void setData ( final Object data ) { } } | // This override is necessary to maintain other internal state
NumberFieldModel model = getOrCreateComponentModel ( ) ; try { super . setData ( convertValue ( data ) ) ; model . text = null ; model . validNumber = true ; } catch ( SystemException e ) { super . setData ( data ) ; model . text = data . toString ( ) ; model . validNumber = false ; } |
public class BinarySearcher { /** * Search bytes in byte array returns indexes within this byte - array of all
* occurrences of the specified ( search bytes ) byte array in the specified
* range
* @ param srcBytes
* @ param searchBytes
* @ param searchStartIndex
* @ param searchEndIndex
* @ return result index list */
public List < Integer > searchBytes ( byte [ ] srcBytes , byte [ ] searchBytes , int searchStartIndex , int searchEndIndex ) { } } | final int destSize = searchBytes . length ; final List < Integer > positionIndexList = new ArrayList < Integer > ( ) ; int cursor = searchStartIndex ; while ( cursor < searchEndIndex + 1 ) { final int index = indexOf ( srcBytes , searchBytes , cursor , searchEndIndex ) ; if ( index >= 0 ) { positionIndexList . add ( index ) ; cursor = index + destSize ; } else { cursor ++ ; } } return positionIndexList ; |
public class Animate {
  /**
   * Animates any element with a specific animation. Animation is done by CSS and runs
   * multiple times. Animation is started when the element is appended to the DOM or a
   * new (not same) animation is added to an already displayed element. Animation runs
   * on hidden elements too and is not paused/stopped when the element is set as hidden.
   *
   * @param widget Widget to apply animation to.
   * @param animation Custom CSS class name used as animation.
   * @param count Number of animation repeats. 0 disables animation, any negative value
   *     sets repeats to infinite.
   * @param duration Animation duration in ms. 0 disables animation, any negative value
   *     keeps the default of the original animation.
   * @param delay Delay before starting the animation loop in ms. Value &lt;= 0 means no delay.
   * @param <T> Any object extending UIObject class (typically Widget).
   * @return the animation's CSS class name, which can be removed to stop the animation,
   *     or null when widget or animation is null.
   */
  public static <T extends UIObject> String animate(final T widget, final String animation,
      final int count, final int duration, final int delay) {
    if (widget != null && animation != null) { // on valid input
      if (widget.getStyleName().contains(animation)) {
        // Animation already present: remove it, then re-apply after a short
        // deferred delay so the browser actually restarts the CSS animation.
        stopAnimation(widget, animation);
        Scheduler.get().scheduleFixedDelay(new Scheduler.RepeatingCommand() {
          @Override
          public boolean execute() {
            styleElement(widget.getElement(), animation, count, duration, delay);
            return false; // run once
          }
        }, 200);
        // Return the class name that styleElement will apply.
        return animation + " " + getStyleNameFromAnimation(animation, count, duration, delay);
      } else { // animation was not present, run immediately
        return styleElement(widget.getElement(), animation, count, duration, delay);
      }
    } else {
      return null;
    }
  }
}
public class COSUnderFileSystem { /** * Creates an COS { @ code ClientConfiguration } using an Alluxio Configuration .
* @ return the COS { @ link ClientConfig } */
private static ClientConfig createCOSClientConfig ( String regionName , UnderFileSystemConfiguration conf ) { } } | ClientConfig config = new ClientConfig ( new Region ( regionName ) ) ; config . setConnectionTimeout ( ( int ) conf . getMs ( PropertyKey . COS_CONNECTION_TIMEOUT ) ) ; config . setSocketTimeout ( ( int ) conf . getMs ( PropertyKey . COS_SOCKET_TIMEOUT ) ) ; config . setMaxConnectionsCount ( conf . getInt ( PropertyKey . COS_CONNECTION_MAX ) ) ; return config ; |
public class ExpectUtils {
  /**
   * Opens a telnet session to the given host/port and wraps its streams in an
   * Expect4j instance whose close() also disconnects the telnet client.
   *
   * TODO Simulate "Could not open connection to the host, on port ...."
   * TODO Simulate "Connection refused"
   *
   * @param hostname the host to connect to
   * @param port the telnet port
   * @return an Expect4j bound to the telnet session's streams
   * @throws Exception if the connection or option negotiation fails
   */
  public static Expect4j telnet(String hostname, int port) throws Exception {
    // This library has trouble with EOF.
    final TelnetClient client = new TelnetClient();
    // Negotiate terminal type, echo and suppress-go-ahead options before connecting.
    TerminalTypeOptionHandler ttopt = new TerminalTypeOptionHandler("VT100", false, false, true, true);
    EchoOptionHandler echoopt = new EchoOptionHandler(true, false, true, false);
    SuppressGAOptionHandler gaopt = new SuppressGAOptionHandler(false, false, false, false);
    client.addOptionHandler(ttopt);
    client.addOptionHandler(echoopt);
    client.addOptionHandler(gaopt);
    client.connect(hostname, port);
    // Streams are null until the client is connected; wrap them for NVT ASCII.
    InputStream is = new FromNetASCIIInputStream(client.getInputStream());
    OutputStream os = new ToNetASCIIOutputStream(client.getOutputStream());
    /* Alternative implementation kept for reference:
    URL url = new URL("telnet", hostname, port, "", new thor.net.URLStreamHandler());
    final URLConnection urlConnection = url.openConnection();
    urlConnection.connect();
    if (urlConnection instanceof TelnetURLConnection) {
      ((TelnetURLConnection) urlConnection).setTelnetTerminalHandler(new SimpleTelnetTerminalHandler());
      OutputStream os = urlConnection.getOutputStream();
      InputStream is = urlConnection.getInputStream();
      StreamPair pair = new StreamPair(is, os) {
        public void close() {
          try { ((TelnetURLConnection) urlConnection).disconnect(); } catch (Exception e) { }
    */
    // Ensure the telnet client is disconnected when the Expect session closes.
    return new Expect4j(is, os) {
      public void close() {
        super.close();
        try {
          client.disconnect();
        } catch (IOException e) {
          logger.error("Failed to close telnet session", e);
        }
      }
    };
  }
}
public class HttpServerShellCmdHandler {
  /**
   * Called when an HTTP request is received. For "Evaluators" targets, forwards
   * the query as a command to the driver, then blocks (bounded wait) until the
   * command output is posted back; for "Driver" targets runs the command locally.
   * The method is synchronized because it coordinates with the output producer
   * via this object's monitor (notify/wait).
   *
   * @param parsedHttpRequest the parsed incoming request
   * @param response the servlet response the command output is written to
   */
  @Override
  public final synchronized void onHttpRequest(final ParsedHttpRequest parsedHttpRequest,
      final HttpServletResponse response) throws IOException, ServletException {
    LOG.log(Level.INFO, "HttpServeShellCmdHandler in webserver onHttpRequest is called: {0}",
        parsedHttpRequest.getRequestUri());
    final String queryStr = parsedHttpRequest.getQueryString();
    if (parsedHttpRequest.getTargetEntity().equalsIgnoreCase("Evaluators")) {
      // Encode the command and hand it to the driver's message handler.
      final byte[] b = HttpShellJobDriver.CODEC.encode(queryStr);
      LOG.log(Level.INFO, "HttpServeShellCmdHandler call HelloDriver onCommand(): {0}", queryStr);
      messageHandler.get().onNext(b);
      notify();
      // Poll-wait (bounded by WAIT_TIMEOUT) for cmdOutput to be filled in by
      // the thread that receives the command result.
      final long endTime = System.currentTimeMillis() + WAIT_TIMEOUT;
      while (cmdOutput == null) {
        final long waitTime = endTime - System.currentTimeMillis();
        if (waitTime <= 0) {
          break; // timed out; fall through with whatever we have
        }
        try {
          wait(WAIT_TIME);
        } catch (final InterruptedException e) {
          LOG.log(Level.WARNING, "HttpServeShellCmdHandler onHttpRequest InterruptedException: {0}", e);
        }
      }
      if (cmdOutput != null) {
        response.getOutputStream().write(cmdOutput.getBytes(StandardCharsets.UTF_8));
        cmdOutput = null; // consume the output so the next request starts clean
      }
    } else if (parsedHttpRequest.getTargetEntity().equalsIgnoreCase("Driver")) {
      // Driver-side commands are executed directly in this process.
      final String commandOutput = CommandUtils.runCommand(queryStr);
      response.getOutputStream().write(commandOutput.getBytes(StandardCharsets.UTF_8));
    }
  }
}
public class AbstractEntityFieldProcessor { /** * Gets the valid jpa column name .
* @ param entity
* the entity
* @ param f
* the f
* @ return the valid jpa column name */
protected final String getValidJPAColumnName ( Class < ? > entity , Field f ) { } } | String name = null ; if ( f . isAnnotationPresent ( Column . class ) ) { Column c = f . getAnnotation ( Column . class ) ; if ( ! c . name ( ) . isEmpty ( ) ) { name = c . name ( ) ; } else { name = f . getName ( ) ; } } else if ( f . isAnnotationPresent ( Basic . class ) ) { name = f . getName ( ) ; } if ( f . isAnnotationPresent ( Temporal . class ) ) { if ( ! f . getType ( ) . equals ( Date . class ) ) { log . error ( "@Temporal must map to java.util.Date for @Entity(" + entity . getName ( ) + "." + f . getName ( ) + ")" ) ; return name ; } if ( null == name ) { name = f . getName ( ) ; } } return name ; |
public class Notify { /** * Updates Icon parameter of once displayed Notify .
* This method is shortcut when using FONT AWESOME iconic font .
* @ param type IconType to get CSS class name to set */
public final void updateIcon ( final IconType type ) { } } | if ( type != null ) updateIcon ( Styles . FONT_AWESOME_BASE + " " + type . getCssName ( ) ) ; |
public class MultiFileJournalHelper { /** * Get the Journal Files that exist the Journal Directory , sorted by name . */
static File [ ] getSortedArrayOfJournalFiles ( File journalDirectory , String filenamePrefix ) { } } | JournalFileFilter filter = new JournalFileFilter ( filenamePrefix ) ; File [ ] journalFiles = journalDirectory . listFiles ( filter ) ; Arrays . sort ( journalFiles , new FilenameComparator ( ) ) ; return journalFiles ; |
public class AfplibPackageImpl {
  /**
   * Returns the GSGCH EClass, lazily resolving it from the registered package
   * on first access. EMF-generated accessor.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public EClass getGSGCH() {
    if (gsgchEClass == null) {
      // Lazily resolve the classifier by its fixed index in the package.
      gsgchEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI)
          .getEClassifiers().get(463);
    }
    return gsgchEClass;
  }
}
public class Preconditions {
  /**
   * A {@code double} specialized version of {@link #checkPrecondition(Object,
   * Predicate, Function)}.
   *
   * @param value The value
   * @param predicate The predicate
   * @param describer The describer of the predicate
   * @return value
   * @throws PreconditionViolationException If the predicate is false, or if
   *     evaluating the predicate itself throws
   */
  public static double checkPreconditionD(final double value, final DoublePredicate predicate,
      final DoubleFunction<String> describer) {
    final boolean ok;
    try {
      ok = predicate.test(value);
    } catch (final Throwable e) {
      // A throwing predicate is itself a violation; preserve the cause.
      final Violations violations = singleViolation(failedPredicate(e));
      throw new PreconditionViolationException(
          failedMessage(Double.valueOf(value), violations), e, violations.count());
    }
    // Delegate the pass/fail decision (and message rendering) to the shared helper.
    return innerCheckD(value, ok, describer);
  }
}
public class CheckClassAdapter { /** * Checks an identifier .
* @ param signature
* a string containing the signature that must be checked .
* @ param pos
* index of first character to be checked .
* @ return the index of the first character after the checked part . */
private static int checkIdentifier ( final String signature , int pos ) { } } | if ( ! Character . isJavaIdentifierStart ( getChar ( signature , pos ) ) ) { throw new IllegalArgumentException ( signature + ": identifier expected at index " + pos ) ; } ++ pos ; while ( Character . isJavaIdentifierPart ( getChar ( signature , pos ) ) ) { ++ pos ; } return pos ; |
public class StrUtils { /** * Compare 2 array
* @ return true if each item equal */
public static boolean arraysEqual ( Object [ ] array1 , Object [ ] array2 ) { } } | if ( array1 == null || array1 . length == 0 || array2 == null || array2 . length == 0 ) DialectException . throwEX ( "StrUtils arraysEqual() method can not compare empty arrays" ) ; for ( int i = 0 ; array1 != null && array2 != null && i < array1 . length ; i ++ ) if ( ! array1 [ i ] . equals ( array2 [ i ] ) ) return false ; return true ; |
/**
 * Validates {@link BeanAttributes}: stereotypes, qualifiers, types, and scope,
 * in that order.
 *
 * @param attributes {@link BeanAttributes} to validate
 * @param manager bean manager used by the individual validation steps
 */
public static void validateBeanAttributes(BeanAttributes<?> attributes, BeanManager manager) {
    validateStereotypes(attributes, manager);
    validateQualifiers(attributes, manager);
    validateTypes(attributes, manager);
    validateScope(attributes, manager);
}
/**
 * Log ad hoc batch info at DEBUG level: a summary line (counts, read-only and
 * partitioning flags, user and groups), then each statement's SQL, then each
 * user-supplied parameter value.
 *
 * @param context catalog context used to resolve the invoking user's groups
 * @param batch planned statement batch
 * @param userParams user-supplied parameter values; may be null
 */
private void logBatch(final CatalogContext context, final AdHocPlannedStmtBatch batch, final Object[] userParams) {
    final int numStmts = batch.getPlannedStatementCount();
    final int numParams = userParams == null ? 0 : userParams.length;
    final String readOnly = batch.readOnly ? "yes" : "no";
    final String singlePartition = batch.isSinglePartitionCompatible() ? "yes" : "no";
    final String user = getUsername();
    final String[] groupNames = context.authSystem.getGroupNamesForUser(user);
    final String groupList = StringUtils.join(groupNames, ',');
    // String [ ] stmtArray = batch . stmts . stream ( ) . map ( s - > new String ( s . sql , Charsets . UTF _ 8 ) ) . toArray ( String [ ] : : new ) ;
    adhocLog.debug(String.format("=== statements=%d parameters=%d read-only=%s single-partition=%s user=%s groups=[%s]", numStmts, numParams, readOnly, singlePartition, user, groupList));
    for (int i = 0; i < batch.getPlannedStatementCount(); i++) {
        AdHocPlannedStatement stmt = batch.getPlannedStatement(i);
        // A planned statement may carry no SQL text; log a placeholder instead.
        String sql = stmt.sql == null ? "SQL_UNKNOWN" : new String(stmt.sql, Charsets.UTF_8);
        adhocLog.debug(String.format("Statement #%d: %s", i + 1, sql));
    }
    if (userParams != null) {
        for (int i = 0; i < userParams.length; ++i) {
            Object value = userParams[i];
            final String valueString = (value != null ? value.toString() : "NULL");
            adhocLog.debug(String.format("Parameter #%d: %s", i + 1, valueString));
        }
    }
}
/**
 * Add policy: stores the expiration policy under the given name.
 *
 * @param name the name the policy is keyed by
 * @param policy the policy to register
 */
public void addPolicy(final String name, final ExpirationPolicy policy) {
    LOGGER.trace("Adding expiration policy [{}] with name [{}]", policy, name);
    this.policies.put(name, policy);
}
/**
 * Animates the radar beam of the component. This has no effect
 * on the functionality but is only eye candy. Does nothing while the
 * component is disabled.
 *
 * @param RUN enables/disables the animation of the beam
 */
public void animate(final boolean RUN) {
    if (isEnabled()) {
        if (RUN) {
            // Start a fresh rotation timeline only if none is playing or suspended.
            if (timeline.getState() != Timeline.TimelineState.PLAYING_FORWARD && timeline.getState() != Timeline.TimelineState.SUSPENDED) {
                timeline = new Timeline(this);
                // One full revolution (current angle -> 2*PI) over 5 seconds, looping forever.
                timeline.addPropertyToInterpolate("rotationAngle", this.rotationAngle, 2 * Math.PI);
                timeline.setEase(new org.pushingpixels.trident.ease.Linear());
                timeline.setDuration((long) (5000));
                timeline.playLoop(Timeline.RepeatBehavior.LOOP);
            } else if (timeline.getState() == Timeline.TimelineState.SUSPENDED) {
                // A suspended beam is resumed rather than restarted from scratch.
                timeline.resume();
            }
        } else {
            timeline.suspend();
        }
    }
}
public class PropertiesHelper { /** * Converts Properties to Map
* @ param properties properties to convert .
* @ return map version of properties . */
public Map < String , Object > convertPropertiesToMap ( Properties properties ) { } } | return properties . entrySet ( ) . stream ( ) . collect ( toLinkedMap ( e -> e . getKey ( ) . toString ( ) , e -> e . getValue ( ) ) ) ; |
/**
 * Adds the corner of Polygon and refreshes the derived corner count.
 *
 * @param v The coordinates of a new added corner.
 */
public void vertex(Vector3D v) {
    // NOTE(review): this unconditionally switches the render mode to 3D lines
    // for the whole polygon -- confirm that is intended for every caller.
    MODE = LINES_3D;
    this.cornerX.add(v.getX());
    this.cornerY.add(v.getY());
    this.cornerZ.add(v.getZ());
    // Keep the corner counter in sync with the coordinate lists.
    setNumberOfCorner(this.cornerX.size());
    // Recompute derived geometry (presumably the centroid -- see calcG) after the change.
    calcG();
}
/**
 * Repaints the battery bar for the given value: chooses the bar color
 * (gradient lookup, matching section color, or the gauge's plain bar color),
 * fills the battery shape with a hard-edged gradient proportional to the
 * value, and updates/centers the percentage text.
 *
 * @param VALUE the current gauge value used to size and color the bar
 */
private void setBar(final double VALUE) {
    double factor = VALUE / gauge.getRange();
    Color barColor = gauge.getBarColor();
    // Gradient bar takes precedence over section colors when both are configured.
    if (gauge.isGradientBarEnabled() && !gauge.getGradientBarStops().isEmpty()) {
        barColor = gauge.getGradientLookup().getColorAt(factor);
    } else if (gauge.getSectionsVisible() && !sections.isEmpty()) {
        int listSize = sections.size();
        for (int i = 0; i < listSize; i++) {
            if (sections.get(i).contains(VALUE)) {
                barColor = sections.get(i).getColor();
                break;
            }
        }
    }
    // Two coincident stops at 'factor' create the sharp fill edge; the vertical
    // variant fills bottom-up, hence the 1 - factor stops.
    if (Orientation.HORIZONTAL == orientation) {
        batteryPaint = new LinearGradient(0, 0, size, 0, false, CycleMethod.NO_CYCLE,
                new Stop(0, barColor),
                new Stop(factor, barColor),
                new Stop(factor, Color.TRANSPARENT),
                new Stop(1, Color.TRANSPARENT));
    } else {
        batteryPaint = new LinearGradient(0, 0, 0, size, false, CycleMethod.NO_CYCLE,
                new Stop(0, Color.TRANSPARENT),
                new Stop(1 - factor, Color.TRANSPARENT),
                new Stop(1 - factor, barColor),
                new Stop(1, barColor));
    }
    battery.setFill(batteryPaint);
    valueText.setText(String.format(locale, "%.0f%%", factor * 100));
    valueText.relocate((size - valueText.getLayoutBounds().getWidth()) * 0.5, (size - valueText.getLayoutBounds().getHeight()) * 0.5);
}
/**
 * Sets the state of the virtual machine to generalized.
 * Delegates to the async variant and blocks until the single response arrives.
 *
 * @param resourceGroupName The name of the resource group.
 * @param vmName The name of the virtual machine.
 * @throws IllegalArgumentException thrown if parameters fail the validation
 * @throws CloudException thrown if the request is rejected by server
 * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
 * @return the OperationStatusResponseInner object if successful.
 */
public OperationStatusResponseInner generalize(String resourceGroupName, String vmName) {
    return generalizeWithServiceResponseAsync(resourceGroupName, vmName).toBlocking().single().body();
}
public class Pcg32 { /** * Returns the next pseudorandom , Gaussian ( " normally " ) distributed
* { @ code double } value with mean { @ code 0.0 } and standard
* deviation { @ code 1.0 } from this random number generator ' s sequence .
* @ see java . util . Random # nextGaussian ( ) */
public double nextGaussian ( ) { } } | // See Knuth , ACP , Section 3.4.1 Algorithm C .
if ( haveNextNextGaussian ) { haveNextNextGaussian = false ; return nextNextGaussian ; } else { double v1 , v2 , s ; do { v1 = 2 * nextDouble ( ) - 1 ; // between - 1 and 1
v2 = 2 * nextDouble ( ) - 1 ; // between - 1 and 1
s = v1 * v1 + v2 * v2 ; } while ( s >= 1 || s == 0 ) ; double multiplier = StrictMath . sqrt ( - 2 * StrictMath . log ( s ) / s ) ; nextNextGaussian = v2 * multiplier ; haveNextNextGaussian = true ; return v1 * multiplier ; } |
/**
 * Computes the probability of the sequence OF being at the end of the table
 * given that the first tag in table is GIVEN. given is at the beginning, of is
 * at the end.
 *
 * @param given label index occupying the first position of the window
 * @param of labels of the remaining windowSize - 1 positions
 * @return the probability of the sequence of being at the end of the table
 * @throws IllegalArgumentException if of.length != windowSize - 1
 */
public double unnormalizedConditionalLogProbGivenFirst(int given, int[] of) {
    if (of.length != windowSize - 1) {
        throw new IllegalArgumentException("unnormalizedConditionalLogProbGivenFirst requires of one less than clique size (" + windowSize + ") but was " + Arrays.toString(of));
    }
    // compute P(given, of) over the full window [given, of...]
    int[] labels = new int[windowSize];
    labels[0] = given;
    System.arraycopy(of, 0, labels, 1, windowSize - 1);
    // double probAll = logProb ( labels ) ;
    double probAll = unnormalizedLogProb(labels);
    // compute P(given)
    // double probGiven = logProbFront ( given ) ;
    // double probGiven = unnormalizedLogProbFront ( given ) ;
    // compute P(given, of) / P(given)
    // return probAll - probGiven ;
    // NOTE(review): the subtraction of the P(given) term is deliberately disabled
    // above, so this returns the unnormalized joint rather than a true
    // conditional -- confirm that is still intended.
    return probAll;
}
public class SelectorBuilderImpl { /** * Copies the whole set of predicates , by creating new instances of each one in the given set .
* @ return the new set of Predicate objects with different instance for each of the Predicate */
private Set < Predicate > copyPredicatesSet ( ) { } } | Set < Predicate > predicatesCopy = Sets . newLinkedHashSet ( ) ; for ( Predicate predicate : this . predicates ) { Predicate copyPredicate = new Predicate ( ) ; copyPredicate . setField ( predicate . getField ( ) ) ; copyPredicate . setOperator ( predicate . getOperator ( ) ) ; copyPredicate . getValues ( ) . addAll ( predicate . getValues ( ) ) ; predicatesCopy . add ( copyPredicate ) ; } return predicatesCopy ; |
public class ToIntFunctionBuilder { /** * One of ways of creating builder . This is possibly the least verbose way where compiler should be able to guess the generic parameters . */
@ Nonnull public static < T > ToIntFunction < T > toIntFunctionFrom ( Consumer < ToIntFunctionBuilder < T > > buildingFunction ) { } } | ToIntFunctionBuilder builder = new ToIntFunctionBuilder ( ) ; buildingFunction . accept ( builder ) ; return builder . build ( ) ; |
public class HttpUtils { /** * Submits http post data to an HttpURLConnection .
* @ param conn teh connection
* @ param encodedData the encoded data to be sent
* @ throws IOException on any connection */
public static void sendPostData ( HttpURLConnection conn , String encodedData ) throws IOException { } } | StreamManager sm = new StreamManager ( ) ; try { conn . setDoOutput ( true ) ; conn . setRequestMethod ( "POST" ) ; if ( conn . getRequestProperty ( "Content-Type" ) == null ) { conn . setRequestProperty ( "Content-Type" , "application/x-www-form-urlencoded" ) ; } if ( encodedData != null ) { if ( conn . getRequestProperty ( "Content-Length" ) == null ) { conn . setRequestProperty ( "Content-Length" , String . valueOf ( encodedData . getBytes ( "UTF-8" ) . length ) ) ; } DataOutputStream out = new DataOutputStream ( sm . handle ( conn . getOutputStream ( ) ) ) ; out . write ( encodedData . getBytes ( ) ) ; out . close ( ) ; } } finally { sm . closeAll ( ) ; } |
/**
 * Expands the group, which corresponds to a specific index.
 *
 * @param groupIndex The index of the group, which should be expanded, as an {@link Integer} value
 * @return True, if the group has been expanded, false otherwise (no adapter is
 *         set, or the group was already expanded)
 */
public final boolean expandGroup(final int groupIndex) {
    ExpandableListAdapter adapter = getExpandableListAdapter();
    if (adapter != null && !isGroupExpanded(groupIndex)) {
        expandedGroups.add(groupIndex);
        // Refresh the view so the newly expanded group's children become visible.
        notifyDataSetChanged();
        return true;
    }
    return false;
}
/**
 * Send an INFO log message with {@link SUBSYSTEM#MAIN} as default one.
 *
 * @param tag Used to identify the source of a log message. It usually identifies the class or
 *            activity where the log call occurs.
 * @param msg The message you would like logged.
 * @return the value returned by the subsystem-aware overload
 */
public static int i(String tag, String msg) {
    return i(SUBSYSTEM.MAIN, tag, msg);
}
public class RunsInner { /** * Gets a link to download the run logs .
* @ param resourceGroupName The name of the resource group to which the container registry belongs .
* @ param registryName The name of the container registry .
* @ param runId The run ID .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the RunGetLogResultInner object */
public Observable < RunGetLogResultInner > getLogSasUrlAsync ( String resourceGroupName , String registryName , String runId ) { } } | return getLogSasUrlWithServiceResponseAsync ( resourceGroupName , registryName , runId ) . map ( new Func1 < ServiceResponse < RunGetLogResultInner > , RunGetLogResultInner > ( ) { @ Override public RunGetLogResultInner call ( ServiceResponse < RunGetLogResultInner > response ) { return response . body ( ) ; } } ) ; |
/**
 * Move all bytes in current read buffers to output array, free read buffers
 * back to thread local memory pool.
 *
 * @param output destination array; exactly output.length bytes are copied
 * @throws IllegalStateException if fewer than output.length bytes are buffered
 */
void getBytes(byte[] output) {
    if (m_totalAvailable < output.length) {
        throw new IllegalStateException("Requested " + output.length + " bytes; only have " + m_totalAvailable + " bytes; call tryRead() first");
    }
    int bytesCopied = 0;
    while (bytesCopied < output.length) {
        BBContainer firstC = m_readBBContainers.peekFirst();
        if (firstC == null) {
            // Steal the write buffer: flip it for reading and enqueue it as a read buffer.
            m_poolBBContainer.b().flip();
            m_readBBContainers.add(m_poolBBContainer);
            firstC = m_poolBBContainer;
            m_poolBBContainer = null;
        }
        ByteBuffer first = firstC.b();
        assert first.remaining() > 0;
        // Copy bytes from first into output, capped by whichever side is smaller.
        int bytesRemaining = first.remaining();
        int bytesToCopy = output.length - bytesCopied;
        if (bytesToCopy > bytesRemaining) bytesToCopy = bytesRemaining;
        first.get(output, bytesCopied, bytesToCopy);
        bytesCopied += bytesToCopy;
        m_totalAvailable -= bytesToCopy;
        if (first.remaining() == 0) {
            // Read an entire block: dequeue it and release it back to the pool.
            m_readBBContainers.poll();
            firstC.discard();
        }
    }
}
/**
 * Creates a resource factory that creates handles of the type specified
 * by the {@link ResourceFactory#CREATES_OBJECT_CLASS} property.
 *
 * Resolves variables in the incoming properties, derives a unique admin-object
 * id from application/module/component/jndiName, maps the destination name onto
 * the resource-adapter-specific property, merges default and user-specified
 * adapter attributes, and registers the resulting admin-object configuration.
 *
 * @param props the resource-specific type information
 * @return the resource factory
 * @throws Exception a resource-specific exception
 */
@Override
public ResourceFactory createResourceFactory(Map<String, Object> props) throws Exception {
    final boolean trace = TraceComponent.isAnyTracingEnabled();
    if (trace && tc.isEntryEnabled())
        Tr.entry(tc, "createResourceFactory", props);
    // Place holder for admin object properties like queue/topic
    Hashtable<String, Object> adminObjectSvcProps = new Hashtable<String, Object>();
    // Store all the props from annotation as well as from deployment descriptor
    Map<String, Object> annotationDDProps = new HashMap<String, Object>();
    // Just move all the properties from props to annotationProps after resolving strings.
    VariableRegistry variableRegistry = variableRegistryRef.getServiceWithException();
    for (Map.Entry<String, Object> prop : props.entrySet()) {
        Object value = prop.getValue();
        if (value instanceof String)
            value = variableRegistry.resolveString((String) value);
        annotationDDProps.put(prop.getKey(), value);
    }
    // Identity properties are removed from the map so only adapter properties remain.
    String application = (String) annotationDDProps.remove(AppDefinedResource.APPLICATION);
    String declaringApplication = (String) annotationDDProps.remove(DECLARING_APPLICATION);
    String module = (String) annotationDDProps.remove(AppDefinedResource.MODULE);
    String component = (String) annotationDDProps.remove(AppDefinedResource.COMPONENT);
    String jndiName = (String) annotationDDProps.remove(AdminObjectService.JNDI_NAME);
    annotationDDProps.remove(DESCRIPTION);
    annotationDDProps.remove(NAME);
    String adminObjectID = getadminObjectID(application, module, component, jndiName);
    StringBuilder filter = new StringBuilder(FilterUtils.createPropertyFilter(ID, adminObjectID));
    filter.insert(filter.length() - 1, '*');
    // Fail if server.xml is already using the id
    if (!removeExistingConfigurations(filter.toString()))
        throw new IllegalArgumentException(adminObjectID); // internal error, shouldn't ever have been permitted in server.xml
    adminObjectSvcProps.put(ID, adminObjectID);
    adminObjectSvcProps.put(CONFIG_DISPLAY_ID, adminObjectID);
    // Use the unique identifier because jndiName is not always unique for app-defined data sources
    adminObjectSvcProps.put(UNIQUE_JNDI_NAME, adminObjectID);
    adminObjectSvcProps.put(ResourceFactory.JNDI_NAME, jndiName);
    if (application != null) {
        adminObjectSvcProps.put(AppDefinedResource.APPLICATION, application);
        if (module != null) {
            adminObjectSvcProps.put(AppDefinedResource.MODULE, module);
            if (component != null)
                adminObjectSvcProps.put(AppDefinedResource.COMPONENT, component);
        }
    }
    String resourceAdapter = ((String) annotationDDProps.remove(RESOURCE_ADAPTER));
    String destinationName = (String) annotationDDProps.get(JMSResourceDefinitionConstants.DESTINATION_NAME);
    String interfaceName = (String) annotationDDProps.remove(INTERFACE_NAME);
    // If the resource adapter type is wasJms, then the destination name has to be mapped with the respective name in the resource adapter.
    if (JMSResourceDefinitionConstants.RESOURCE_ADAPTER_WASJMS.equals(resourceAdapter)) {
        // The destinationName specified via annotation/dd has to be mapped to the respective adapter property.
        if (JMSResourceDefinitionConstants.JMS_QUEUE_INTERFACE.equals(interfaceName)) {
            annotationDDProps.put(JMSResourceDefinitionConstants.JMS_QUEUE_NAME, destinationName);
        } else if (JMSResourceDefinitionConstants.JMS_TOPIC_INTERFACE.equals(interfaceName)) {
            annotationDDProps.put(JMSResourceDefinitionConstants.JMS_TOPIC_NAME, destinationName);
        }
        // If the resource adapter type is wmqJms, then the destination name has to be mapped with the respective name in the resource adapter.
    } else if (JMSResourceDefinitionConstants.RESOURCE_ADAPTER_WMQJMS.equals(resourceAdapter)) {
        // The destinationName specified via annotation/dd has to be mapped to the respective adapter property.
        if (JMSResourceDefinitionConstants.JMS_QUEUE_INTERFACE.equals(interfaceName)) {
            annotationDDProps.put(JMSResourceDefinitionConstants.WMQ_QUEUE_NAME, destinationName);
        } else if (JMSResourceDefinitionConstants.JMS_TOPIC_INTERFACE.equals(interfaceName)) {
            annotationDDProps.put(JMSResourceDefinitionConstants.WMQ_TOPIC_NAME, destinationName);
        }
    }
    // Get props with default values only and see if the same is specified by the user in annotation/dd, then use that value otherwise set the default value.
    // Note: It's not necessary for the user to specify the props which have a default value, so we set them in here.
    Dictionary<String, Object> adminObjectDefaultProps = getDefaultProperties(resourceAdapter, interfaceName);
    for (Enumeration<String> keys = adminObjectDefaultProps.keys(); keys.hasMoreElements();) {
        String key = keys.nextElement();
        Object value = adminObjectDefaultProps.get(key);
        // Override the administered object default property values with values provided via annotation
        if (annotationDDProps.containsKey(key))
            value = annotationDDProps.remove(key);
        if (value instanceof String)
            value = variableRegistry.resolveString((String) value);
        adminObjectSvcProps.put(JMSResourceDefinitionConstants.PROPERTIES_REF_KEY + key, value);
    }
    // Get all the properties for a given resource (by interfaceName) from the corresponding resource adapter,
    // then see if the user specified any of these props in annotation/dd; if yes set that value, otherwise ignore.
    // Note: The default-values section above could be folded into this one since all properties are available here.
    AttributeDefinition[] ads = getAttributeDefinitions(resourceAdapter, interfaceName);
    for (AttributeDefinition attributeDefinition : ads) {
        Object value = annotationDDProps.remove(attributeDefinition.getID());
        if (value != null) {
            if (value instanceof String)
                value = variableRegistry.resolveString((String) value);
            adminObjectSvcProps.put(JMSResourceDefinitionConstants.PROPERTIES_REF_KEY + attributeDefinition.getID(), value);
        }
    }
    adminObjectSvcProps.put(BOOTSTRAP_CONTEXT, "(id=" + resourceAdapter + ")");
    BundleContext bundleContext = FrameworkUtil.getBundle(AdminObjectService.class).getBundleContext();
    StringBuilder adminObjectFilter = new StringBuilder(200);
    adminObjectFilter.append("(&").append(FilterUtils.createPropertyFilter(ID, adminObjectID));
    adminObjectFilter.append(FilterUtils.createPropertyFilter(Constants.OBJECTCLASS, AdminObjectService.class.getName())).append(")");
    ResourceFactory factory = new AppDefinedResourceFactory(this, bundleContext, adminObjectID, adminObjectFilter.toString(), declaringApplication);
    try {
        String bundleLocation = bundleContext.getBundle().getLocation();
        ConfigurationAdmin configAdmin = configAdminRef.getService();
        Configuration adminObjectSvcConfig = configAdmin.createFactoryConfiguration(AdminObjectService.ADMIN_OBJECT_PID, bundleLocation);
        adminObjectSvcConfig.update(adminObjectSvcProps);
    } catch (Exception x) {
        // Undo the factory registration if the configuration could not be created.
        factory.destroy();
        throw x;
    } catch (Error x) {
        factory.destroy();
        throw x;
    }
    if (trace && tc.isEntryEnabled())
        Tr.exit(tc, "createResourceFactory", factory);
    return factory;
}
public class TransportRequestHandler { /** * Responds to a single message with some Encodable object . If a failure occurs while sending ,
* it will be logged and the channel closed . */
private ChannelFuture respond ( Encodable result ) { } } | SocketAddress remoteAddress = channel . remoteAddress ( ) ; return channel . writeAndFlush ( result ) . addListener ( future -> { if ( future . isSuccess ( ) ) { logger . trace ( "Sent result {} to client {}" , result , remoteAddress ) ; } else { logger . error ( String . format ( "Error sending result %s to %s; closing connection" , result , remoteAddress ) , future . cause ( ) ) ; channel . close ( ) ; } } ) ; |
/**
 * Self-render: renders every element template in the document, feeding each
 * template its own source text as the render data.
 *
 * @param template the compiled template; must not be null
 */
public static void selfRender(XWPFTemplate template) {
    ObjectUtils.requireNonNull(template, "Template is null, should be setted first.");
    List<ElementTemplate> elementTemplates = template.getElementTemplates();
    if (null == elementTemplates || elementTemplates.isEmpty()) return;
    RenderPolicy policy = null;
    for (ElementTemplate runTemplate : elementTemplates) {
        LOGGER.debug("Start self-render TemplateName:{}, Sign:{}", runTemplate.getTagName(), runTemplate.getSign());
        // NOTE(review): the policy is looked up under the '\0' key rather than the
        // template's own sign -- confirm this is the intended default-policy key,
        // and note that a missing policy would NPE on the next line.
        policy = template.getConfig().getDefaultPolicys().get(Character.valueOf('\0'));
        policy.render(runTemplate, new TextRenderData(runTemplate.getSource()), template);
    }
}
public class SDBaseOps { /** * Create a new 1d array with values evenly spaced between values ' start ' and ' stop '
* For example , linspace ( start = 3.0 , stop = 4.0 , number = 3 ) will generate [ 3.0 , 3.5 , 4.0]
* @ param name Name of the new variable
* @ param dataType Data type of the output array
* @ param start Start value
* @ param stop Stop value
* @ param number Number of values to generate
* @ return SDVariable with linearly spaced elements */
public SDVariable linspace ( String name , DataType dataType , double start , double stop , long number ) { } } | SDVariable ret = f ( ) . linspace ( sd ( ) . constant ( start ) , sd ( ) . constant ( stop ) , sd ( ) . constant ( number ) , dataType ) ; return updateVariableNameAndReference ( ret , name ) ; |
public class PropertyAccessorHelper { /** * Sets a byte - array onto a field .
* @ param target
* the target
* @ param field
* the field
* @ param fieldVal
* the field value
* @ throws PropertyAccessException
* the property access exception */
public static void set ( Object target , Field field , String fieldVal ) { } } | PropertyAccessor < ? > accessor = PropertyAccessorFactory . getPropertyAccessor ( field ) ; Object value = accessor . fromString ( target . getClass ( ) , fieldVal ) ; set ( target , field , value ) ; |
/**
 * Returns the data found in the frame specified by the id.
 *
 * @param id the id of the frame to get the data from
 * @return the data found in the frame, or null if the frame doesn't exist.
 *         NOTE(review): the original doc promised a zero-length array for a
 *         missing frame, but the code returns null -- callers must null-check.
 */
public byte[] getFrameData(String id) {
    if (frames.containsKey(id)) {
        return ((ID3v2Frame) frames.get(id)).getFrameData();
    }
    return null;
}
/**
 * Removes a resource from the current project of the user.<p>
 *
 * This is used to reduce the current user's project with the
 * specified resource, in case that the resource is already part of the project.
 * The resource is not really removed like in a regular copy operation,
 * it is in fact only "disabled" in the current user's project.<p>
 *
 * @param resourcename the name of the resource to remove from the current project (full current site relative path)
 * @throws CmsException if something goes wrong
 */
public void removeResourceFromProject(String resourcename) throws CmsException {
    // TODO: this should be also possible if the resource has been deleted
    CmsResource resource = readResource(resourcename, CmsResourceFilter.ALL);
    // Delegate to the resource-type-specific implementation.
    getResourceType(resource).removeResourceFromProject(this, m_securityManager, resource);
}
public class KeyValueLocator { /** * Helper method to handle potentially different node sizes in the actual list and in the config .
* @ return true if they are not equal , false if they are . */
private static boolean handleNotEqualNodeSizes ( int configNodeSize , int actualNodeSize ) { } } | if ( configNodeSize != actualNodeSize ) { if ( LOGGER . isDebugEnabled ( ) ) { LOGGER . debug ( "Node list and configuration's partition hosts sizes : {} <> {}, rescheduling" , actualNodeSize , configNodeSize ) ; } return true ; } return false ; |
/**
 * Opposite of the {@link #toDate(double)} method: converts a {@link Date} into
 * an OLE Automation DATE value (fractional days since 30 December 1899, local time).
 *
 * @param dt the date to convert
 * @return the DATE representation of the given date
 */
static double fromDate(Date dt) {
    // the number of milliseconds since January 1, 1970, 00:00:00 GMT
    long t = dt.getTime();
    // the number of milliseconds since January 1, 1970, 00:00:00 Local Time
    Calendar c = new GregorianCalendar();
    c.setTime(dt);
    t += (c.get(Calendar.ZONE_OFFSET) + c.get(Calendar.DST_OFFSET));
    // the number of milliseconds since December 30, 1899, 00:00:00 Local Time
    t += 2209161600000L;
    // DATE is an offset from "30 December 1899"
    if (t < 0) {
        // Pre-epoch DATE values carry the day fraction with inverted sign,
        // e.g. -0.3 -> -0.7
        long offset = -(t % MSPD); // TODO: check
        t = t - MSPD + offset;
    }
    double d = ((double) t) / MSPD;
    return d;
}
/**
 * Uploads a war file to the JBoss management API's add-content endpoint.
 * Equivalent shell command:
 * curl -v --digest -u "admin" -F "file=@git/cadmium/test.cadmium.localhost.war;filename=test.cadmium.localhost.war" http://localhost:9990/management/add-content
 * result: { "outcome" : "success", "result" : { "BYTES_VALUE" : "SbejgggTNOuHdke5k6EeKdB8Zfo=" } }
 *
 * @param warName the filename to register the upload under
 * @param warFile the local war file to upload
 * @return the BYTES_VALUE content hash from a successful response, or null when
 *         the response is not HTTP 200 or lacks a parsable result map
 * @throws Exception if the server reports a failure outcome
 */
public String uploadWar(String warName, File warFile) throws Exception {
    HttpPost post = new HttpPost("http://" + host + ":" + port + "/management/add-content");
    try {
        MultipartEntity entity = new MultipartEntity();
        entity.addPart("filename", new StringBody(warName));
        entity.addPart("attachment", new FileBody(warFile, ContentType.APPLICATION_OCTET_STREAM.getMimeType()));
        post.setEntity(entity);
        HttpResponse response = client.execute(post);
        if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
            String respStr = EntityUtils.toString(response.getEntity());
            Map<String, Object> respObj = new Gson().fromJson(respStr, new TypeToken<Map<String, Object>>() {}.getType());
            if ("success".equals(respObj.get("outcome"))) {
                Object resultObj = respObj.get("result");
                if (resultObj instanceof Map) {
                    Map<String, Object> resultMap = (Map<String, Object>) resultObj;
                    return (String) resultMap.get("BYTES_VALUE");
                }
            } else {
                // Surface the server's failure description, with a generic fallback.
                String failureMessage = (String) respObj.get("failure-description");
                if (failureMessage == null) {
                    failureMessage = "Failed to process request.";
                }
                logger.warn(failureMessage);
                throw new Exception("Received " + failureMessage + " response from management api.");
            }
        }
    } finally {
        post.releaseConnection();
    }
    return null;
}
public class Try { /** * Calls the callable and if it returns a value , the Try is successful with that value or if throws
* and exception the Try captures that
* @ param callable the code to call
* @ param < V > the value type
* @ return a Try which is the result of the call */
public static < V > Try < V > tryCall ( Callable < V > callable ) { } } | try { return Try . succeeded ( callable . call ( ) ) ; } catch ( Exception e ) { return Try . failed ( e ) ; } |
/**
 * Adds a PATCH request handler for the specified path to the {@link Javalin} instance.
 * The method can only be called inside a {@link Javalin#routes(EndpointGroup)}.
 *
 * @param path the route path, resolved against the current ApiBuilder prefix
 * @param handler the handler invoked for matching PATCH requests
 * @see <a href="https://javalin.io/documentation#handlers">Handlers in docs</a>
 */
public static void patch(@NotNull String path, @NotNull Handler handler) {
    staticInstance().patch(prefixPath(path), handler);
}
/**
 * Generate the package index page for the right-hand frame.
 *
 * @param configuration the current configuration of the doclet.
 * @throws DocFileIOException if there is a problem generating the package index page
 */
public static void generate(ConfigurationImpl configuration) throws DocFileIOException {
    // The overview file name depends on whether frames are enabled.
    DocPath filename = DocPaths.overviewSummary(configuration.frames);
    PackageIndexWriter packgen = new PackageIndexWriter(configuration, filename);
    packgen.buildPackageIndexFile("doclet.Window_Overview_Summary", true);
}
public class AttributeFormFieldRegistry {
    /**
     * Create a new {@link DataSourceField} instance for the given attribute info. This field can
     * provide additional validators on the field type (if they are registered), to protect the
     * data.<br/>
     * If the attribute info object has the <code>formInputType</code> set, then that will be used
     * to search for the correct field type, otherwise the attribute TYPE name is used
     * (i.e. PrimitiveType.INTEGER.name()).
     *
     * @param info The actual attribute info to create a data source field for.
     * @return The new data source field instance associated with the type of attribute.
     */
    public static DataSourceField createDataSourceField(AbstractReadOnlyAttributeInfo info) {
        DataSourceField field = null;
        List<Validator> validators = new ArrayList<Validator>();
        // First preference: an explicitly configured form input type.
        if (info.getFormInputType() != null) {
            String formInputType = info.getFormInputType();
            DataSourceFieldFactory factory = DATA_SOURCE_FIELDS.get(formInputType);
            if (null != factory) {
                field = factory.create();
                // Validators registered for this input type are optional.
                List<Validator> fieldValidators = FIELD_VALIDATORS.get(formInputType);
                if (null != fieldValidators) {
                    validators.addAll(fieldValidators);
                }
            } else {
                // Unknown input type: warn and fall through to the type-based lookup below.
                Log.logWarn("Cannot find data source factory for " + info.getFormInputType() + ", using default instead.");
            }
        }
        // Fallback: derive the field from the attribute's type name.
        if (field == null) {
            if (info instanceof PrimitiveAttributeInfo) {
                String name = ((PrimitiveAttributeInfo) info).getType().name();
                field = DATA_SOURCE_FIELDS.get(name).create();
                // NOTE(review): this branch REPLACES the validators list (discarding any
                // collected above), while the other branches append -- confirm intended.
                validators = new ArrayList<Validator>(FIELD_VALIDATORS.get(name));
            } else if (info instanceof SyntheticAttributeInfo) {
                // Synthetic attributes are rendered as strings.
                String name = PrimitiveType.STRING.name();
                field = DATA_SOURCE_FIELDS.get(name).create();
                validators.addAll(FIELD_VALIDATORS.get(name));
            } else if (info instanceof AssociationAttributeInfo) {
                String name = ((AssociationAttributeInfo) info).getType().name();
                field = DATA_SOURCE_FIELDS.get(name).create();
                validators.addAll(FIELD_VALIDATORS.get(name));
            } else {
                throw new IllegalStateException("Don't know how to handle field " + info.getName() + ", " +
                        "maybe you need to define the formInputType.");
            }
        }
        if (field != null) {
            // Common field configuration from the attribute metadata.
            field.setName(info.getName());
            field.setTitle(info.getLabel());
            field.setCanEdit(info.isEditable());
            // Only editable attributes can carry a "required" constraint.
            field.setRequired(info instanceof AbstractEditableAttributeInfo &&
                    isRequired(((AbstractEditableAttributeInfo) info).getValidator()));
            if (info instanceof PrimitiveAttributeInfo) {
                // Add validators derived from the attribute's declared constraints.
                validators.addAll(convertConstraints((PrimitiveAttributeInfo) info));
            }
            if (validators.size() > 0) {
                field.setValidators(validators.toArray(new Validator[validators.size()]));
            }
            return field;
        }
        return null;
    }
}
public class GoogRequire { /** * Returns a reference to the module object using { @ code goog . module . get } */
public Expression googModuleGet ( ) { } } | if ( chunk ( ) instanceof VariableDeclaration ) { throw new IllegalStateException ( "requires with aliases shouldn't use goog.module.get" ) ; } return dottedIdWithRequires ( "goog.module.get" , ImmutableSet . of ( this ) ) . call ( stringLiteral ( symbol ( ) ) ) ; |
public class FormAuthenticator { public void setErrorPage ( String path ) { } } | if ( path == null || path . trim ( ) . length ( ) == 0 ) { _formErrorPath = null ; _formErrorPage = null ; } else { if ( ! path . startsWith ( "/" ) ) { log . warn ( "form-error-page must start with /" ) ; path = "/" + path ; } _formErrorPage = path ; _formErrorPath = path ; if ( _formErrorPath != null && _formErrorPath . indexOf ( '?' ) > 0 ) _formErrorPath = _formErrorPath . substring ( 0 , _formErrorPath . indexOf ( '?' ) ) ; } |
public class AbstractItem { /** * This bridge method is to maintain binary compatibility with { @ link TopLevelItem # getParent ( ) } . */
@ WithBridgeMethods ( value = Jenkins . class , castRequired = true ) @ Override public @ Nonnull ItemGroup getParent ( ) { } } | if ( parent == null ) { throw new IllegalStateException ( "no parent set on " + getClass ( ) . getName ( ) + "[" + name + "]" ) ; } return parent ; |
public class FetchTupleElement {
    /**
     * Fetches an element from an array at a specific index. A negative index
     * counts back from the end of the array (e.g. -1 is the last element).
     *
     * @param arr array that the function fetches an element from
     * @param idx the index of the element that needs to be fetched
     * @return element from the array at the given (possibly negative) index
     */
    public static Object fetchArrayElement(Object[] arr, int idx) {
        int effective = (idx >= 0) ? idx : arr.length + idx;
        return arr[effective];
    }
}
public class UserInterfaceApi {
    /**
     * Open New Mail Window -- opens the New Mail window in the client, filled
     * according to settings from the request if applicable.
     * SSO scope: esi-ui.open_window.v1
     *
     * @param datasource the server name you would like data from (optional,
     *            default to tranquility)
     * @param token access token to use if unable to set a header (optional)
     * @param uiNewMail new-mail window settings (optional)
     * @throws ApiException if the API call fails, e.g. server error or the
     *             response body cannot be deserialized
     */
    public void postUiOpenwindowNewmail(String datasource, String token, UiNewMail uiNewMail) throws ApiException {
        // Delegate to the WithHttpInfo variant and discard the HTTP metadata.
        postUiOpenwindowNewmailWithHttpInfo(datasource, token, uiNewMail);
    }
}
public class KDTree {
    /**
     * Returns true if candidate is a full owner in respect to a competitor.
     * The candidate has been the closer point to the current rectangle or even has been a
     * point within the rectangle. The competitor is competing with the candidate for a few
     * points out of the rectangle although it is a point further away from the rectangle
     * than the candidate. The extreme point is the corner of the rectangle that is furthest
     * away from the candidate towards the direction of the competitor.
     * If the distance candidate-to-extreme-point is smaller than the distance
     * competitor-to-extreme-point, then it is proven that none of the points in the
     * rectangle can be owned by the competitor and the candidate is full owner of the
     * rectangle in respect to this competitor. See also D. Pelleg and A. Moore's paper
     * 'Accelerating exact k-means Algorithms with Geometric Reasoning'.
     *
     * @param node The current KDTreeNode / hyperrectangle.
     * @param candidate instance that is candidate to be owner
     * @param competitor instance that competes against the candidate
     * @return true if candidate is full owner
     * @throws Exception If some problem occurs.
     */
    protected boolean candidateIsFullOwner(KDTreeNode node, Instance candidate, Instance competitor) throws Exception {
        // get extreme point: per attribute, pick the rectangle bound that lies
        // in the direction of the competitor relative to the candidate.
        Instance extreme = (Instance) candidate.copy();
        for (int i = 0; i < m_Instances.numAttributes(); i++) {
            if ((competitor.value(i) - candidate.value(i)) > 0) {
                extreme.setValue(i, node.m_NodeRanges[i][MAX]);
            } else {
                extreme.setValue(i, node.m_NodeRanges[i][MIN]);
            }
        }
        // Candidate owns the whole rectangle iff it is strictly closer to the
        // extreme corner than the competitor is.
        boolean isFullOwner =
                m_EuclideanDistance.distance(extreme, candidate) < m_EuclideanDistance.distance(extreme, competitor);
        return isFullOwner;
    }
}
public class BasicBinder { /** * Register the configuration file ( bindings . xml ) at the given URL */
public final void registerConfiguration ( URL nextLocation ) { } } | BindingConfiguration configuration = BindingXmlLoader . load ( nextLocation ) ; for ( Provider nextProvider : configuration . getProviders ( ) ) { try { registerConverterProvider ( nextProvider . getProviderClass ( ) . newInstance ( ) ) ; } catch ( InstantiationException e ) { throw new IllegalStateException ( "Cannot instantiate binding provider class: " + nextProvider . getProviderClass ( ) . getName ( ) ) ; } catch ( IllegalAccessException e ) { throw new IllegalStateException ( "Cannot access binding provider class: " + nextProvider . getProviderClass ( ) . getName ( ) ) ; } } registerBindingConfigurationEntries ( configuration . getBindingEntries ( ) ) ; |
public class ScorecardModel {
    /**
     * Scorecard model builder: JIT a subclass with the fast version wired in to
     * 'score(row)'. Uses javassist to generate a subclass of ScorecardModel
     * with specialized scoring methods, then instantiates it via its copy
     * constructor.
     *
     * @param name model name (used to derive a unique generated class name)
     * @param initialScore the base score before rules are applied
     * @param rules the rule tables whose names become the feature columns
     * @return the JIT-compiled model, or null if bytecode generation failed
     */
    public static ScorecardModel make(final String name, final double initialScore, RuleTable[] rules) {
        // Get the list of features (one column per rule table)
        String[] colNames = new String[rules.length];
        for (int i = 0; i < rules.length; i++)
            colNames[i] = rules[i]._name;
        // javassist support for rewriting class files
        ClassPool _pool = ClassPool.getDefault();
        try {
            // Make a javassist class in the java hierarchy
            String cname = uniqueClassName(name);
            CtClass scClass = _pool.makeClass(cname);
            CtClass baseClass = _pool.get("water.score.ScorecardModel"); // Full Name Lookup
            scClass.setSuperclass(baseClass);
            // Produce the scoring method(s) on the generated subclass
            ScorecardModel scm = new ScorecardModel(name, colNames, initialScore, rules);
            scm.makeScoreHashMethod(scClass);
            scm.makeScoreAryMethod(scClass);
            // Produce a 1-arg constructor delegating to super(base)
            String cons = " public " + cname + "(water.score.ScorecardModel base) { super(base); }";
            CtConstructor happyConst = CtNewConstructor.make(cons, scClass);
            scClass.addConstructor(happyConst);
            // Load the generated class and instantiate it from the template model.
            Class myClass = scClass.toClass(ScorecardModel.class.getClassLoader(), null);
            Constructor<ScorecardModel> co = myClass.getConstructor(ScorecardModel.class);
            ScorecardModel jitted_scm = co.newInstance(scm);
            return jitted_scm;
        } catch (Exception e) {
            // Any javassist/reflection failure is logged; caller receives null.
            Log.err(Sys.SCORM, "Javassist failed", e);
        }
        return null;
    }
}
public class DefaultLogger {
    /**
     * Logs a message, if the priority is suitable. In non-emacs mode, task
     * level messages are prefixed by the task name which is right-justified.
     *
     * @param event A BuildEvent containing message information. Must not be
     *            <code>null</code>.
     */
    @Override
    public void messageLogged(final BuildEvent event) {
        final int priority = event.getPriority();
        // Filter out messages based on priority
        if (priority <= msgOutputLevel) {
            final StringBuilder message = new StringBuilder();
            if (event.getTask() != null && !emacsMode) {
                // Print out the name of the task if we're in one
                final String name = event.getTask().getTaskName();
                String label = "[" + name + "] ";
                // Right-justify the label within the left column.
                final int size = LEFT_COLUMN_SIZE - label.length();
                final StringBuilder tmp = new StringBuilder();
                for (int i = 0; i < size; i++) {
                    tmp.append(" ");
                }
                tmp.append(label);
                label = tmp.toString();
                BufferedReader r = null;
                try {
                    // Prefix every line of a multi-line message with the label.
                    r = new BufferedReader(new StringReader(event.getMessage()));
                    String line = r.readLine();
                    boolean first = true;
                    do {
                        if (first) {
                            if (line == null) {
                                // Empty message: emit the label alone.
                                message.append(label);
                                break;
                            }
                        } else {
                            message.append(StringUtils.LINE_SEP);
                        }
                        first = false;
                        message.append(label).append(line);
                        line = r.readLine();
                    } while (line != null);
                } catch (final IOException e) {
                    // shouldn't be possible (reading from an in-memory string)
                    message.append(label).append(event.getMessage());
                } finally {
                    if (r != null) {
                        FileUtils.close(r);
                    }
                }
            } else {
                // emacs mode or there is no task
                message.append(event.getMessage());
            }
            // Append a stack trace at debug verbosity.
            final Throwable ex = event.getException();
            if (Project.MSG_DEBUG <= msgOutputLevel && ex != null) {
                message.append(StringUtils.getStackTrace(ex));
            }
            final String msg = message.toString();
            // Errors go to the error stream; everything else to standard out.
            if (priority != Project.MSG_ERR) {
                printMessage(msg, out, priority);
            } else {
                printMessage(msg, err, priority);
            }
            log(msg);
        }
    }
}
public class AuthorizationStrategy { /** * Returns all the registered { @ link AuthorizationStrategy } descriptors . */
public static @ Nonnull DescriptorExtensionList < AuthorizationStrategy , Descriptor < AuthorizationStrategy > > all ( ) { } } | return Jenkins . getInstance ( ) . < AuthorizationStrategy , Descriptor < AuthorizationStrategy > > getDescriptorList ( AuthorizationStrategy . class ) ; |
public class ChunkedHashStore {
    /**
     * Adds the elements returned by an iterator to this store, associating them with
     * specified values, possibly building the associated value frequency map.
     *
     * @param elements an iterator returning elements.
     * @param values an iterator on values parallel to {@code elements}; if null, the
     *            current filtered size is used as the value for each element.
     * @param requiresValue2CountMap whether to build the value frequency map
     *            (associating with each value its frequency).
     *            NOTE(review): this parameter is not referenced in this body --
     *            presumably the frequency map is built inside add(); confirm.
     * @throws IOException on I/O error from the underlying store
     */
    public void addAll(final Iterator<? extends T> elements, final LongIterator values, final boolean requiresValue2CountMap) throws IOException {
        if (pl != null) {
            pl.expectedUpdates = -1;
            pl.start("Adding elements...");
        }
        // Reused buffer for the 3-word spooky hash of each element.
        final long[] triple = new long[3];
        while (elements.hasNext()) {
            Hashes.spooky4(transform.toBitVector(elements.next()), seed, triple);
            add(triple, values != null ? values.nextLong() : filteredSize);
            if (pl != null) pl.lightUpdate();
        }
        // Detect a longer values iterator (keys exhausted first).
        if (values != null && values.hasNext()) throw new IllegalStateException("The iterator on values contains more entries than the iterator on keys");
        if (pl != null) pl.done();
    }
}
public class FileUtil {
    /**
     * Returns whether any of the given paths exists as a file or directory.
     *
     * @param paths file-system paths to check
     * @return true if at least one path exists, false otherwise
     */
    public static boolean existsAny(String... paths) {
        for (String path : paths) {
            if (new File(path).exists()) {
                return true;
            }
        }
        return false;
    }
}
public class DeleteFileSystemWindowsResponseMarshaller { /** * Marshall the given parameter object . */
public void marshall ( DeleteFileSystemWindowsResponse deleteFileSystemWindowsResponse , ProtocolMarshaller protocolMarshaller ) { } } | if ( deleteFileSystemWindowsResponse == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( deleteFileSystemWindowsResponse . getFinalBackupId ( ) , FINALBACKUPID_BINDING ) ; protocolMarshaller . marshall ( deleteFileSystemWindowsResponse . getFinalBackupTags ( ) , FINALBACKUPTAGS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; } |
public class SpringBootUtilTask { /** * Build up a command string to launch in new process */
private List < String > buildCommand ( ) { } } | List < String > command = new ArrayList < String > ( ) ; command . add ( cmd ) ; command . add ( "thin" ) ; command . add ( "--sourceAppPath=" + getSourceAppPath ( ) ) ; command . add ( "--targetLibCachePath=" + getTargetLibCachePath ( ) ) ; command . add ( "--targetThinAppPath=" + getTargetThinAppPath ( ) ) ; if ( getParentLibCachePath ( ) != null ) { command . add ( "--parentLibCachePath=" + getParentLibCachePath ( ) ) ; } return command ; |
public class SliceUtf8 {
    /**
     * Gets the UTF-8 encoded code point at the {@code position}.
     * Note: This method does not explicitly check for valid UTF-8, and may
     * return incorrect results or throw an exception for invalid UTF-8.
     *
     * @param utf8 the slice containing UTF-8 encoded bytes
     * @param position byte offset of the start of the code point
     * @return the decoded Unicode code point
     * @throws InvalidUtf8Exception for an illegal start byte or a truncated sequence
     */
    public static int getCodePointAt(Slice utf8, int position) {
        int unsignedStartByte = utf8.getByte(position) & 0xFF;
        if (unsignedStartByte < 0x80) {
            // normal ASCII
            // 0xxx_xxxx
            return unsignedStartByte;
        }
        if (unsignedStartByte < 0xc0) {
            // illegal bytes: a continuation byte cannot start a code point
            // 10xx_xxxx
            throw new InvalidUtf8Exception("Illegal start 0x" + toHexString(unsignedStartByte).toUpperCase() + " of code point");
        }
        if (unsignedStartByte < 0xe0) {
            // 2-byte sequence: 110x_xxxx 10xx_xxxx
            if (position + 1 >= utf8.length()) {
                throw new InvalidUtf8Exception("UTF-8 sequence truncated");
            }
            return ((unsignedStartByte & 0b0001_1111) << 6) | (utf8.getByte(position + 1) & 0b0011_1111);
        }
        if (unsignedStartByte < 0xf0) {
            // 3-byte sequence: 1110_xxxx 10xx_xxxx 10xx_xxxx
            if (position + 2 >= utf8.length()) {
                throw new InvalidUtf8Exception("UTF-8 sequence truncated");
            }
            // Bounds already verified above, so unchecked reads are safe here.
            return ((unsignedStartByte & 0b0000_1111) << 12) |
                    ((utf8.getByteUnchecked(position + 1) & 0b0011_1111) << 6) |
                    (utf8.getByteUnchecked(position + 2) & 0b0011_1111);
        }
        if (unsignedStartByte < 0xf8) {
            // 4-byte sequence: 1111_0xxx 10xx_xxxx 10xx_xxxx 10xx_xxxx
            if (position + 3 >= utf8.length()) {
                throw new InvalidUtf8Exception("UTF-8 sequence truncated");
            }
            return ((unsignedStartByte & 0b0000_0111) << 18) |
                    ((utf8.getByteUnchecked(position + 1) & 0b0011_1111) << 12) |
                    ((utf8.getByteUnchecked(position + 2) & 0b0011_1111) << 6) |
                    (utf8.getByteUnchecked(position + 3) & 0b0011_1111);
        }
        // Per RFC3629, UTF-8 is limited to 4 bytes, so more bytes are illegal
        throw new InvalidUtf8Exception("Illegal start 0x" + toHexString(unsignedStartByte).toUpperCase() + " of code point");
    }
}
public class TreeMap { /** * ( non - Javadoc )
* @ see com . ibm . ws . objectManager . Map # containsKey ( java . lang . Object , com . ibm . ws . objectManager . Transaction ) */
public synchronized boolean containsKey ( Object key , Transaction transaction ) throws ObjectManagerException { } } | if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . entry ( this , cclass , "get" , new Object [ ] { key , transaction } ) ; boolean returnValue = false ; if ( getEntry ( key , transaction ) != null ) returnValue = true ; if ( Tracing . isAnyTracingEnabled ( ) && trace . isEntryEnabled ( ) ) trace . exit ( this , cclass , "containsKey" , "returns " + returnValue + "(boolean)" ) ; return returnValue ; |
public class ProjectUtilities { /** * Using the natures name , check whether the current project has FindBugs
* nature .
* @ return boolean < code > true < / code > , if the FindBugs nature is assigned to
* the project , < code > false < / code > otherwise . */
public static boolean hasFindBugsNature ( IProject project ) { } } | try { return ProjectUtilities . isJavaProject ( project ) && project . hasNature ( FindbugsPlugin . NATURE_ID ) ; } catch ( CoreException e ) { FindbugsPlugin . getDefault ( ) . logException ( e , "Error while testing SpotBugs nature for project " + project ) ; } return false ; |
public class Util {
    /**
     * Get a long-typed property value from a property map.
     * If {@code properties} is null or the property value is null, the default
     * value is returned.
     *
     * @param properties map of properties
     * @param key property name
     * @param defaultVal default value of the property
     * @return long value of the property
     * @throws IllegalArgumentException if the property is present but not a Long
     */
    public static Long getLongProperty(Map<String, Object> properties, String key, long defaultVal) {
        if (properties == null) {
            return defaultVal;
        }
        Object value = properties.get(key);
        if (value == null) {
            return defaultVal;
        }
        if (value instanceof Long) {
            return (Long) value;
        }
        throw new IllegalArgumentException("Property : " + key + " must be a long");
    }
}
public class DBSnapshotAttribute { /** * The value or values for the manual DB snapshot attribute .
* If the < code > AttributeName < / code > field is set to < code > restore < / code > , then this element returns a list of IDs
* of the AWS accounts that are authorized to copy or restore the manual DB snapshot . If a value of < code > all < / code >
* is in the list , then the manual DB snapshot is public and available for any AWS account to copy or restore .
* @ param attributeValues
* The value or values for the manual DB snapshot attribute . < / p >
* If the < code > AttributeName < / code > field is set to < code > restore < / code > , then this element returns a list
* of IDs of the AWS accounts that are authorized to copy or restore the manual DB snapshot . If a value of
* < code > all < / code > is in the list , then the manual DB snapshot is public and available for any AWS account
* to copy or restore . */
public void setAttributeValues ( java . util . Collection < String > attributeValues ) { } } | if ( attributeValues == null ) { this . attributeValues = null ; return ; } this . attributeValues = new com . amazonaws . internal . SdkInternalList < String > ( attributeValues ) ; |
public class TableFactoryService {
    /**
     * Converts the prefixes of properties with wildcards (e.g., "format.*"):
     * for every key ending in '*', yields the key with the trailing '*' removed.
     *
     * @param propertyKeys the property keys to scan
     * @return the wildcard prefixes, in encounter order
     */
    private static List<String> extractWildcardPrefixes(List<String> propertyKeys) {
        return propertyKeys.stream()
                .filter(key -> key.endsWith("*"))
                .map(key -> key.substring(0, key.length() - 1))
                .collect(Collectors.toList());
    }
}
public class Fat16RootDirectory { /** * Reads a { @ code Fat16RootDirectory } as indicated by the specified
* { @ code Fat16BootSector } .
* @ param bs the boot sector that describes the root directory to read
* @ param readOnly if the directory shold be created read - only
* @ return the directory that was read
* @ throws IOException on read error */
public static Fat16RootDirectory read ( Fat16BootSector bs , boolean readOnly ) throws IOException { } } | final Fat16RootDirectory result = new Fat16RootDirectory ( bs , readOnly ) ; result . read ( ) ; return result ; |
public class StringUtils { /** * Determines whether the String value contains any digits , guarding against null values .
* @ param value the String value being evaluated for digits containment .
* @ return a boolean value indicating whether the String value contains any digits .
* @ see # toCharArray ( String )
* @ see java . lang . Character # isDigit ( char ) */
@ NullSafe public static boolean containsDigits ( String value ) { } } | for ( char chr : toCharArray ( value ) ) { if ( Character . isDigit ( chr ) ) { return true ; } } return false ; |
public class PluginConfig { /** * For internal use only , please do not rely on this method .
* @ return the environment specific configuration key . The original key will be returned if no prefixed configuration is specified for the current env . */
private static String extendedKey ( Properties properties , String key ) { } } | String extendedKey = extendedKey ( key ) ; return properties . containsKey ( extendedKey ) ? extendedKey : key ; |
public class QueueResultsProcessing { /** * Stores the output from a Frontier into the queue , pausing and waiting if the given queue is too large
* @ param resultsMap map of String and String representing the output of a Frontier ' s DFS */
@ Override public void processOutput ( Map < String , String > resultsMap ) { } } | queue . add ( resultsMap ) ; if ( queue . size ( ) > 10000 ) { log . info ( "Queue size " + queue . size ( ) + ", waiting" ) ; try { Thread . sleep ( 500 ) ; } catch ( InterruptedException ex ) { log . info ( "Interrupted " , ex ) ; } } |
public class CPDefinitionOptionValueRelPersistenceImpl {
    /**
     * Returns all the cp definition option value rels where key = &#63;.
     *
     * @param key the key
     * @return the matching cp definition option value rels
     */
    @Override
    public List<CPDefinitionOptionValueRel> findByKey(String key) {
        // Delegate to the ranged finder with no bounds (ALL_POS) and default ordering.
        return findByKey(key, QueryUtil.ALL_POS, QueryUtil.ALL_POS, null);
    }
}
public class BulkProcessor {
    /**
     * Attempts a flush operation, handling failed documents based on configured
     * error listeners. Retries documents the error handlers mark for retry,
     * skips handled documents, and collects aborted documents into the result.
     *
     * @return A result object detailing the success or failure of the request,
     *         including information about any failed documents.
     * @throws EsHadoopException in the event that the bulk operation fails or is aborted.
     */
    public BulkResponse tryFlush() {
        BulkResponse bulkResult = null;
        // Set when an edited-and-retried document forced the tracking buffer to grow.
        boolean trackingArrayExpanded = false;
        String bulkLoggingID = createDebugTxnID();
        try {
            // double check data - it might be a false flush (called on clean-up)
            if (data.length() > 0) {
                int totalDocs = data.entries();
                int docsSent = 0;
                int docsSkipped = 0;
                int docsAborted = 0;
                long totalTime = 0L;
                boolean retryOperation = false;
                int totalAttempts = 0;
                long waitTime = 0L;
                List<BulkAttempt> retries = new ArrayList<BulkAttempt>();
                List<BulkResponse.BulkError> abortErrors = new ArrayList<BulkResponse.BulkError>();
                do {
                    // Throw to break out of a possible infinite loop, but only if the limit is a positive number
                    if (retryLimit >= 0 && totalAttempts > retryLimit) {
                        throw new EsHadoopException("Executed too many bulk requests without success. Attempted [" +
                                totalAttempts + "] write operations, which exceeds the bulk request retry limit specified" +
                                "by [" + ConfigurationOptions.ES_BATCH_WRITE_RETRY_LIMIT + "], and found data still " +
                                "not accepted. Perhaps there is an error handler that is not terminating? Bailing out...");
                    }
                    // Log messages, and if wait time is set, perform the thread sleep.
                    initFlushOperation(bulkLoggingID, retryOperation, retries.size(), waitTime);
                    // Exec bulk operation to ES, get response.
                    debugLog(bulkLoggingID, "Submitting request");
                    RestClient.BulkActionResponse bar = restClient.bulk(resource, data);
                    debugLog(bulkLoggingID, "Response received");
                    totalAttempts++;
                    totalTime += bar.getTimeSpent();
                    // Log retry stats if relevant
                    if (retryOperation) {
                        stats.docsRetried += data.entries();
                        stats.bytesRetried += data.length();
                        stats.bulkRetries++;
                        stats.bulkRetriesTotalTime += bar.getTimeSpent();
                    }
                    executedBulkWrite = true;
                    // Handle bulk write failures
                    if (!bar.getEntries().hasNext()) {
                        // Legacy Case:
                        // If no items on response, assume all documents made it in.
                        // Recorded bytes are ack'd here
                        stats.bytesAccepted += data.length();
                        stats.docsAccepted += data.entries();
                        retryOperation = false;
                        bulkResult = BulkResponse.complete(bar.getResponseCode(), totalTime, totalDocs, totalDocs, 0);
                    } else {
                        // Base Case:
                        // Iterate over the response and the data in the tracking bytes array at the same time,
                        // passing errors to error handlers for resolution.
                        // Keep track of which document we are on as well as where we are in the tracking bytes array.
                        int documentNumber = 0;
                        int trackingBytesPosition = 0;
                        // Hand off the previous list of retries so that we can track the next set of retries (if any).
                        List<BulkAttempt> previousRetries = retries;
                        retries = new ArrayList<BulkAttempt>();
                        // If a document is edited and retried then it is added at the end of the buffer.
                        // Keep a tail list of these new retry attempts.
                        List<BulkAttempt> newDocumentRetries = new ArrayList<BulkAttempt>();
                        BulkWriteErrorCollector errorCollector = new BulkWriteErrorCollector();
                        // Iterate over all entries, and for each error found, attempt to handle the problem.
                        for (Iterator<Map> iterator = bar.getEntries(); iterator.hasNext(); ) {
                            // The array of maps are (operation -> document info) maps
                            Map map = iterator.next();
                            // Get the underlying document information as a map and extract the error information.
                            Map values = (Map) map.values().iterator().next();
                            Integer docStatus = (Integer) values.get("status");
                            EsHadoopException error = errorExtractor.extractError(values);
                            if (error == null) {
                                // Write operation for this entry succeeded
                                stats.bytesAccepted += data.length(trackingBytesPosition);
                                stats.docsAccepted += 1;
                                docsSent += 1;
                                data.remove(trackingBytesPosition);
                            } else {
                                // Found a failed write
                                BytesArray document = data.entry(trackingBytesPosition);
                                // In pre-2.x ES versions, the status is not included.
                                int status = docStatus == null ? -1 : docStatus;
                                // Figure out which attempt number sending this document was
                                // and which position the doc was in
                                BulkAttempt previousAttempt;
                                if (previousRetries.isEmpty()) {
                                    // No previous retries, create an attempt for the first run
                                    previousAttempt = new BulkAttempt(1, documentNumber);
                                } else {
                                    // Grab the previous attempt for the document we're processing,
                                    // and bump the attempt number.
                                    previousAttempt = previousRetries.get(documentNumber);
                                    previousAttempt.attemptNumber++;
                                }
                                // Handle bulk write failures
                                // Todo: We should really do more with these bulk error pass reasons
                                // if the final outcome is an ABORT.
                                List<String> bulkErrorPassReasons = new ArrayList<String>();
                                BulkWriteFailure failure = new BulkWriteFailure(status, error, document, previousAttempt.attemptNumber, bulkErrorPassReasons);
                                // Label the loop since we'll be breaking to/from it within a switch block.
                                handlerLoop:
                                for (IBulkWriteErrorHandler errorHandler : documentBulkErrorHandlers) {
                                    HandlerResult result;
                                    try {
                                        result = errorHandler.onError(failure, errorCollector);
                                    } catch (EsHadoopAbortHandlerException ahe) {
                                        // Count this as an abort operation, but capture the error message from the
                                        // exception as the reason. Log any cause since it will be swallowed.
                                        Throwable cause = ahe.getCause();
                                        if (cause != null) {
                                            LOG.error("Bulk write error handler abort exception caught with underlying cause:", cause);
                                        }
                                        result = HandlerResult.ABORT;
                                        error = ahe;
                                    } catch (Exception e) {
                                        throw new EsHadoopException("Encountered exception during error handler.", e);
                                    }
                                    switch (result) {
                                        case HANDLED:
                                            Assert.isTrue(errorCollector.getAndClearMessage() == null,
                                                    "Found pass message with Handled response. Be sure to return the value " +
                                                            "returned from pass(String) call.");
                                            // Check for document retries
                                            if (errorCollector.receivedRetries()) {
                                                byte[] retryDataBuffer = errorCollector.getAndClearRetryValue();
                                                if (retryDataBuffer == null || document.bytes() == retryDataBuffer) {
                                                    // Retry the same data.
                                                    // Continue to track the previous attempts.
                                                    retries.add(previousAttempt);
                                                    trackingBytesPosition++;
                                                } else {
                                                    // Check document contents to see if it was deserialized and reserialized.
                                                    if (ArrayUtils.sliceEquals(document.bytes(), document.offset(), document.length(), retryDataBuffer, 0, retryDataBuffer.length)) {
                                                        // Same document content. Leave the data as is in tracking buffer,
                                                        // and continue tracking previous attempts.
                                                        retries.add(previousAttempt);
                                                        trackingBytesPosition++;
                                                    } else {
                                                        // Document has changed.
                                                        // Track new attempts.
                                                        BytesRef newEntry = validateEditedEntry(retryDataBuffer);
                                                        data.remove(trackingBytesPosition);
                                                        data.copyFrom(newEntry);
                                                        // Determine if our tracking bytes array is going to expand.
                                                        if (ba.available() < newEntry.length()) {
                                                            trackingArrayExpanded = true;
                                                        }
                                                        previousAttempt.attemptNumber = 0;
                                                        newDocumentRetries.add(previousAttempt);
                                                    }
                                                }
                                            } else {
                                                // Handled but not retried means we won't have sent that document.
                                                data.remove(trackingBytesPosition);
                                                docsSkipped += 1;
                                            }
                                            break handlerLoop;
                                        case PASS:
                                            String reason = errorCollector.getAndClearMessage();
                                            if (reason != null) {
                                                bulkErrorPassReasons.add(reason);
                                            }
                                            continue handlerLoop;
                                        case ABORT:
                                            errorCollector.getAndClearMessage(); // Sanity clearing
                                            data.remove(trackingBytesPosition);
                                            docsAborted += 1;
                                            abortErrors.add(new BulkResponse.BulkError(previousAttempt.originalPosition, document, status, error));
                                            break handlerLoop;
                                    }
                                }
                            }
                            documentNumber++;
                        }
                        // Place any new documents that have been added at the end of the data buffer
                        // at the end of the retry list.
                        retries.addAll(newDocumentRetries);
                        if (!retries.isEmpty()) {
                            retryOperation = true;
                            waitTime = errorCollector.getDelayTimeBetweenRetries();
                        } else {
                            retryOperation = false;
                            if (docsAborted > 0) {
                                bulkResult = BulkResponse.partial(bar.getResponseCode(), totalTime, totalDocs, docsSent, docsSkipped, docsAborted, abortErrors);
                            } else {
                                bulkResult = BulkResponse.complete(bar.getResponseCode(), totalTime, totalDocs, docsSent, docsSkipped);
                            }
                        }
                    }
                } while (retryOperation);
                debugLog(bulkLoggingID, "Completed. [%d] Original Entries. [%d] Attempts. [%d/%d] Docs Sent. [%d/%d] Docs Skipped. [%d/%d] Docs Aborted.",
                        totalDocs, totalAttempts, docsSent, totalDocs, docsSkipped, totalDocs, docsAborted, totalDocs);
            } else {
                bulkResult = BulkResponse.complete();
            }
        } catch (EsHadoopException ex) {
            debugLog(bulkLoggingID, "Failed. %s", ex.getMessage());
            hadWriteErrors = true;
            throw ex;
        }
        // always discard data since there's no code path that uses the in flight data
        // during retry operations, the tracking bytes array may grow. In that case, do a hard reset.
        // TODO: Perhaps open an issue to limit the expansion of a single byte array (for repeated rewrite-retries)
        if (trackingArrayExpanded) {
            ba = new BytesArray(new byte[settings.getBatchSizeInBytes()], 0);
            data = new TrackingBytesArray(ba);
        } else {
            data.reset();
            dataEntries = 0;
        }
        return bulkResult;
    }
}
public class Include { private void includeFile ( File file ) throws IOException { } } | if ( ! file . exists ( ) ) throw new FileNotFoundException ( file . toString ( ) ) ; if ( file . isDirectory ( ) ) { List list = new List ( List . Unordered ) ; String [ ] ls = file . list ( ) ; for ( int i = 0 ; i < ls . length ; i ++ ) list . add ( ls [ i ] ) ; StringWriter sw = new StringWriter ( ) ; list . write ( sw ) ; reader = new StringReader ( sw . toString ( ) ) ; } else { reader = new BufferedReader ( new FileReader ( file ) ) ; } |
public class BirthDateFromAncestorsEstimator { /** * Apply a standard adjustment from an ancestor ' s marriage date to a
* person ' s birth date .
* @ param date the ancestor ' s marriage date
* @ return the adjusted date */
private LocalDate ancestorAdjustment ( final LocalDate date ) { } } | if ( date == null ) { return null ; } return date . plusYears ( typicals . ageAtMarriage ( ) + typicals . gapBetweenChildren ( ) ) . withMonthOfYear ( 1 ) . withDayOfMonth ( 1 ) ; |
public class H2O { /** * Calculate a unique model id that includes User - Agent info ( if it can be discovered ) .
* For the user agent info to be discovered , this needs to be called from a Jetty thread .
* This lets us distinguish models created from R vs . other front - ends , for example .
* At some future point , it could make sense to include a sessionId here .
* The algorithm is :
* descModel _ [ userAgentPrefixIfKnown _ ] cloudId _ monotonicallyIncreasingInteger
* Right now because of the way the REST API works , a bunch of numbers are created and
* thrown away . So the values are monotonically increasing but not contiguous .
* @ param desc Model description .
* @ return The suffix . */
synchronized public static String calcNextUniqueModelId ( String desc ) { } } | StringBuilder sb = new StringBuilder ( ) ; sb . append ( desc ) . append ( "_model_" ) ; // Append user agent string if we can figure it out .
String source = ServletUtils . getUserAgent ( ) ; if ( source != null ) { StringBuilder ua = new StringBuilder ( ) ; if ( source . contains ( "Safari" ) ) { ua . append ( "safari" ) ; } else if ( source . contains ( "Python" ) ) { ua . append ( "python" ) ; } else { for ( int i = 0 ; i < source . length ( ) ; i ++ ) { char c = source . charAt ( i ) ; if ( c >= 'a' && c <= 'z' ) { ua . append ( c ) ; continue ; } else if ( c >= 'A' && c <= 'Z' ) { ua . append ( c ) ; continue ; } break ; } } if ( ua . toString ( ) . length ( ) > 0 ) { sb . append ( ua . toString ( ) ) . append ( "_" ) ; } } // REST API needs some refactoring to avoid burning lots of extra numbers .
// I actually tried only doing the addAndGet only for POST requests ( and junk UUID otherwise ) ,
// but that didn ' t eliminate the gaps .
long n = nextModelNum . addAndGet ( 1 ) ; sb . append ( Long . toString ( CLUSTER_ID ) ) . append ( "_" ) . append ( Long . toString ( n ) ) ; return sb . toString ( ) ; |
public class JPAApplInfo { /** * Adds all of the Persistence Units defined in the specified
* persistence . xml file to the application , sorting by scope
* and archive name . < p >
* @ param pxml provides access to a persistence . xml file as well
* as the archive name and scope . */
public void addPersistenceUnits ( JPAPXml pxml ) { } } | final boolean isTraceOn = TraceComponent . isAnyTracingEnabled ( ) ; if ( isTraceOn && tc . isDebugEnabled ( ) ) Tr . debug ( tc , "addPersistenceUnits" , pxml ) ; JPAPuScope puScope = pxml . getPuScope ( ) ; String scopeKey = ( puScope == JPAPuScope . EAR_Scope ) ? EAR_SCOPE_MODULE_NAME : pxml . getArchiveName ( ) ; JPAScopeInfo scopeInfo = null ; synchronized ( puScopes ) { scopeInfo = puScopes . get ( scopeKey ) ; if ( scopeInfo == null ) { scopeInfo = new JPAScopeInfo ( scopeKey , puScope ) ; puScopes . put ( scopeKey , scopeInfo ) ; } } // delegate to scopeInfo to read in the persistence - unit data
JPAIntrospection . beginPUScopeVisit ( scopeInfo ) ; try { scopeInfo . processPersistenceUnit ( pxml , this ) ; } finally { JPAIntrospection . endPUScopeVisit ( ) ; } |
public class MeshGenerator { /** * Generates a solid cuboid mesh . This mesh includes the positions , normals , texture coords and tangents . The center is at the middle of the cuboid .
* @ param size The size of the cuboid to generate , on x , y and z
* @ return The vertex data */
public static VertexData generateCuboid ( Vector3f size ) { } } | final VertexData destination = new VertexData ( ) ; final VertexAttribute positionsAttribute = new VertexAttribute ( "positions" , DataType . FLOAT , 3 ) ; destination . addAttribute ( 0 , positionsAttribute ) ; final TFloatList positions = new TFloatArrayList ( ) ; final VertexAttribute normalsAttribute = new VertexAttribute ( "normals" , DataType . FLOAT , 3 ) ; destination . addAttribute ( 1 , normalsAttribute ) ; final TFloatList normals = new TFloatArrayList ( ) ; final TIntList indices = destination . getIndices ( ) ; final VertexAttribute textureCoordsAttribute = new VertexAttribute ( "textureCoords" , DataType . FLOAT , 2 ) ; destination . addAttribute ( 2 , textureCoordsAttribute ) ; final TFloatList texturesCoords = new TFloatArrayList ( ) ; // Generate the mesh
generateCuboid ( positions , normals , texturesCoords , indices , size ) ; // Put the mesh in the vertex data
positionsAttribute . setData ( positions ) ; normalsAttribute . setData ( normals ) ; textureCoordsAttribute . setData ( texturesCoords ) ; return destination ; |
public class ApiOvhOrder { /** * Get prices and contracts information
* REST : GET / order / dedicated / server / { serviceName } / feature / { duration }
* @ param feature [ required ] the feature
* @ param serviceName [ required ] The internal name of your dedicated server
* @ param duration [ required ] Duration */
public OvhOrder dedicated_server_serviceName_feature_duration_GET ( String serviceName , String duration , OvhOrderableSysFeatureEnum feature ) throws IOException { } } | String qPath = "/order/dedicated/server/{serviceName}/feature/{duration}" ; StringBuilder sb = path ( qPath , serviceName , duration ) ; query ( sb , "feature" , feature ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhOrder . class ) ; |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.