signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class ResponseCreationSupport {

    /**
     * Creates a {@link URI} from a path. Equivalent to calling
     * {@code new URI(null, null, path, null)} with any {@link URISyntaxException}
     * rethrown as an {@link IllegalArgumentException}.
     *
     * @param path the path to wrap in a URI
     * @return the resulting URI (illegal characters are quoted by the constructor)
     * @throws IllegalArgumentException if the string violates RFC 2396
     */
    public static URI uriFromPath(String path) throws IllegalArgumentException {
        URI result;
        try {
            result = new URI(null, null, path, null);
        } catch (URISyntaxException cause) {
            // Preserve the original parse failure as the cause.
            throw new IllegalArgumentException(cause);
        }
        return result;
    }
}
public class CPRuleAssetCategoryRelServiceUtil {

    /**
     * Adds a relation between a CP rule and an asset category.
     *
     * NOTE FOR DEVELOPERS:
     * Never modify this class directly. Add custom service methods to
     * {@link com.liferay.commerce.product.service.impl.CPRuleAssetCategoryRelServiceImpl}
     * and rerun ServiceBuilder to regenerate this class.
     *
     * @param cpRuleId the primary key of the CP rule
     * @param assetCategoryId the primary key of the asset category
     * @param serviceContext the service context carrying permissions/audit data
     * @return the created relation model
     * @throws com.liferay.portal.kernel.exception.PortalException if the remote service call fails
     */
    public static com.liferay.commerce.product.model.CPRuleAssetCategoryRel addCPRuleAssetCategoryRel(
            long cpRuleId, long assetCategoryId,
            com.liferay.portal.kernel.service.ServiceContext serviceContext)
        throws com.liferay.portal.kernel.exception.PortalException {
        // Thin static facade: delegates to the service implementation looked up via getService().
        return getService().addCPRuleAssetCategoryRel(cpRuleId, assetCategoryId, serviceContext);
    }
}
public class StocatorPath { /** * Transform scheme : / / hostname / a / b / _ temporary / 0 into scheme : / / hostname / a / b / * We mainly need this for mkdirs operations * @ param path input directory with temporary prefix * @ return modified directory without temporary prefix */ public String getBaseDirectory ( String path ) { } }
if ( path != null ) { int finishIndex = path . indexOf ( HADOOP_TEMPORARY ) ; if ( finishIndex > 0 ) { String newPath = path . substring ( 0 , finishIndex ) ; if ( newPath . endsWith ( "/" ) ) { return newPath . substring ( 0 , newPath . length ( ) - 1 ) ; } return newPath ; } } return path ;
public class WebContainer {

    /**
     * Access the temp directory location unique to this running bundle's
     * webcontainer. This will be of the form
     * "server\workarea\&lt;framework-specific-bundle-path&gt;\temp\" (with
     * platform-specific separator characters) or will be null if the bundle is
     * not running.
     *
     * @return the absolute temp directory path ending in a separator char,
     *         or {@code null} if the container is not running or lookup fails
     */
    public static String getTempDirectory() {
        WebContainer instance = (WebContainer) self.get();
        if (instance == null) {
            // Bundle is not running: nothing to resolve.
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                Tr.event(tc, "WebContainer not running, returning null temp dir");
            }
            return null;
        }
        String rc = null;
        try {
            // Ask OSGi for the bundle-private persistent storage area named "temp".
            File f = instance.context.getBundleContext().getDataFile("temp");
            if (null != f) {
                rc = f.getAbsolutePath() + File.separatorChar;
            }
        } catch (Throwable t) {
            // Record the failure for serviceability and fall back to null.
            FFDCFilter.processException(t, CLASS_NAME, "getTempDirectory", self);
            if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) {
                Tr.event(tc, "Error getting temp dir; " + t);
            }
            rc = null;
        }
        return rc;
    }
}
public class OWLDisjointDataPropertiesAxiomImpl_CustomFieldSerializer {

    /**
     * Deserializes the content of the object from the
     * {@link com.google.gwt.user.client.rpc.SerializationStreamReader}.
     *
     * @param streamReader the stream reader to read the object's content from
     * @param instance the object instance to deserialize into
     * @throws com.google.gwt.user.client.rpc.SerializationException
     *         if the deserialization operation is not successful
     */
    @Override
    public void deserializeInstance(SerializationStreamReader streamReader,
            OWLDisjointDataPropertiesAxiomImpl instance) throws SerializationException {
        // Delegates to the static helper that performs the actual field reads.
        deserialize(streamReader, instance);
    }
}
public class Fingerprint { /** * Returns true if any of the builds recorded in this fingerprint * is still retained . * This is used to find out old fingerprint records that can be removed * without losing too much information . */ public synchronized boolean isAlive ( ) { } }
if ( original != null && original . isAlive ( ) ) return true ; for ( Entry < String , RangeSet > e : usages . entrySet ( ) ) { Job j = Jenkins . getInstance ( ) . getItemByFullName ( e . getKey ( ) , Job . class ) ; if ( j == null ) continue ; Run firstBuild = j . getFirstBuild ( ) ; if ( firstBuild == null ) continue ; int oldest = firstBuild . getNumber ( ) ; if ( ! e . getValue ( ) . isSmallerThan ( oldest ) ) return true ; } return false ;
public class lbwlm { /** * Use this API to update lbwlm . */ public static base_response update ( nitro_service client , lbwlm resource ) throws Exception { } }
lbwlm updateresource = new lbwlm ( ) ; updateresource . wlmname = resource . wlmname ; updateresource . katimeout = resource . katimeout ; return updateresource . update_resource ( client ) ;
public class PersonGroupPersonsImpl {

    /**
     * Add a representative face to a person for identification. The input face
     * is specified as an image with a targetFace rectangle.
     *
     * @param personGroupId Id referencing a particular person group
     * @param personId Id referencing a particular person
     * @param url publicly reachable URL of an image
     * @param addPersonFaceFromUrlOptionalParameter the object representing the
     *        optional parameters to be set before calling this API
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PersistedFace object
     */
    public Observable<PersistedFace> addPersonFaceFromUrlAsync(String personGroupId, UUID personId,
            String url, AddPersonFaceFromUrlOptionalParameter addPersonFaceFromUrlOptionalParameter) {
        // Unwrap the ServiceResponse envelope and surface only the body to callers.
        return addPersonFaceFromUrlWithServiceResponseAsync(personGroupId, personId, url,
                addPersonFaceFromUrlOptionalParameter)
            .map(new Func1<ServiceResponse<PersistedFace>, PersistedFace>() {
                @Override
                public PersistedFace call(ServiceResponse<PersistedFace> response) {
                    return response.body();
                }
            });
    }
}
public class Config {

    /**
     * <p>Initializes the configuration.</p>
     * <p>Looks for a properties file (programName.conf) in the classpath, the
     * user home directory, and in the run directory to load.</p>
     * <p>The command line arguments are parsed and added to the configuration.</p>
     *
     * @param programName the program name
     * @param args the command line arguments
     * @param parser the parser to use for parsing the arguments
     * @param otherPackages other packages whose configs we should load
     * @return non config/option parameters from the command line
     */
    @SneakyThrows
    public static String[] initialize(String programName, String[] args, CommandLineParser parser,
            String... otherPackages) {
        Config.preload();
        // NOTE(review): line above documents intent; actual call below is Preloader.preload().
        Preloader.preload();
        String rval[];
        if (args != null) {
            rval = parser.parse(args);
        } else {
            rval = new String[0];
        }
        // Check if we should only explain the config: the explain setter records
        // every property assignment instead of just applying it.
        if (NamedOption.CONFIG_EXPLAIN.<Boolean>getValue()) {
            getInstance().setterFunction = ConfigExplainSettingFunction.INSTANCE;
        } else {
            getInstance().setterFunction = ConfigSettingFunction.INSTANCE;
        }
        if (otherPackages != null) {
            for (String otherPackage : otherPackages) {
                loadPackageConfig(otherPackage);
            }
        }
        // Auto-discover the package of the calling class.
        String className = getCallingClass();
        if (className != null) {
            loadDefaultConf(className);
        }
        // Look for application specific properties; later sources override earlier
        // ones (classpath, then user home, then local config dir, then run dir).
        Stream.of(new ClasspathResource(programName.replace(".", "/") + ".conf", defaultClassLoader),
                  Resources.fromFile(new File(SystemInfo.USER_HOME, programName + ".conf")),
                  localConfigDirectory.getChild(programName + ".conf"),
                  Resources.fromFile(new File(programName + ".conf")))
              .filter(Resource::exists)
              .forEach(resource -> {
                  log.finest("Loading {0}.conf from :", programName);
                  loadConfig(resource);
              });
        // Store the command line arguments as config settings.
        if (args != null) {
            parser.getSetEntries().forEach(entry -> {
                ConfigSettingFunction.INSTANCE.setProperty(entry.getKey(), entry.getValue(), "CommandLine");
            });
        }
        // An explicit --config file wins over everything loaded so far.
        if (parser.isSet(NamedOption.CONFIG)) {
            loadConfig(NamedOption.CONFIG.getValue());
        }
        setAllCommandLine(parser);
        // If config-explain was set then output the config recording and then quit.
        if (parser.isSet(NamedOption.CONFIG_EXPLAIN)) {
            ConfigExplainSettingFunction settings =
                (ConfigExplainSettingFunction) getInstance().setterFunction;
            for (String key : new TreeSet<>(settings.properties.keySet())) {
                System.err.println(key);
                int max = settings.properties.get(key).size();
                int i = 1;
                // The last (winning) assignment is marked with '*'.
                for (String prop : settings.properties.get(key)) {
                    System.err.println("\t" + (i == max ? "*" : "") + prop.replaceAll("\r?\n", " "));
                    i++;
                }
                System.err.println("--------------------------------------------------");
            }
            System.exit(0);
        }
        return rval;
    }
}
import java.util.*;

public class Main {

    /** Total length of the conceptual repeated string used by the classic examples. */
    private static final int REPEATED_LENGTH = 10;

    /**
     * Computes the number of times a specific character occurs in the string
     * formed by repeating {@code inputString} up to a total length of 10.
     *
     * Examples:
     *   characterFrequencyInRepeatedString("abcac", 'a') -> 4
     *   characterFrequencyInRepeatedString("abca", 'c')  -> 2
     *   characterFrequencyInRepeatedString("aba", 'a')   -> 7
     *
     * @param inputString the string whose repetition is scanned
     * @param character the character whose occurrences are counted
     * @return frequency of the character in the repeated string
     */
    public static int characterFrequencyInRepeatedString(String inputString, char character) {
        // Backward-compatible entry point: the original hard-coded 10.
        return characterFrequencyInRepeatedString(inputString, character, REPEATED_LENGTH);
    }

    /**
     * Generalized variant: counts occurrences of {@code character} in the first
     * {@code totalLength} characters of {@code inputString} repeated indefinitely.
     *
     * @param inputString the pattern string (may be empty; then the count is 0)
     * @param character the character to count
     * @param totalLength total length of the conceptual repeated string
     * @return frequency of the character within the first {@code totalLength} chars
     */
    public static int characterFrequencyInRepeatedString(String inputString, char character,
                                                         int totalLength) {
        // Guard: the original divided by inputString.length() and would throw
        // ArithmeticException on an empty pattern.
        if (inputString == null || inputString.isEmpty() || totalLength <= 0) {
            return 0;
        }
        int perCopy = 0;
        for (int i = 0; i < inputString.length(); i++) {
            if (inputString.charAt(i) == character) {
                perCopy++;
            }
        }
        // Whole copies of the pattern that fit in totalLength...
        int frequency = perCopy * (totalLength / inputString.length());
        // ...plus the leading fragment of one more copy.
        int remaining = totalLength % inputString.length();
        for (int i = 0; i < remaining; i++) {
            if (inputString.charAt(i) == character) {
                frequency++;
            }
        }
        return frequency;
    }

    public static void main(String args[]) {
        System.out.println(characterFrequencyInRepeatedString("abcac", 'a'));
        System.out.println(characterFrequencyInRepeatedString("abca", 'c'));
        System.out.println(characterFrequencyInRepeatedString("aba", 'a'));
    }
}
public class Blobs {

    /**
     * Reads the entire contents of {@code file} into a heap ByteBuffer.
     * (As the original comment warned: if the file is large you probably
     * should not insert it into Cassandra either ;)
     *
     * Fix: a single {@link FileChannel#read(ByteBuffer)} call is not
     * guaranteed to fill the buffer, so we loop until the buffer is full
     * or EOF is reached.
     *
     * @param file the file to read fully
     * @return a flipped buffer positioned at 0 containing the file bytes
     * @throws IOException if the file cannot be opened or read
     */
    private static ByteBuffer readAll(File file) throws IOException {
        try (FileInputStream inputStream = new FileInputStream(file)) {
            FileChannel channel = inputStream.getChannel();
            ByteBuffer buffer = ByteBuffer.allocate((int) channel.size());
            while (buffer.hasRemaining() && channel.read(buffer) != -1) {
                // keep reading: partial reads are legal for FileChannel
            }
            buffer.flip();
            return buffer;
        }
    }
}
public class GeneratedMessage {

    /**
     * Called by subclasses to parse an unknown field.
     *
     * @param input the stream positioned just after the field's tag
     * @param unknownFields builder collecting fields not known to this message
     * @param extensionRegistry registry of known extensions (unused here; kept
     *        for the subclass contract)
     * @param tag the wire-format tag that was just read
     * @return {@code true} unless the tag is an end-group tag
     * @throws IOException if reading the field from the stream fails
     */
    protected boolean parseUnknownField(CodedInputStream input,
            UnknownFieldSet.Builder unknownFields,
            ExtensionRegistryLite extensionRegistry, int tag) throws IOException {
        // mergeFieldFrom consumes the field and reports whether parsing may continue.
        return unknownFields.mergeFieldFrom(tag, input);
    }
}
public class Op {

    /**
     * Creates an array with the specified elements and an <i>operation
     * expression</i> on it.
     *
     * @param elements the elements of the array being created
     * @return an operator, ready for chaining
     */
    public static <T> Level0ArrayOperator<Character[], Character> onArrayFor(final Character... elements) {
        // Normalizes the varargs into a required (non-null) object array before delegating.
        return onArrayOf(Types.CHARACTER, VarArgsUtil.asRequiredObjectArray(elements));
    }
}
public class CLIQUE {

    /**
     * The specified sorted list of dense subspaces is divided into the selected
     * set I and the pruned set P. For each set the difference from the specified
     * mean values is computed.
     *
     * @param denseSubspaces the dense subspaces in reverse order by their coverage
     * @param mi the mean of the selected sets I
     * @param mp the mean of the pruned sets P
     * @return the difference from the specified mean values; the first array is
     *         the difference from the mean of the selected set I, the second
     *         the difference from the mean of the pruned set P
     */
    private double[][] computeDiffs(List<CLIQUESubspace> denseSubspaces, int[] mi, int[] mp) {
        int n = denseSubspaces.size() - 1;
        double[] diff_mi = new double[n + 1], diff_mp = new double[n + 1];
        // Running (prefix) sums: diff_mi is filled front-to-back for the selected
        // set, diff_mp back-to-front for the pruned set.
        double resultMI = 0, resultMP = 0;
        for (int i = 0; i < denseSubspaces.size(); i++) {
            // Selected set I: deviation of subspace i's coverage from mean mi[i].
            double diffMI = Math.abs(denseSubspaces.get(i).getCoverage() - mi[i]);
            resultMI += log2OrZero(diffMI);
            // Pruned set P: walks from the tail; the last iteration (i == n)
            // contributes nothing because the pruned set is then empty.
            double diffMP = (i != n)
                ? Math.abs(denseSubspaces.get(n - i).getCoverage() - mp[n - 1 - i])
                : 0;
            resultMP += log2OrZero(diffMP);
            diff_mi[i] = resultMI;
            if (i != n) {
                diff_mp[n - 1 - i] = resultMP;
            }
        }
        return new double[][] { diff_mi, diff_mp };
    }
}
public class LauncherModel {

    /**
     * Prepares this launcher: resolves the owner's {@code Transformable}
     * feature and registers the provider for any listener interfaces it
     * implements.
     *
     * @param provider the feature provider owning this launcher
     */
    @Override
    public void prepare(FeatureProvider provider) {
        super.prepare(provider);
        localizable = provider.getFeature(Transformable.class);
        // The provider may opt into launch notifications by implementing
        // either (or both) listener interfaces.
        if (provider instanceof LauncherListener) {
            addListener((LauncherListener) provider);
        }
        if (provider instanceof LaunchableListener) {
            addListener((LaunchableListener) provider);
        }
    }
}
public class Mapper {

    /**
     * Returns this mapper with its delegate wrapped in a {@link TierKeyMap},
     * enabling tiered keys like "a.b[0].c.d[3]". If the key class type is not
     * {@link String}, nothing observable happens.
     *
     * @return this mapper, for chaining
     */
    public Mapper<K, V> tierKey() {
        // Idempotent: only wrap once.
        if (!(delegate.get() instanceof TierKeyMap)) {
            delegate = Optional.of((Map<K, V>) TierKeyMap.from(delegate.get()));
        }
        return this;
    }
}
public class JspCompilationContext { /** * Get the full value of a URI relative to this compilations context * uses current file as the base . * @ param uri The relative URI * @ return absolute URI */ public String resolveRelativeUri ( String uri ) { } }
// sometimes we get uri ' s massaged from File ( String ) , so check for // a root directory separator char if ( uri . startsWith ( "/" ) || uri . startsWith ( File . separator ) ) { return uri ; } else { return baseURI + uri ; }
public class InstructionView {

    /**
     * Initializes all animations needed to show/hide views.
     */
    private void initializeAnimations() {
        Context context = getContext();
        // Slide-in/out animations for the reroute banner at the top edge.
        rerouteSlideDownTop = AnimationUtils.loadAnimation(context, R.anim.slide_down_top);
        rerouteSlideUpTop = AnimationUtils.loadAnimation(context, R.anim.slide_up_top);
    }
}
public class AmazonAutoScalingClient {

    /**
     * Sets the size of the specified Auto Scaling group.
     *
     * For more information about desired capacity, see
     * <a href="https://docs.aws.amazon.com/autoscaling/ec2/userguide/what-is-amazon-ec2-auto-scaling.html">What
     * Is Amazon EC2 Auto Scaling?</a> in the <i>Amazon EC2 Auto Scaling User Guide</i>.
     *
     * @param request the SetDesiredCapacity request
     * @return Result of the SetDesiredCapacity operation returned by the service.
     * @throws ScalingActivityInProgressException
     *         The operation can't be performed because there are scaling activities in progress.
     * @throws ResourceContentionException
     *         You already have a pending update to an Amazon EC2 Auto Scaling resource (for example,
     *         an Auto Scaling group, instance, or load balancer).
     * @sample AmazonAutoScaling.SetDesiredCapacity
     * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/autoscaling-2011-01-01/SetDesiredCapacity"
     *      target="_top">AWS API Documentation</a>
     */
    @Override
    public SetDesiredCapacityResult setDesiredCapacity(SetDesiredCapacityRequest request) {
        // Standard SDK pattern: pre-process (handlers, credentials) then execute.
        request = beforeClientExecution(request);
        return executeSetDesiredCapacity(request);
    }
}
public class RoutingTrie {

    /**
     * Returns a {@link Node} which is mapped to the given {@code path}.
     * If {@code exact} is {@code true}, an internally-added node may be returned.
     *
     * @param path the path to look up; must not be {@code null}
     * @param exact whether internal nodes are acceptable matches
     * @return the matching node, or {@code null} if none is found
     */
    @Nullable
    @VisibleForTesting
    Node<V> findNode(String path, boolean exact) {
        requireNonNull(path, "path");
        // Recursive walk from the root, starting at character offset 0.
        return findNode(root, path, 0, exact);
    }
}
public class FundamentalToProjective {

    /**
     * Given three fundamental matrices describing the relationship between three views, computes a
     * consistent set of projective camera matrices. Consistent means that the camera matrices will
     * give back the same fundamental matrices. Non-linear refinement might be needed to get an
     * optimal solution. The first camera matrix, without loss of generality, is P1 = [I|0].
     *
     * NOTE: HZ does not recommend using this function because the optimal solution is not geometric
     * and prone to errors. Instead it recommends use of the trifocal tensor to find a set of camera
     * matrices. When testing the function it was noted that the geometry had to be carefully
     * selected to ensure stability, hinting that it might not be the most practical algorithm on
     * real data.
     *
     * <ol>
     * <li>Page 301 in Y. Ma, S. Soatto, J. Kosecka, and S. S. Sastry, "An Invitation to 3-D Vision",
     * Springer-Verlag, 2004</li>
     * <li>Page 386 and 255 in R. Hartley and A. Zisserman, "Multiple View Geometry in Computer
     * Vision", 2nd Ed, Cambridge 2003</li>
     * </ol>
     *
     * @param F21 (Input) Fundamental matrix between view 1 and 2
     * @param F31 (Input) Fundamental matrix between view 1 and 3
     * @param F32 (Input) Fundamental matrix between view 2 and 3
     * @param P2 (Output) Camera matrix for view 2
     * @param P3 (Output) Camera matrix for view 3
     * @return {@code true} on success, {@code false} if P2 could not be inverted
     */
    public boolean threeView(DMatrixRMaj F21, DMatrixRMaj F31, DMatrixRMaj F32,
                             DMatrixRMaj P2, DMatrixRMaj P3) {
        // left epipoles. renamed to make code easier to read
        Point3D_F64 e21 = e1;
        Point3D_F64 e31 = e2;
        // Extract epipole of view 2 w.r.t. view 1 and derive P2 from F21.
        alg.process(F21, null, e21);
        twoView(F21, e21, zero, 1, P2);
        alg.process(F31, null, e31);
        // find transform which will make P2 = [I|0]
        if (!p2i.process(P2))
            return false;
        // Perform non-linear optimization to find the parameters for u and minimize the difference
        residual.setF31(F31, e31);
        residual.setF32(F32);
        residual.setH(p2i.getPseudoInvP(), p2i.getU());
        // optimizer.setVerbose(System.out, 0);
        optimizer.setFunction(residual, jacobian);
        optimizer.initialize(initialV, convergence.ftol, convergence.gtol);
        // Iterate until convergence or the iteration budget is exhausted.
        for (int i = 0; i < convergence.maxIterations; i++) {
            if (optimizer.iterate())
                break;
        }
        // Reconstruct P3 from the optimized parameter vector.
        P3.set(residual.computeP3(optimizer.getParameters()));
        // // used to sanity check the found solution
        // double err[] = new double[9];
        // residual.process(optimizer.getParameters(), err);
        // residual.F32.print();
        // residual.F32_est.print();
        return true;
    }
}
public class StartDeviceSyncRequest { /** * Request structure to start the device sync . Required . * @ param features * Request structure to start the device sync . Required . * @ return Returns a reference to this object so that method calls can be chained together . * @ see Feature */ public StartDeviceSyncRequest withFeatures ( Feature ... features ) { } }
java . util . ArrayList < String > featuresCopy = new java . util . ArrayList < String > ( features . length ) ; for ( Feature value : features ) { featuresCopy . add ( value . toString ( ) ) ; } if ( getFeatures ( ) == null ) { setFeatures ( featuresCopy ) ; } else { getFeatures ( ) . addAll ( featuresCopy ) ; } return this ;
public class XMLOutputter { /** * Resets this < code > XMLOutputter < / code > and configures it for the * specified output stream and encoding . This resets the state to * { @ link # BEFORE _ XML _ DECLARATION } and clears the stack of open elements . * @ param out the output stream to write to , not < code > null < / code > . * @ param encoding the encoding , not < code > null < / code > . * @ throws IllegalArgumentException if < code > out = = null | | encoding = = null < / code > . * @ throws UnsupportedEncodingException if the specified encoding is not supported . */ public final void reset ( Writer out , String encoding ) throws IllegalArgumentException , UnsupportedEncodingException { } }
// Check arguments if ( encoding == null ) { throw new IllegalArgumentException ( "encoding == null" ) ; } reset ( out ) ; // Store the fields _encoder = XMLEncoder . getEncoder ( encoding ) ; // State has changed , check checkInvariants ( ) ;
public class CacheImpl { /** * When we remove any object , we must found again nextTimeSomeExpired . * @ param key - key to remove * @ return - object , if any */ public Object remove ( Object key ) { } }
synchronized ( objects ) { Object remove = objects . remove ( key ) ; objectTimeStamps . remove ( key ) ; if ( remove != null ) { findNextExpireTime ( ) ; } return remove ; }
public class Entropy {

    /**
     * <p>Calculates and returns the Shannon entropy of the given byte array.</p>
     * <p>The returned value lies between <code>0</code> (no entropy) and
     * <code>1</code> (maximum entropy).</p>
     * <p>This may be used to assess the entropy of randomly generated key
     * material but is in no way by itself a reliable indicator of true
     * randomness: it measures distribution entropy, not sequence entropy.</p>
     *
     * @param bytes the data to measure
     * @return normalized Shannon entropy in [0, 1]
     */
    public static final double shannon(byte[] bytes) {
        final int total = bytes.length;
        final long[] histogram = new long[256];
        for (byte b : bytes) {
            // Shift the signed byte (-128..127) into the index range 0..255.
            histogram[b - Byte.MIN_VALUE]++;
        }
        final double log256 = Math.log(256);
        double entropy = 0;
        for (long count : histogram) {
            if (count == 0) {
                continue;
            }
            double p = (double) count / total;
            // log base 256 normalizes the result into [0, 1].
            entropy -= p * (Math.log(p) / log256);
        }
        return entropy;
    }
}
public class HttpMessage {

    /**
     * Returns a map of data suitable for including in an {@link Event}.
     *
     * @return map keyed by the EVENT_DATA_* constants; response entries are
     *         only present when a response header has been received
     * @since TODO add version
     */
    @Override
    public Map<String, String> toEventData() {
        Map<String, String> map = new HashMap<String, String>();
        map.put(EVENT_DATA_URI, getRequestHeader().getURI().toString());
        map.put(EVENT_DATA_REQUEST_HEADER, getRequestHeader().toString());
        map.put(EVENT_DATA_REQUEST_BODY, getRequestBody().toString());
        // Response data is optional: the message may not have been sent yet.
        if (!getResponseHeader().isEmpty()) {
            map.put(EVENT_DATA_RESPONSE_HEADER, getResponseHeader().toString());
            map.put(EVENT_DATA_RESPONSE_BODY, getResponseBody().toString());
        }
        return map;
    }
}
public class HistoryFilterPlusDialog {

    /**
     * This method initializes btnReset (lazily, Swing-builder style).
     *
     * @return javax.swing.JButton the (re)usable reset button
     */
    private JButton getBtnReset() {
        if (btnReset == null) {
            btnReset = new JButton();
            btnReset.setText(Constant.messages.getString("history.filter.button.clear"));
            btnReset.addActionListener(new java.awt.event.ActionListener() {
                @Override
                public void actionPerformed(java.awt.event.ActionEvent e) {
                    exitResult = JOptionPane.NO_OPTION;
                    // Unset everything: clear every selection list and text field,
                    // then reset the underlying filter model.
                    methodList.setSelectedIndices(new int[0]);
                    codeList.setSelectedIndices(new int[0]);
                    tagList.setSelectedIndices(new int[0]);
                    riskList.setSelectedIndices(new int[0]);
                    confidenceList.setSelectedIndices(new int[0]);
                    notesComboBox.setSelectedItem(HistoryFilter.NOTES_IGNORE);
                    regexInc.setText("");
                    regexExc.setText("");
                    filter.reset();
                }
            });
        }
        return btnReset;
    }
}
public class ELParser {

    /**
     * This is an element that can start a value.
     *
     * NOTE(review): this method appears to be JavaCC-generated (jj_* state
     * machinery); prefer regenerating from the grammar over hand-editing.
     *
     * @return the parsed prefix expression
     * @throws ParseException if the next token cannot start a value
     */
    final public Expression ValuePrefix() throws ParseException {
        Expression ret;
        // Dispatch on the next token kind (jj_ntk caches the lookahead).
        switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
        case INTEGER_LITERAL:
        case FLOATING_POINT_LITERAL:
        case STRING_LITERAL:
        case TRUE:
        case FALSE:
        case NULL:
            ret = Literal();
            break;
        case LPAREN:
            // Parenthesized sub-expression.
            jj_consume_token(LPAREN);
            ret = Expression();
            jj_consume_token(RPAREN);
            break;
        default:
            jj_la1[27] = jj_gen;
            // Unbounded lookahead: try a function invocation first.
            if (jj_2_1(2147483647)) {
                ret = FunctionInvocation();
            } else {
                switch ((jj_ntk == -1) ? jj_ntk() : jj_ntk) {
                case IDENTIFIER:
                    ret = NamedValue();
                    break;
                default:
                    jj_la1[28] = jj_gen;
                    // Force a parse error with an impossible token kind.
                    jj_consume_token(-1);
                    throw new ParseException();
                }
            }
        }
        // Generated-code idiom: guarded return keeps the compiler happy.
        { if (true) return ret; }
        throw new Error("Missing return statement in function");
    }
}
public class DefaultSystemInfoProvider {

    /**
     * Reads and parses '/proc/cpuinfo' into a cached map of properties and
     * returns the value for the requested key.
     *
     * NOTE(review): the lazy initialization of {@code cpuInfo} is not
     * synchronized — concurrent first calls could race; confirm single-threaded
     * use by callers.
     *
     * @param target the cpuinfo key to look up (e.g. a field name from /proc/cpuinfo)
     * @return the value associated with the key
     * @throws IOException if /proc/cpuinfo cannot be read
     * @throws InterruptedException declared for the subclass contract
     * @throws UnsupportedOperationException if the key is not present in /proc/cpuinfo
     */
    protected String getCpuInfo(String target)
            throws IOException, InterruptedException, UnsupportedOperationException {
        // if the CPU data has not been previously acquired, then acquire it now
        if (cpuInfo == null) {
            cpuInfo = new HashMap<>();
            try (BufferedReader br = new BufferedReader(new FileReader("/proc/cpuinfo"))) {
                for (String line; (line = br.readLine()) != null; ) {
                    // Each line is "key : value"; split only on the first colon.
                    String parts[] = line.split(":", 2);
                    if (parts.length >= 2
                            && !parts[0].trim().isEmpty()
                            && !parts[1].trim().isEmpty()) {
                        String cpuKey = parts[0].trim();
                        // Later duplicate keys (one block per core) overwrite earlier ones.
                        cpuInfo.put(cpuKey, parts[1].trim());
                    }
                }
            }
        }
        if (cpuInfo.containsKey(target)) {
            return cpuInfo.get(target);
        }
        throw new UnsupportedOperationException("Invalid target: " + target);
    }
}
public class ImageGenerator {

    /**
     * Apply the specified TypeFace to the date font.
     * OPTIONAL
     *
     * @param fontName name of the date font asset (looked up under "fonts/" in assets)
     */
    public void setDateTypeFace(String fontName) {
        mDateTypeFace = Typeface.createFromAsset(mContext.getAssets(), "fonts/" + fontName);
        // Remember that a custom typeface was supplied so generation uses it.
        mDateTypeFaceSet = true;
    }
}
public class TrackerLocator {

    /**
     * Gets an available tracker server address.
     *
     * @return the first tracker address that is currently eligible for a
     *         connection attempt
     * @throws FdfsUnavailableException if no configured tracker is available
     */
    public InetSocketAddress getTrackerAddress() {
        TrackerAddressHolder holder;
        // Walk the circular list of configured tracker addresses once and
        // return the first one that may currently be connected to.
        for (int i = 0; i < trackerAddressCircular.size(); i++) {
            holder = trackerAddressCircular.next();
            if (holder.canTryToConnect(retryAfterSecend)) {
                return holder.getAddress();
            }
        }
        // Exception message is intentionally left in the original language.
        throw new FdfsUnavailableException("找不到可用的tracker " + getTrackerAddressConfigString());
    }
}
public class BaseDataAdaptiveTableLayoutAdapter { /** * Switch 2 rows with data * @ param rowIndex row from * @ param rowToIndex row to */ void switchTwoRows ( int rowIndex , int rowToIndex ) { } }
for ( int i = 0 ; i < getItems ( ) . length ; i ++ ) { Object cellData = getItems ( ) [ rowToIndex ] [ i ] ; getItems ( ) [ rowToIndex ] [ i ] = getItems ( ) [ rowIndex ] [ i ] ; getItems ( ) [ rowIndex ] [ i ] = cellData ; }
public class LayerMap {

    /**
     * Remove a combination.
     *
     * @param it plot item
     * @param task visualization task
     * @return previous value mapped to the (item, task) pair, or {@code null}
     */
    public Pair<Element, Visualization> remove(PlotItem it, VisualizationTask task) {
        // The composite key is derived from the (item, task) pair.
        return map.remove(key(it, task));
    }
}
public class AddOnManager { /** * Returns the registered add - on identified by the identifier . * @ param pAddOnId The add - on ' s identifier . * @ return The registered add - on identified by the identifier . */ public AddOn getAddOn ( String pAddOnId ) { } }
if ( mRegisteredAddOns . containsKey ( pAddOnId ) ) { return mRegisteredAddOns . get ( pAddOnId ) ; } else { LOGGER . warn ( "Add-on " + pAddOnId + " is not loaded." ) ; return null ; }
public class JKObjectUtil {

    // Invokes a static method by name via reflection, wrapping any failure in
    // a JKException.
    //
    // NOTE(review): every parameter type is assumed to be Object.class, so
    // getMethod() only finds methods declared with Object parameters — a
    // static method taking e.g. (String, int) will NOT be found. Confirm this
    // matches how callers declare their target methods.
    public static Object callStaticMethod(Class clas, String methodName, Object... params) {
        try {
            Class[] paramsTypes = new Class[params.length];
            for (int i = 0; i < params.length; i++) {
                paramsTypes[i] = Object.class;
            }
            Method method = clas.getMethod(methodName, paramsTypes);
            // null receiver: static invocation.
            Object o = method.invoke(null, params);
            return o;
        } catch (Exception e) {
            throw new JKException(e);
        }
    }
}
public class Traverson {

    /**
     * Start traversal at the application/hal+json resource identified by {@code uri}.
     *
     * @param uri the {@code URI} of the initial HAL resource
     * @return this Traverson, initialized with the {@link HalRepresentation}
     *         identified by {@code uri}
     */
    public Traverson startWith(final String uri) {
        startWith = hrefToUrl(uri);
        // The starting resource is also the context for resolving relative links.
        contextUrl = startWith;
        // Discard any result from a previous traversal.
        lastResult = null;
        return this;
    }
}
public class DescribeMyUserProfileRequestMarshaller {

    /**
     * Marshall the given parameter object.
     *
     * @param describeMyUserProfileRequest the request to marshal; must not be {@code null}
     * @param protocolMarshaller the protocol marshaller to write to
     * @throws SdkClientException if the argument is null or marshalling fails
     */
    public void marshall(DescribeMyUserProfileRequest describeMyUserProfileRequest,
            ProtocolMarshaller protocolMarshaller) {
        if (describeMyUserProfileRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            // Intentionally empty: this request type has no payload members to
            // marshal; the try/catch shape is kept by the SDK code generator.
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }
}
public class SesClient {

    /**
     * Verify the domain.
     *
     * When this API is called successfully, it will return a token which is used
     * to verify the domain. You should put the token into DNS settings as a TXT
     * record, then SES can finish verifying automatically.
     *
     * @param request the request object which includes the domain to verify
     * @return the verification response containing the token
     * @see com.baidubce.services.ses.model.VerifyDomainRequest
     */
    public VerifyDomainResponse verifyDomain(VerifyDomainRequest request) {
        // Validate inputs before constructing the HTTP request.
        checkNotNull(request, "object request should not be null.");
        assertStringNotNullOrEmpty(request.getDomainName(),
                "object domainName should not be null or empty");
        // PUT /verifiedDomain/{domainName}
        InternalRequest internalRequest =
            this.createRequest("verifiedDomain", request, HttpMethodName.PUT, request.getDomainName());
        return this.invokeHttpClient(internalRequest, VerifyDomainResponse.class);
    }
}
public class RecordSetsInner {

    /**
     * Lists all record sets in a Private DNS zone.
     *
     * @param resourceGroupName the name of the resource group
     * @param privateZoneName the name of the Private DNS zone (without a terminating dot)
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;RecordSetInner&gt; object
     */
    public Observable<Page<RecordSetInner>> listAsync(final String resourceGroupName,
            final String privateZoneName) {
        // Unwrap the ServiceResponse envelope and surface only the page body.
        return listWithServiceResponseAsync(resourceGroupName, privateZoneName)
            .map(new Func1<ServiceResponse<Page<RecordSetInner>>, Page<RecordSetInner>>() {
                @Override
                public Page<RecordSetInner> call(ServiceResponse<Page<RecordSetInner>> response) {
                    return response.body();
                }
            });
    }
}
public class JSLocalConsumerPoint {

    /**
     * Unlocks all messages which have been locked to this LCP but not consumed.
     *
     * @throws SISessionUnavailableException if the consumer session has been closed
     * @throws SIResourceException thrown if there is a problem in the msgStore
     */
    @Override
    public void unlockAll() throws SISessionUnavailableException, SIResourceException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "unlockAll", this);
        // Lock ordering: the async-consumer busy lock is taken before the LCP's
        // own lock; do not reorder.
        synchronized (_asynchConsumerBusyLock) {
            this.lock();
            try {
                // Only valid if the consumer session is still open
                checkNotClosed();
                try {
                    // Unlock the messages (take this lock to ensure we don't have a problem
                    // getting it if we need it to re-deliver the unlocked messages to ourselves)
                    _allLockedMessages.unlockAll();
                } catch (SIMPMessageNotLockedException e) {
                    // No FFDC code needed
                    // This exception has occurred because someone has deleted the
                    // message(s). Ignore this exception as it is unlocked anyway
                }
            } finally {
                this.unlock();
            }
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "unlockAll");
    }
}
public class MStress_Client { /** * Parses the plan file that contains parameters for the benchmark . */ private static int parsePlanFile ( ) { } }
int ret = - 1 ; try { FileInputStream fis = new FileInputStream ( planfilePath_ ) ; DataInputStream dis = new DataInputStream ( fis ) ; BufferedReader br = new BufferedReader ( new InputStreamReader ( dis ) ) ; if ( prefix_ . isEmpty ( ) ) { prefix_ = "PATH_PREFIX_" ; } String line ; while ( ( line = br . readLine ( ) ) != null ) { if ( line . length ( ) == 0 || line . startsWith ( "#" ) ) { continue ; } if ( line . startsWith ( "type=" ) ) { type_ = line . substring ( 5 ) ; continue ; } if ( line . startsWith ( "levels=" ) ) { levels_ = Integer . parseInt ( line . substring ( 7 ) ) ; continue ; } if ( line . startsWith ( "inodes=" ) ) { inodesPerLevel_ = Integer . parseInt ( line . substring ( 7 ) ) ; continue ; } if ( line . startsWith ( "nstat=" ) ) { pathsToStat_ = Integer . parseInt ( line . substring ( 6 ) ) ; continue ; } } dis . close ( ) ; if ( levels_ > 0 && ! type_ . isEmpty ( ) && inodesPerLevel_ > 0 && pathsToStat_ > 0 ) { ret = 0 ; } } catch ( Exception e ) { System . out . println ( "Error: " + e . getMessage ( ) ) ; } return ret ;
public class CassandraEmbeddedConfigParameters { /** * Default values */ static TypedMap mergeWithDefaultParameters ( TypedMap parameters ) { } }
TypedMap defaultParams = new TypedMap ( ) ; defaultParams . put ( LISTEN_ADDRESS , DEFAULT_CASSANDRA_EMBEDDED_LISTEN_ADDRESS ) ; defaultParams . put ( RPC_ADDRESS , DEFAULT_CASSANDRA_EMBEDDED_RPC_ADDRESS ) ; defaultParams . put ( BROADCAST_ADDRESS , DEFAULT_CASSANDRA_EMBEDDED_BROADCAST_ADDRESS ) ; defaultParams . put ( BROADCAST_RPC_ADDRESS , DEFAULT_CASSANDRA_EMBEDDED_BROADCAST_RPC_ADDRESS ) ; defaultParams . put ( USE_UNSAFE_CASSANDRA_DAEMON , false ) ; defaultParams . put ( CLEAN_CASSANDRA_DATA_FILES , true ) ; defaultParams . put ( CLEAN_CASSANDRA_CONFIG_FILE , true ) ; defaultParams . put ( DATA_FILE_FOLDER , DEFAULT_ACHILLES_TEST_DATA_FOLDER ) ; defaultParams . put ( COMMIT_LOG_FOLDER , DEFAULT_ACHILLES_TEST_COMMIT_LOG_FOLDER ) ; defaultParams . put ( SAVED_CACHES_FOLDER , DEFAULT_ACHILLES_TEST_SAVED_CACHES_FOLDER ) ; defaultParams . put ( HINTS_FOLDER , DEFAULT_ACHILLES_TEST_HINTS_FOLDER ) ; defaultParams . put ( CDC_RAW_FOLDER , DEFAULT_ACHILLES_TEST_CDC_RAW_FOLDER ) ; defaultParams . put ( LOGBACK_FILE , DEFAULT_CASSANDRA_EMBEDDED_LOGBACK_FILE ) ; defaultParams . put ( CLUSTER_NAME , DEFAULT_CASSANDRA_EMBEDDED_CLUSTER_NAME ) ; defaultParams . put ( DEFAULT_KEYSPACE_NAME , DEFAULT_CASSANDRA_EMBEDDED_KEYSPACE_NAME ) ; defaultParams . put ( KEYSPACE_DURABLE_WRITE , DEFAULT_CASSANDRA_EMBEDDED_KEYSPACE_DURABLE_WRITE ) ; defaultParams . put ( COMPRESSION_TYPE , ProtocolOptions . Compression . NONE ) ; defaultParams . put ( LOAD_BALANCING_POLICY , Policies . defaultLoadBalancingPolicy ( ) ) ; defaultParams . put ( RETRY_POLICY , Policies . defaultRetryPolicy ( ) ) ; defaultParams . put ( RECONNECTION_POLICY , Policies . defaultReconnectionPolicy ( ) ) ; defaultParams . putAll ( parameters ) ; return defaultParams ;
public class Types { /** * / * Should check for strict java here and limit to isJavaAssignable ( ) */ static boolean isSignatureAssignable ( Class [ ] from , Class [ ] to , int round ) { } }
if ( round != JAVA_VARARGS_ASSIGNABLE && from . length != to . length ) return false ; switch ( round ) { case JAVA_BASE_ASSIGNABLE : for ( int i = 0 ; i < from . length ; i ++ ) if ( ! isJavaBaseAssignable ( to [ i ] , from [ i ] ) ) return false ; return true ; case JAVA_BOX_TYPES_ASSIGABLE : for ( int i = 0 ; i < from . length ; i ++ ) if ( ! isJavaBoxTypesAssignable ( to [ i ] , from [ i ] ) ) return false ; return true ; case JAVA_VARARGS_ASSIGNABLE : return isSignatureVarargsAssignable ( from , to ) ; case BSH_ASSIGNABLE : for ( int i = 0 ; i < from . length ; i ++ ) if ( ! isBshAssignable ( to [ i ] , from [ i ] ) ) return false ; return true ; default : throw new InterpreterError ( "bad case" ) ; }
public class BasePath { /** * Checks to see if this path is a prefix of ( or equals ) another path . * @ param path the path to check against * @ return true if current path is a prefix of the other path . */ boolean isPrefixOf ( BasePath < B > path ) { } }
ImmutableList < String > prefixSegments = getSegments ( ) ; ImmutableList < String > childSegments = path . getSegments ( ) ; if ( prefixSegments . size ( ) > path . getSegments ( ) . size ( ) ) { return false ; } for ( int i = 0 ; i < prefixSegments . size ( ) ; i ++ ) { if ( ! prefixSegments . get ( i ) . equals ( childSegments . get ( i ) ) ) { return false ; } } return true ;
public class Tomcat5ServerXML { /** * Sets the port and keystore information on the SSL connector if it already * exists ; creates a new SSL connector , otherwise . Also sets the * redirectPort on the non - SSL connector to match . * @ throws InstallationFailedException */ public void setSSLPort ( ) throws InstallationFailedException { } }
// Locate the existing HTTPS connector (may be absent). When SSL is enabled:
// create the connector under the Catalina service with baseline thread/TLS
// attributes if missing, then (re)apply the SSL port, keystore file/password/
// type (the INCLUDED sentinel maps to the bundled keystore location), and
// mirror the SSL port onto the non-SSL connector's redirectPort — failing the
// install if the HTTP connector cannot be found. When SSL is disabled: remove
// any existing HTTPS connector from its parent.
// NOTE(review): "enableLookups" is forced to true to support client DNS/FQDN
// in XACML authz policies (per the inline comment below).
Element httpsConnector = ( Element ) getDocument ( ) . selectSingleNode ( HTTPS_CONNECTOR_XPATH ) ; if ( options . getBooleanValue ( InstallOptions . SSL_AVAILABLE , true ) ) { if ( httpsConnector == null ) { Element service = ( Element ) getDocument ( ) . selectSingleNode ( "/Server/Service[@name='Catalina']" ) ; httpsConnector = service . addElement ( "Connector" ) ; httpsConnector . addAttribute ( "maxThreads" , "150" ) ; httpsConnector . addAttribute ( "minSpareThreads" , "25" ) ; httpsConnector . addAttribute ( "maxSpareThreads" , "75" ) ; httpsConnector . addAttribute ( "disableUploadTimeout" , "true" ) ; httpsConnector . addAttribute ( "acceptCount" , "100" ) ; httpsConnector . addAttribute ( "debug" , "0" ) ; httpsConnector . addAttribute ( "scheme" , "https" ) ; httpsConnector . addAttribute ( "secure" , "true" ) ; httpsConnector . addAttribute ( "clientAuth" , "false" ) ; httpsConnector . addAttribute ( "sslProtocol" , "TLS" ) ; } httpsConnector . addAttribute ( "port" , options . getValue ( InstallOptions . TOMCAT_SSL_PORT ) ) ; httpsConnector . addAttribute ( "enableLookups" , "true" ) ; // supports client dns / fqdn in xacml authz policies String keystore = options . getValue ( InstallOptions . KEYSTORE_FILE ) ; if ( keystore . equals ( InstallOptions . INCLUDED ) ) { keystore = KEYSTORE_LOCATION ; } addAttribute ( httpsConnector , "keystoreFile" , keystore , InstallOptions . DEFAULT ) ; addAttribute ( httpsConnector , "keystorePass" , options . getValue ( InstallOptions . KEYSTORE_PASSWORD ) , KEYSTORE_PASSWORD_DEFAULT ) ; addAttribute ( httpsConnector , "keystoreType" , options . getValue ( InstallOptions . KEYSTORE_TYPE ) , KEYSTORE_TYPE_DEFAULT ) ; // The redirectPort for the non - SSL connector should match the port on // the SSL connector , per : // http : / / tomcat . apache . org / tomcat - 5.0 - doc / ssl - howto . html Element httpConnector = ( Element ) getDocument ( ) . 
selectSingleNode ( HTTP_CONNECTOR_XPATH ) ; if ( httpConnector != null ) { httpConnector . addAttribute ( "redirectPort" , options . getValue ( InstallOptions . TOMCAT_SSL_PORT ) ) ; } else { throw new InstallationFailedException ( "Unable to set server.xml SSL Port. XPath for Connector element failed." ) ; } } else if ( httpsConnector != null ) { httpsConnector . getParent ( ) . remove ( httpsConnector ) ; }
public class CreateCacheClusterRequest { /** * A list of the Availability Zones in which cache nodes are created . The order of the zones in the list is not * important . * This option is only supported on Memcached . * < note > * If you are creating your cluster in an Amazon VPC ( recommended ) you can only locate nodes in Availability Zones * that are associated with the subnets in the selected subnet group . * The number of Availability Zones listed must equal the value of < code > NumCacheNodes < / code > . * < / note > * If you want all the nodes in the same Availability Zone , use < code > PreferredAvailabilityZone < / code > instead , or * repeat the Availability Zone multiple times in the list . * Default : System chosen Availability Zones . * @ param preferredAvailabilityZones * A list of the Availability Zones in which cache nodes are created . The order of the zones in the list is * not important . < / p > * This option is only supported on Memcached . * < note > * If you are creating your cluster in an Amazon VPC ( recommended ) you can only locate nodes in Availability * Zones that are associated with the subnets in the selected subnet group . * The number of Availability Zones listed must equal the value of < code > NumCacheNodes < / code > . * < / note > * If you want all the nodes in the same Availability Zone , use < code > PreferredAvailabilityZone < / code > * instead , or repeat the Availability Zone multiple times in the list . * Default : System chosen Availability Zones . */ public void setPreferredAvailabilityZones ( java . util . Collection < String > preferredAvailabilityZones ) { } }
if ( preferredAvailabilityZones == null ) { this . preferredAvailabilityZones = null ; return ; } this . preferredAvailabilityZones = new com . amazonaws . internal . SdkInternalList < String > ( preferredAvailabilityZones ) ;
public class TypeLexer { /** * $ ANTLR start " BEGINTYPEPARAM " */ public final void mBEGINTYPEPARAM ( ) throws RecognitionException { } }
try { int _type = BEGINTYPEPARAM ; int _channel = DEFAULT_TOKEN_CHANNEL ; // org / javaruntype / type / parser / Type . g : 32:16 : ( ' < ' ) // org / javaruntype / type / parser / Type . g : 32:18 : ' < ' { match ( '<' ) ; } state . type = _type ; state . channel = _channel ; } finally { }
public class BackupRole { /** * Requests a restore from the primary . */ private void requestRestore ( MemberId primary ) { } }
context . protocol ( ) . restore ( primary , RestoreRequest . request ( context . descriptor ( ) , context . currentTerm ( ) ) ) . whenCompleteAsync ( ( response , error ) -> { if ( error == null && response . status ( ) == PrimaryBackupResponse . Status . OK ) { context . resetIndex ( response . index ( ) , response . timestamp ( ) ) ; Buffer buffer = HeapBuffer . wrap ( response . data ( ) ) ; int sessions = buffer . readInt ( ) ; for ( int i = 0 ; i < sessions ; i ++ ) { context . getOrCreateSession ( buffer . readLong ( ) , MemberId . from ( buffer . readString ( ) ) ) ; } context . service ( ) . restore ( new DefaultBackupInput ( buffer , context . service ( ) . serializer ( ) ) ) ; operations . clear ( ) ; } } , context . threadContext ( ) ) ;
public class LocalizationDefinitionImpl { public long getAlterationTime ( ) { } }
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "getAlterationTime" , this . _alterationTime ) ; if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) { Long l = new Long ( _alterationTime ) ; SibTr . exit ( tc , "getAlterationTime" , l . toString ( ) ) ; } return _alterationTime ;
public class CmsXmlContentEditor { /** * Generates the JavaScript initialization calls for the used widgets in the editor form . < p > * @ return the JavaScript initialization calls for the used widgets * @ throws JspException if including the error page fails */ public String getXmlEditorInitCalls ( ) throws JspException { } }
StringBuffer result = new StringBuffer ( 512 ) ; try { // iterate over unique widgets from collector Iterator < I_CmsWidget > i = getWidgetCollector ( ) . getUniqueWidgets ( ) . iterator ( ) ; while ( i . hasNext ( ) ) { I_CmsWidget widget = i . next ( ) ; result . append ( widget . getDialogInitCall ( getCms ( ) , this ) ) ; } } catch ( Exception e ) { showErrorPage ( e ) ; } return result . toString ( ) ;
public class VueGWTTools { /** * Proxy a field on an instance . Use the same mechanism as Vue . js proxy method . For example : * this . _ data . myField can be proxied as this . myTargetField . Getting / Setting this . myTargetField * will be the same as getting / setting this . _ data . myField . * @ param target Object we proxy on * @ param sourceKey The key in target from which we want to proxy a property * @ param sourceProperty The source property we want to proxy * @ param targetProperty The name of the target property on target */ public static void proxyField ( Object target , String sourceKey , String sourceProperty , String targetProperty ) { } }
PROXY_SHARED_DEFINITION . set ( "set" , new Function ( "this[\"" + sourceKey + "\"][\"" + sourceProperty + "\"] = arguments[0];" ) ) ; PROXY_SHARED_DEFINITION . set ( "get" , new Function ( "return this[\"" + sourceKey + "\"][\"" + sourceProperty + "\"];" ) ) ; JsObject . defineProperty ( target , targetProperty , PROXY_SHARED_DEFINITION ) ;
public class DiskMarshaller { /** * Marshall the given parameter object . */ public void marshall ( Disk disk , ProtocolMarshaller protocolMarshaller ) { } }
// Rejects a null Disk, then marshalls each field to its protocol binding in
// turn (name, arn, support code, creation time, location, resource type, tags,
// size, system-disk flag, iops, path, state, attachment info, GB in use). Any
// failure is rethrown as SdkClientException with the original exception
// preserved as the cause.
if ( disk == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( disk . getName ( ) , NAME_BINDING ) ; protocolMarshaller . marshall ( disk . getArn ( ) , ARN_BINDING ) ; protocolMarshaller . marshall ( disk . getSupportCode ( ) , SUPPORTCODE_BINDING ) ; protocolMarshaller . marshall ( disk . getCreatedAt ( ) , CREATEDAT_BINDING ) ; protocolMarshaller . marshall ( disk . getLocation ( ) , LOCATION_BINDING ) ; protocolMarshaller . marshall ( disk . getResourceType ( ) , RESOURCETYPE_BINDING ) ; protocolMarshaller . marshall ( disk . getTags ( ) , TAGS_BINDING ) ; protocolMarshaller . marshall ( disk . getSizeInGb ( ) , SIZEINGB_BINDING ) ; protocolMarshaller . marshall ( disk . getIsSystemDisk ( ) , ISSYSTEMDISK_BINDING ) ; protocolMarshaller . marshall ( disk . getIops ( ) , IOPS_BINDING ) ; protocolMarshaller . marshall ( disk . getPath ( ) , PATH_BINDING ) ; protocolMarshaller . marshall ( disk . getState ( ) , STATE_BINDING ) ; protocolMarshaller . marshall ( disk . getAttachedTo ( ) , ATTACHEDTO_BINDING ) ; protocolMarshaller . marshall ( disk . getIsAttached ( ) , ISATTACHED_BINDING ) ; protocolMarshaller . marshall ( disk . getAttachmentState ( ) , ATTACHMENTSTATE_BINDING ) ; protocolMarshaller . marshall ( disk . getGbInUse ( ) , GBINUSE_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class S3CryptoModuleBase { /** * { @ inheritDoc } * < b > NOTE : < / b > Because the encryption process requires context from * previous blocks , parts uploaded with the AmazonS3EncryptionClient ( as * opposed to the normal AmazonS3Client ) must be uploaded serially , and in * order . Otherwise , the previous encryption context isn ' t available to use * when encrypting the current part . */ @ Override public UploadPartResult uploadPartSecurely ( UploadPartRequest req ) { } }
// Validates that every non-final part's size is a multiple of the cipher
// block size, looks up the client-side multipart context by upload id
// (failing if absent), and serializes part uploads via beginPartUpload/
// endPartUpload. The request's file/stream source is replaced by a cipher
// stream wrapper before delegating to s3.uploadPart; the original source is
// cleaned up in the finally block. For the last part, the part size is
// adjusted (the encrypted last part carries extra authentication bytes — see
// computeLastPartSize) and a duplicate "last part" marker is rejected.
// NOTE(review): statement order here is load-bearing — the cipher context is
// chained across parts (cipherLiteForNextPart / updateUploadContext), so do
// not reorder.
appendUserAgent ( req , USER_AGENT ) ; final int blockSize = contentCryptoScheme . getBlockSizeInBytes ( ) ; final boolean isLastPart = req . isLastPart ( ) ; final String uploadId = req . getUploadId ( ) ; final long partSize = req . getPartSize ( ) ; final boolean partSizeMultipleOfCipherBlockSize = 0 == ( partSize % blockSize ) ; if ( ! isLastPart && ! partSizeMultipleOfCipherBlockSize ) { throw new SdkClientException ( "Invalid part size: part sizes for encrypted multipart uploads must be multiples " + "of the cipher block size (" + blockSize + ") with the exception of the last part." ) ; } final T uploadContext = multipartUploadContexts . get ( uploadId ) ; if ( uploadContext == null ) { throw new SdkClientException ( "No client-side information available on upload ID " + uploadId ) ; } final UploadPartResult result ; // Checks the parts are uploaded in series uploadContext . beginPartUpload ( req . getPartNumber ( ) ) ; CipherLite cipherLite = cipherLiteForNextPart ( uploadContext ) ; final File fileOrig = req . getFile ( ) ; final InputStream isOrig = req . getInputStream ( ) ; SdkFilterInputStream isCurr = null ; try { CipherLiteInputStream clis = newMultipartS3CipherInputStream ( req , cipherLite ) ; isCurr = clis ; // so the clis will be closed ( in the finally block below ) upon // unexpected failure should we opened a file undereath isCurr = wrapForMultipart ( clis , partSize ) ; req . setInputStream ( isCurr ) ; // Treat all encryption requests as input stream upload requests , // not as file upload requests . req . setFile ( null ) ; req . setFileOffset ( 0 ) ; // The last part of the multipart upload will contain an extra // 16 - byte mac if ( isLastPart ) { // We only change the size of the last part long lastPartSize = computeLastPartSize ( req ) ; if ( lastPartSize > - 1 ) req . setPartSize ( lastPartSize ) ; if ( uploadContext . 
hasFinalPartBeenSeen ( ) ) { throw new SdkClientException ( "This part was specified as the last part in a multipart upload, but a previous part was already marked as the last part. " + "Only the last part of the upload should be marked as the last part." ) ; } } result = s3 . uploadPart ( req ) ; } finally { cleanupDataSource ( req , fileOrig , isOrig , isCurr , log ) ; uploadContext . endPartUpload ( ) ; } if ( isLastPart ) uploadContext . setHasFinalPartBeenSeen ( true ) ; updateUploadContext ( uploadContext , isCurr ) ; return result ;
public class Admin { /** * @ throws PageException */ private void doGetDatasources ( ) throws PageException { } }
// Builds a query with one row per configured datasource and stores it in the
// page variable named by the "returnVariable" argument. For each datasource it
// records identity (name/host/class/bundle/dsn), connection settings (port,
// timeouts, limits, open connection count), credentials (password both plain
// and encrypted), the per-statement permission flags (select/insert/update/
// delete/create/drop/grant/revoke/alter), and misc flags (blob/clob/validate/
// storage/custom settings/meta cache timeout). Ports, connection limits and
// timeouts below 1 are rendered as empty strings.
// NOTE(review): iterates the raw keySet and looks each value up again; the
// keys are assumed non-null. Kept byte-identical — restructuring this block
// risks subtly changing column population order.
Map ds = config . getDataSourcesAsMap ( ) ; Iterator it = ds . keySet ( ) . iterator ( ) ; lucee . runtime . type . Query qry = new QueryImpl ( new String [ ] { "name" , "host" , "classname" , "bundleName" , "bundleVersion" , "dsn" , "DsnTranslated" , "database" , "port" , "timezone" , "username" , "password" , "passwordEncrypted" , "readonly" , "grant" , "drop" , "create" , "revoke" , "alter" , "select" , "delete" , "update" , "insert" , "connectionLimit" , "openConnections" , "connectionTimeout" , "clob" , "blob" , "validate" , "storage" , "customSettings" , "metaCacheTimeout" } , ds . size ( ) , "query" ) ; int row = 0 ; while ( it . hasNext ( ) ) { Object key = it . next ( ) ; DataSource d = ( DataSource ) ds . get ( key ) ; row ++ ; qry . setAt ( KeyConstants . _name , row , key ) ; qry . setAt ( KeyConstants . _host , row , d . getHost ( ) ) ; qry . setAt ( "classname" , row , d . getClassDefinition ( ) . getClassName ( ) ) ; qry . setAt ( "bundleName" , row , d . getClassDefinition ( ) . getName ( ) ) ; qry . setAt ( "bundleVersion" , row , d . getClassDefinition ( ) . getVersionAsString ( ) ) ; qry . setAt ( "dsn" , row , d . getDsnOriginal ( ) ) ; qry . setAt ( "database" , row , d . getDatabase ( ) ) ; qry . setAt ( KeyConstants . _port , row , d . getPort ( ) < 1 ? "" : Caster . toString ( d . getPort ( ) ) ) ; qry . setAt ( "dsnTranslated" , row , d . getDsnTranslated ( ) ) ; qry . setAt ( "timezone" , row , toStringTimeZone ( d . getTimeZone ( ) ) ) ; qry . setAt ( KeyConstants . _password , row , d . getPassword ( ) ) ; qry . setAt ( "passwordEncrypted" , row , ConfigWebUtil . encrypt ( d . getPassword ( ) ) ) ; qry . setAt ( KeyConstants . _username , row , d . getUsername ( ) ) ; qry . setAt ( KeyConstants . _readonly , row , Caster . toBoolean ( d . isReadOnly ( ) ) ) ; qry . setAt ( KeyConstants . _select , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_SELECT ) ) ) ; qry . setAt ( KeyConstants . _delete , row , Boolean . 
valueOf ( d . hasAllow ( DataSource . ALLOW_DELETE ) ) ) ; qry . setAt ( KeyConstants . _update , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_UPDATE ) ) ) ; qry . setAt ( KeyConstants . _create , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_CREATE ) ) ) ; qry . setAt ( KeyConstants . _insert , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_INSERT ) ) ) ; qry . setAt ( KeyConstants . _drop , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_DROP ) ) ) ; qry . setAt ( KeyConstants . _grant , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_GRANT ) ) ) ; qry . setAt ( KeyConstants . _revoke , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_REVOKE ) ) ) ; qry . setAt ( KeyConstants . _alter , row , Boolean . valueOf ( d . hasAllow ( DataSource . ALLOW_ALTER ) ) ) ; int oc = config . getDatasourceConnectionPool ( ) . openConnections ( key . toString ( ) ) ; qry . setAt ( "openConnections" , row , oc < 0 ? 0 : oc ) ; qry . setAt ( "connectionLimit" , row , d . getConnectionLimit ( ) < 1 ? "" : Caster . toString ( d . getConnectionLimit ( ) ) ) ; qry . setAt ( "connectionTimeout" , row , d . getConnectionTimeout ( ) < 1 ? "" : Caster . toString ( d . getConnectionTimeout ( ) ) ) ; qry . setAt ( "customSettings" , row , d . getCustoms ( ) ) ; qry . setAt ( "blob" , row , Boolean . valueOf ( d . isBlob ( ) ) ) ; qry . setAt ( "clob" , row , Boolean . valueOf ( d . isClob ( ) ) ) ; qry . setAt ( "validate" , row , Boolean . valueOf ( d . validate ( ) ) ) ; qry . setAt ( "storage" , row , Boolean . valueOf ( d . isStorage ( ) ) ) ; qry . setAt ( "metaCacheTimeout" , row , Caster . toDouble ( d . getMetaCacheTimeout ( ) ) ) ; } pageContext . setVariable ( getString ( "admin" , action , "returnVariable" ) , qry ) ;
public class LightWeightHashSet { /** * Remove all elements from the set and return them . Clear the entries . */ public List < T > pollAll ( ) { } }
List < T > retList = new ArrayList < T > ( size ) ; for ( int i = 0 ; i < entries . length ; i ++ ) { LinkedElement < T > current = entries [ i ] ; while ( current != null ) { retList . add ( current . element ) ; current = current . next ; } } this . clear ( ) ; return retList ;
public class ClassLister {
    /**
     * Updates the cache entry for the given superclass.
     * If no entry exists yet, the supplied set instance itself is stored (not
     * a copy); otherwise the names are added to the existing set.
     *
     * @param cache the cache to update
     * @param superclass the superclass key
     * @param names the names to add
     */
    protected void updateClasses(HashMap<String, HashSet<Class>> cache, String superclass,
            HashSet<Class> names) {
        if (cache.containsKey(superclass)) {
            cache.get(superclass).addAll(names);
        } else {
            cache.put(superclass, names);
        }
    }
}
public class FormatterFacade { /** * Format the given code . * @ param sarlCode the code to format . * @ param resourceSet the resource set that sohuld contains the code . This resource set may be * used for resolving types by the underlying code . * @ return the code to format . */ @ Pure public String format ( String sarlCode , ResourceSet resourceSet ) { } }
try { final URI createURI = URI . createURI ( "synthetic://to-be-formatted." + this . fileExtension ) ; // $ NON - NLS - 1 $ final Resource res = this . resourceFactory . createResource ( createURI ) ; if ( res instanceof XtextResource ) { final XtextResource resource = ( XtextResource ) res ; final EList < Resource > resources = resourceSet . getResources ( ) ; resources . add ( resource ) ; try ( StringInputStream stringInputStream = new StringInputStream ( sarlCode ) ) { resource . load ( stringInputStream , Collections . emptyMap ( ) ) ; return formatResource ( resource ) ; } finally { resources . remove ( resource ) ; } } return sarlCode ; } catch ( Exception exception ) { throw Exceptions . sneakyThrow ( exception ) ; }
public class ToolboxAvailable { /** * Checks if the toolbox if available . * If it is , the toolbox is started . * If it isn ' t , an error message is shown . */ public static void main ( String [ ] args ) { } }
if ( GraphicsEnvironment . isHeadless ( ) ) { System . out . println ( Document . getVersion ( ) + " Toolbox error: headless display" ) ; } else try { Class c = Class . forName ( "com.lowagie.toolbox.Toolbox" ) ; Method toolboxMain = c . getMethod ( "main" , new Class [ ] { args . getClass ( ) } ) ; toolboxMain . invoke ( null , new Object [ ] { args } ) ; } catch ( Exception e ) { JOptionPane . showMessageDialog ( null , "You need the iText-toolbox.jar with class com.lowagie.toolbox.Toolbox to use the iText Toolbox." , Document . getVersion ( ) + " Toolbox error" , JOptionPane . ERROR_MESSAGE ) ; }
public class CommonUtils {
    /**
     * --- COMPRESS / DECOMPRESS ---
     *
     * Compresses the given bytes as a raw (nowrap) deflate stream.
     *
     * @param data the bytes to compress
     * @param level the compression level (0-9, see {@link java.util.zip.Deflater})
     * @return the compressed bytes
     * @throws IOException declared for API compatibility
     */
    public static final byte[] compress(byte[] data, int level) throws IOException {
        final Deflater deflater = new Deflater(level, true);
        try {
            deflater.setInput(data);
            deflater.finish();
            // Fix: loop until the deflater reports completion, growing the
            // output buffer as needed. The original used a single fixed-size
            // buffer of data.length + 128 bytes, which can silently truncate
            // the result for large incompressible inputs where deflate expands
            // the data by more than 128 bytes.
            byte[] out = new byte[Math.max(64, data.length / 2)];
            int length = 0;
            while (!deflater.finished()) {
                if (length == out.length) {
                    byte[] bigger = new byte[out.length * 2];
                    System.arraycopy(out, 0, bigger, 0, length);
                    out = bigger;
                }
                length += deflater.deflate(out, length, out.length - length);
            }
            final byte[] compressed = new byte[length];
            System.arraycopy(out, 0, compressed, 0, length);
            return compressed;
        } finally {
            // Fix: release the deflater's native resources.
            deflater.end();
        }
    }
}
public class ModelExt { /** * Clone new instance [ wrapper clone ] , just link attrs values * @ param attrs */ public M cp ( String ... attrs ) { } }
M m = this . cp ( false ) ; for ( String attr : attrs ) { m . set ( attr , this . get ( attr ) ) ; } return m ;
public class JsonConfig { /** * Registers a PropertyNameProcessor . < br > * [ JSON - & gt ; Java ] * @ param target the class to use as key * @ param propertyNameProcessor the processor to register */ public void registerJavaPropertyNameProcessor ( Class target , PropertyNameProcessor propertyNameProcessor ) { } }
if ( target != null && propertyNameProcessor != null ) { javaPropertyNameProcessorMap . put ( target , propertyNameProcessor ) ; }
public class PartiturVisualizer {
    /**
     * We need to know in which place of the DOM the media visualizers are
     * plugged in, so we can call the seekAndPlay() function with the help of
     * PartiturVisualizer.js.
     *
     * @param mediaIDs the media element ids; may be null
     * @return a string which represents a JavaScript array declaration, or the
     *         empty string when {@code mediaIDs} is null
     */
    private String convertToJavacSriptArray(List<String> mediaIDs) {
        // In case there is no media visualizer, do not build an array.
        if (mediaIDs == null) {
            return "";
        }
        StringBuilder sb = new StringBuilder("\nvar mediaIDs = [ ");
        int size = mediaIDs.size();
        for (int i = 0; i < size; i++) {
            sb.append('"').append(mediaIDs.get(i)).append('"');
            // Fix (idiom): the original separator check was the convoluted
            // !(size - 1 - i == 0); this is simply "not the last element".
            if (i < size - 1) {
                sb.append(", ");
            }
        }
        return sb.append(" ];\n").toString();
    }
}
public class AbstractBpmnActivityBehavior { /** * Takes an { @ link ActivityExecution } and an { @ link Callable } and wraps * the call to the Callable with the proper error propagation . This method * also makes sure that exceptions not caught by following activities in the * process will be thrown and not propagated . * @ param execution * @ param toExecute * @ throws Exception */ protected void executeWithErrorPropagation ( ActivityExecution execution , Callable < Void > toExecute ) throws Exception { } }
// Captures the activity instance id before invoking the Callable. On failure:
// if the execution is still in the same activity instance, the exception is
// routed through propagateException(); should propagation itself fail
// (ErrorPropagationException), the *original* exception is rethrown so it is
// the one logged and recorded as the failure cause. If the activity instance
// has changed (a following activity is already executing), the exception is
// rethrown unchanged so later activities cannot swallow it.
String activityInstanceId = execution . getActivityInstanceId ( ) ; try { toExecute . call ( ) ; } catch ( Exception ex ) { if ( activityInstanceId . equals ( execution . getActivityInstanceId ( ) ) ) { try { propagateException ( execution , ex ) ; } catch ( ErrorPropagationException e ) { LOG . errorPropagationException ( activityInstanceId , e . getCause ( ) ) ; // re - throw the original exception so that it is logged // and set as cause of the failure throw ex ; } } else { throw ex ; } }
public class HeadendResultConverter { /** * Method that can be used to find out actual input ( source ) type ; this * usually can be determined from type parameters , but may need * to be implemented differently from programmatically defined * converters ( which can not change static type parameter bindings ) . * @ param typeFactory * @ since 2.2 */ @ Override public JavaType getInputType ( TypeFactory typeFactory ) { } }
return typeFactory . constructMapType ( HashMap . class , String . class , Headend . class ) ;
public class HBaseClientTemplate { /** * Register a GetModifier to be called before every Get is executed on HBase . * This GetModifier will be replaced if already registered and added if not . * Equality is checked by calling the equals ( ) on the getModifier passed . * GetModifiers will be called in the order they are added to the * template , so if any modifier is destructive , it must be added in the right * order . * Not Thread Safe * @ param getModifier The GetModifier to register . */ public void registerGetModifier ( GetModifier getModifier ) { } }
int currentIndex = getModifiers . indexOf ( getModifier ) ; if ( currentIndex == - 1 ) { getModifiers . add ( getModifier ) ; } else { getModifiers . set ( currentIndex , getModifier ) ; }
public class UnusedCodeTracker { /** * Mark all ancestor classes of ( sub ) class as used */ public void markParentClasses ( TypeElement type ) { } }
while ( type != null ) { String typeID = ElementReferenceMapper . stitchClassIdentifier ( type , env . elementUtil ( ) ) ; ReferenceNode node = elementReferenceMap . get ( typeID ) ; if ( node == null ) { ErrorUtil . warning ( "Encountered .class parent class while accessing: " + typeID ) ; return ; } if ( node . reachable ) { return ; } node . reachable = true ; if ( ElementUtil . isStatic ( type ) ) { return ; } type = ElementUtil . getDeclaringClass ( type ) ; }
public class DataUtil { /** * big - endian or motorola format . */ public static void writeUnsignedShortBigEndian ( IO . WritableByteStream io , int value ) throws IOException { } }
io . write ( ( byte ) ( ( value >> 8 ) & 0xFF ) ) ; io . write ( ( byte ) ( value & 0xFF ) ) ;
public class TaskInProgress { /** * Creates a " status report " for this task . Includes the * task ID and overall status , plus reports for all the * component task - threads that have ever been started . */ synchronized TaskReport generateSingleReport ( ) { } }
// Flattens every attempt's diagnostic messages into one list, then derives
// the TIP status. The check order matters: RUNNING (running and not complete)
// is tested before COMPLETE, then KILLED, then FAILED, and finally PENDING
// when none of those states apply. The report is built from the tip id,
// progress, state, diagnostics, derived status, start/finish times and
// counters; running attempt ids or the successful attempt id are attached
// depending on the derived status.
ArrayList < String > diagnostics = new ArrayList < String > ( ) ; for ( List < String > l : taskDiagnosticData . values ( ) ) { diagnostics . addAll ( l ) ; } TIPStatus currentStatus = null ; if ( isRunning ( ) && ! isComplete ( ) ) { currentStatus = TIPStatus . RUNNING ; } else if ( isComplete ( ) ) { currentStatus = TIPStatus . COMPLETE ; } else if ( wasKilled ( ) ) { currentStatus = TIPStatus . KILLED ; } else if ( isFailed ( ) ) { currentStatus = TIPStatus . FAILED ; } else if ( ! ( isComplete ( ) || isRunning ( ) || wasKilled ( ) ) ) { currentStatus = TIPStatus . PENDING ; } TaskReport report = new TaskReport ( getTIPId ( ) , ( float ) progress , state , diagnostics . toArray ( new String [ diagnostics . size ( ) ] ) , currentStatus , execStartTime , execFinishTime , counters ) ; if ( currentStatus == TIPStatus . RUNNING ) { report . setRunningTaskAttempts ( activeTasks . keySet ( ) ) ; } else if ( currentStatus == TIPStatus . COMPLETE ) { report . setSuccessfulAttempt ( getSuccessfulTaskid ( ) ) ; } return report ;
public class JmxMonitorRegistry { /** * Register a new monitor in the registry . */ @ Override public void register ( Monitor < ? > monitor ) { } }
try { List < MonitorMBean > beans = MonitorMBean . createMBeans ( name , monitor , mapper ) ; for ( MonitorMBean bean : beans ) { register ( bean . getObjectName ( ) , bean ) ; } monitors . put ( monitor . getConfig ( ) , monitor ) ; updatePending . set ( true ) ; } catch ( Exception e ) { LOG . warn ( "Unable to register Monitor:" + monitor . getConfig ( ) , e ) ; }
public class MSPDIReader { /** * This method extracts data for a single predecessor from an MSPDI file . * @ param currTask Current task object * @ param link Predecessor data */ private void readPredecessor ( Task currTask , Project . Tasks . Task . PredecessorLink link ) { } }
// Resolves the predecessor task by its unique ID; a missing UID or unknown
// task is silently ignored. The relation type defaults to FINISH_START when
// absent. Lag handling: percent-based lag units keep the raw value, while any
// other unit treats the stored lag as tenths of minutes (hence lag / 10.0
// converted from MINUTES to the target units). Fires a relation-read event
// after the predecessor relation is added.
BigInteger uid = link . getPredecessorUID ( ) ; if ( uid != null ) { Task prevTask = m_projectFile . getTaskByUniqueID ( Integer . valueOf ( uid . intValue ( ) ) ) ; if ( prevTask != null ) { RelationType type ; if ( link . getType ( ) != null ) { type = RelationType . getInstance ( link . getType ( ) . intValue ( ) ) ; } else { type = RelationType . FINISH_START ; } TimeUnit lagUnits = DatatypeConverter . parseDurationTimeUnits ( link . getLagFormat ( ) ) ; Duration lagDuration ; int lag = NumberHelper . getInt ( link . getLinkLag ( ) ) ; if ( lag == 0 ) { lagDuration = Duration . getInstance ( 0 , lagUnits ) ; } else { if ( lagUnits == TimeUnit . PERCENT || lagUnits == TimeUnit . ELAPSED_PERCENT ) { lagDuration = Duration . getInstance ( lag , lagUnits ) ; } else { lagDuration = Duration . convertUnits ( lag / 10.0 , TimeUnit . MINUTES , lagUnits , m_projectFile . getProjectProperties ( ) ) ; } } Relation relation = currTask . addPredecessor ( prevTask , type , lagDuration ) ; m_eventManager . fireRelationReadEvent ( relation ) ; } }
public class KeyVaultClientBaseImpl { /** * Imports a certificate into a specified key vault . * Imports an existing valid certificate , containing a private key , into Azure Key Vault . The certificate to be imported can be in either PFX or PEM format . If the certificate is in PEM format the PEM file must contain the key as well as x509 certificates . This operation requires the certificates / import permission . * @ param vaultBaseUrl The vault name , for example https : / / myvault . vault . azure . net . * @ param certificateName The name of the certificate . * @ param base64EncodedCertificate Base64 encoded representation of the certificate object to import . This certificate needs to contain the private key . * @ param password If the private key in base64EncodedCertificate is encrypted , the password used for encryption . * @ param certificatePolicy The management policy for the certificate . * @ param certificateAttributes The attributes of the certificate ( optional ) . * @ param tags Application specific metadata in the form of key - value pairs . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws KeyVaultErrorException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent * @ return the CertificateBundle object if successful . */ public CertificateBundle importCertificate ( String vaultBaseUrl , String certificateName , String base64EncodedCertificate , String password , CertificatePolicy certificatePolicy , CertificateAttributes certificateAttributes , Map < String , String > tags ) { } }
return importCertificateWithServiceResponseAsync ( vaultBaseUrl , certificateName , base64EncodedCertificate , password , certificatePolicy , certificateAttributes , tags ) . toBlocking ( ) . single ( ) . body ( ) ;
public class CacheFactory {
    /**
     * Creates the SQL cache implementation.
     *
     * <p>When no class name is supplied, a Guava-backed cache is preferred if
     * Guava is on the classpath, otherwise the built-in simple cache is used.
     * When a class name is supplied, a (Properties, String) constructor is
     * tried first, falling back to the no-arg constructor.</p>
     *
     * @param sqlCacheClass fully-qualified cache class name; may be empty/null
     * @param prefix key prefix handed to the cache implementation
     * @param properties configuration handed to the cache implementation
     * @return a new cache instance
     */
    public static <K, V> Cache<K, V> createCache(String sqlCacheClass, String prefix, Properties properties) {
        if (StringUtil.isEmpty(sqlCacheClass)) {
            try {
                // Probe for Guava on the classpath; Class.forName throws when absent.
                Class.forName("com.google.common.cache.Cache");
                return new GuavaCache<K, V>(properties, prefix);
            } catch (Throwable t) {
                // Guava unavailable (or failed to initialize): degrade to the simple cache.
                return new SimpleCache<K, V>(properties, prefix);
            }
        } else {
            try {
                Class<? extends Cache> clazz = (Class<? extends Cache>) Class.forName(sqlCacheClass);
                try {
                    // Preferred: a constructor taking (Properties, String prefix).
                    Constructor<? extends Cache> constructor = clazz.getConstructor(Properties.class, String.class);
                    return constructor.newInstance(properties, prefix);
                } catch (Exception e) {
                    // No such constructor (or it failed): fall back to the no-arg
                    // constructor. NOTE(review): the original failure 'e' is discarded
                    // here, which can mask constructor errors — confirm this is intended.
                    return clazz.newInstance();
                }
            } catch (Throwable t) {
                throw new PageException("Created Sql Cache [" + sqlCacheClass + "] Error", t);
            }
        }
    }
}
public class DefaultEquationSupport { /** * { @ inheritDoc } */ @ Override public void assignVariable ( final String variableName , final Object value ) throws Exception { } }
assignVariable ( variableName , value , false ) ;
public class CommandFactory { /** * Trainer only command . * It turns on or off the sending of auditory information to the trainer . * @ param earOn True to turn auditory information on , false to turn it off . */ public void addEarCommand ( boolean earOn ) { } }
StringBuilder buf = new StringBuilder ( ) ; buf . append ( "(ear " ) ; if ( earOn ) { buf . append ( "on" ) ; } else { buf . append ( "off" ) ; } buf . append ( ')' ) ; fifo . add ( fifo . size ( ) , buf . toString ( ) ) ;
public class SyncGroupsInner {
    /**
     * Gets a collection of sync group logs.
     *
     * @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
     * @param serverName The name of the server.
     * @param databaseName The name of the database on which the sync group is hosted.
     * @param syncGroupName The name of the sync group.
     * @param startTime Get logs generated after this time.
     * @param endTime Get logs generated before this time.
     * @param type The types of logs to retrieve. Possible values include: 'All', 'Error', 'Warning', 'Success'
     * @param continuationToken The continuation token for this operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the PagedList&lt;SyncGroupLogPropertiesInner&gt; object
     */
    public Observable<ServiceResponse<Page<SyncGroupLogPropertiesInner>>> listLogsWithServiceResponseAsync(final String resourceGroupName, final String serverName, final String databaseName, final String syncGroupName, final String startTime, final String endTime, final String type, final String continuationToken) {
        // Fetch the first page, then lazily follow each page's next-page link,
        // concatenating the resulting pages until the link is null.
        return listLogsSinglePageAsync(resourceGroupName, serverName, databaseName, syncGroupName, startTime, endTime, type, continuationToken)
                .concatMap(new Func1<ServiceResponse<Page<SyncGroupLogPropertiesInner>>, Observable<ServiceResponse<Page<SyncGroupLogPropertiesInner>>>>() {
                    @Override
                    public Observable<ServiceResponse<Page<SyncGroupLogPropertiesInner>>> call(ServiceResponse<Page<SyncGroupLogPropertiesInner>> page) {
                        String nextPageLink = page.body().nextPageLink();
                        if (nextPageLink == null) {
                            // Last page: emit it and stop.
                            return Observable.just(page);
                        }
                        // Emit this page followed (recursively) by all remaining pages.
                        return Observable.just(page).concatWith(listLogsNextWithServiceResponseAsync(nextPageLink));
                    }
                });
    }
}
public class AbstractTreeNode { /** * Fire the event for the changes node parents . * @ param event the event . */ void firePropertyParentChanged ( TreeNodeParentChangedEvent event ) { } }
if ( this . nodeListeners != null ) { for ( final TreeNodeListener listener : this . nodeListeners ) { if ( listener != null ) { listener . treeNodeParentChanged ( event ) ; } } } final N parentNode = getParentNode ( ) ; if ( parentNode != null ) { parentNode . firePropertyParentChanged ( event ) ; }
public class GeometryUtilities {
    /**
     * Uses the cosine rule to find an angle in radians of a triangle defined
     * by the length of its sides.
     *
     * <p>The calculated angle is the one between the two adjacent sides a and b.</p>
     *
     * @param a adjacent side 1 length.
     * @param b adjacent side 2 length.
     * @param c opposite side length.
     * @return the angle in radians.
     */
    public static double getAngleInTriangle(double a, double b, double c) {
        // Law of cosines: c^2 = a^2 + b^2 - 2ab*cos(gamma)
        //             =>  gamma = acos((a^2 + b^2 - c^2) / (2ab)).
        // Side lengths violating the triangle inequality push the ratio
        // outside [-1, 1], so Math.acos yields NaN for impossible triangles.
        final double cosGamma = (a * a + b * b - c * c) / (2.0 * a * b);
        return Math.acos(cosGamma);
    }
}
public class MemberUtils { /** * Checks if the the given method was user - generated . This takes into * account for instance lambda methods , that even though they are marked as * " synthetic " , they are user - generated , and therefore interesting to * analysis . * @ param m The field or method to check . * @ return True if the given member is user generated , false otherwise . */ public static boolean isUserGenerated ( final ClassMember m ) { } }
return ! m . isSynthetic ( ) || ( m instanceof XMethod && couldBeLambda ( ( XMethod ) m ) ) ;
public class ExtensionPopupMenu { /** * Defaults to call the method { @ link ExtensionPopupMenuComponent # dismissed ( ExtensionPopupMenuComponent ) * dismissed ( ExtensionPopupMenuComponent ) } on all child { @ code ExtensionPopupMenuComponent } s . * @ since 2.4.0 */ @ Override public void dismissed ( ExtensionPopupMenuComponent selectedMenuComponent ) { } }
for ( int i = 0 ; i < getMenuComponentCount ( ) ; ++ i ) { Component menuComponent = getMenuComponent ( i ) ; if ( PopupMenuUtils . isExtensionPopupMenuComponent ( menuComponent ) ) { ( ( ExtensionPopupMenuComponent ) menuComponent ) . dismissed ( selectedMenuComponent ) ; } }
public class Descriptor {
    /**
     * indexed getter for incomingLinks - gets an indexed value - List of incoming links (from other Wikipedia pages) associated with a Wikipedia page.
     * @generated
     * @param i index in the array to get
     * @return value of the element at index i
     */
    public Title getIncomingLinks(int i) {
        // UIMA JCas generated accessor — do not hand-edit the logic.
        // 1) Verify the feature exists in the current type system.
        if (Descriptor_Type.featOkTst && ((Descriptor_Type) jcasType).casFeat_incomingLinks == null)
            jcasType.jcas.throwFeatMissing("incomingLinks", "de.julielab.jules.types.wikipedia.Descriptor");
        // 2) Bounds-check index i against the underlying FS array.
        jcasType.jcas.checkArrayBounds(jcasType.ll_cas.ll_getRefValue(addr, ((Descriptor_Type) jcasType).casFeatCode_incomingLinks), i);
        // 3) Dereference the i-th element of the array and cast it to Title.
        return (Title) (jcasType.ll_cas.ll_getFSForRef(jcasType.ll_cas.ll_getRefArrayValue(jcasType.ll_cas.ll_getRefValue(addr, ((Descriptor_Type) jcasType).casFeatCode_incomingLinks), i)));
    }
}
public class CheckTemplateHeaderVarsPass {
    /**
     * Checks a single template's header variables: reports data keys referenced
     * in the template body but not declared in the header, and header
     * declarations (@param / @state) that are never referenced.
     */
    private void checkTemplate(TemplateNode node, TemplateRegistry templateRegistry) {
        // Collect every possible header-variable reference in the template body,
        // keyed by name, keeping each reference's source location for reporting.
        ListMultimap<String, SourceLocation> dataKeys = ArrayListMultimap.create();
        for (VarRefNode varRefNode : SoyTreeUtils.getAllNodesOfType(node, VarRefNode.class)) {
            if (varRefNode.isPossibleHeaderVar()) {
                dataKeys.put(varRefNode.getName(), varRefNode.getSourceLocation());
            }
        }
        IndirectParamsInfo ipi = new IndirectParamsCalculator(templateRegistry).calculateIndirectParams(templateRegistry.getMetadata(node));
        Set<String> allHeaderVarNames = new HashSet<>();
        List<TemplateHeaderVarDefn> unusedParams = new ArrayList<>();
        // Process @param header variables.
        for (TemplateParam param : node.getAllParams()) {
            allHeaderVarNames.add(param.name());
            if (dataKeys.containsKey(param.name())) {
                // Good: Declared and referenced in template. We remove these from dataKeys so
                // that at the end of the for-loop, dataKeys will only contain the keys that are referenced
                // but not declared in SoyDoc.
                dataKeys.removeAll(param.name());
            } else if (ipi.paramKeyToCalleesMultimap.containsKey(param.name()) || ipi.mayHaveIndirectParamsInExternalCalls || ipi.mayHaveIndirectParamsInExternalDelCalls) {
                // Good: Declared in SoyDoc and either (a) used in a call that passes all data or (b) used
                // in an external call or delcall that passes all data, which may need the param (we can't
                // verify).
            } else {
                // Bad: Declared in SoyDoc but not referenced in template.
                unusedParams.add(param);
            }
        }
        List<TemplateHeaderVarDefn> unusedStateVars = new ArrayList<>();
        // Process @state header variables.
        if (node instanceof TemplateElementNode) {
            TemplateElementNode el = (TemplateElementNode) node;
            for (TemplateStateVar stateVar : el.getStateVars()) {
                allHeaderVarNames.add(stateVar.name());
                if (dataKeys.containsKey(stateVar.name())) {
                    // Good: declared and referenced in the template.
                    dataKeys.removeAll(stateVar.name());
                } else {
                    // Bad: declared in the header, but not used.
                    unusedStateVars.add(stateVar);
                }
            }
        }
        // At this point, the only keys left in dataKeys are undeclared.
        for (Entry<String, SourceLocation> undeclared : dataKeys.entries()) {
            String extraErrorMessage = SoyErrors.getDidYouMeanMessage(allHeaderVarNames, undeclared.getKey());
            errorReporter.report(undeclared.getValue(), UNDECLARED_DATA_KEY, undeclared.getKey(), extraErrorMessage);
        }
        // Delegate templates can declare unused params because other implementations
        // of the same delegate may need to use those params.
        if (node instanceof TemplateBasicNode) {
            reportUnusedHeaderVars(errorReporter, unusedParams, UNUSED_PARAM);
        }
        if (node instanceof TemplateElementNode) {
            reportUnusedHeaderVars(errorReporter, unusedStateVars, UNUSED_STATE);
        }
    }
}
public class OnlineUsers { /** * Removes the user from the map . This method shell be invoked when the session is unbounded * from the Application . In wicket is the best way to do that in the * { @ code WebApplication # sessionUnbound ( String ) } . * @ param user * the user * @ return the session id */ public synchronized ID removeOnline ( final USER user ) { } }
final ID sessionId = usersOnline . remove ( user ) ; if ( sessionId != null ) { sessionIdToUser . remove ( sessionId ) ; } return sessionId ;
public class StreamSegmentStorageReader { /** * Reads a range of bytes from a Segment in Storage . * @ param handle A SegmentHandle pointing to the Segment to read from . * @ param startOffset The first offset within the Segment to read from . * @ param maxReadLength The maximum number of bytes to read . * @ param readBlockSize The maximum number of bytes to read at once ( the returned StreamSegmentReadResult will be * broken down into Entries smaller than or equal to this size ) . * @ param storage A ReadOnlyStorage to execute the reads against . * @ return A StreamSegmentReadResult that can be used to process the data . This will be made up of ReadResultEntries * of the following types : Storage , Truncated or EndOfSegment . */ public static StreamSegmentReadResult read ( SegmentHandle handle , long startOffset , int maxReadLength , int readBlockSize , ReadOnlyStorage storage ) { } }
Exceptions . checkArgument ( startOffset >= 0 , "startOffset" , "startOffset must be a non-negative number." ) ; Exceptions . checkArgument ( maxReadLength >= 0 , "maxReadLength" , "maxReadLength must be a non-negative number." ) ; Preconditions . checkNotNull ( handle , "handle" ) ; Preconditions . checkNotNull ( storage , "storage" ) ; String traceId = String . format ( "Read[%s]" , handle . getSegmentName ( ) ) ; // Build a SegmentInfo using the information we are given . If startOffset or length are incorrect , the underlying // ReadOnlyStorage will throw appropriate exceptions at the caller . StreamSegmentInformation segmentInfo = StreamSegmentInformation . builder ( ) . name ( handle . getSegmentName ( ) ) . startOffset ( startOffset ) . length ( startOffset + maxReadLength ) . build ( ) ; return new StreamSegmentReadResult ( startOffset , maxReadLength , new SegmentReader ( segmentInfo , handle , readBlockSize , storage ) , traceId ) ;
public class TaskContext { /** * Get a { @ link DataWriterBuilder } for building a { @ link org . apache . gobblin . writer . DataWriter } . * @ param branches number of forked branches * @ param index branch index * @ return a { @ link DataWriterBuilder } */ public DataWriterBuilder getDataWriterBuilder ( int branches , int index ) { } }
String writerBuilderPropertyName = ForkOperatorUtils . getPropertyNameForBranch ( ConfigurationKeys . WRITER_BUILDER_CLASS , branches , index ) ; log . debug ( "Using property {} to get a writer builder for branches:{}, index:{}" , writerBuilderPropertyName , branches , index ) ; String dataWriterBuilderClassName = this . taskState . getProp ( writerBuilderPropertyName , null ) ; if ( dataWriterBuilderClassName == null ) { dataWriterBuilderClassName = ConfigurationKeys . DEFAULT_WRITER_BUILDER_CLASS ; log . info ( "No configured writer builder found, using {} as the default builder" , dataWriterBuilderClassName ) ; } else { log . info ( "Found configured writer builder as {}" , dataWriterBuilderClassName ) ; } try { return DataWriterBuilder . class . cast ( Class . forName ( dataWriterBuilderClassName ) . newInstance ( ) ) ; } catch ( ClassNotFoundException cnfe ) { throw new RuntimeException ( cnfe ) ; } catch ( InstantiationException ie ) { throw new RuntimeException ( ie ) ; } catch ( IllegalAccessException iae ) { throw new RuntimeException ( iae ) ; }
public class DCacheBase { /** * This is a helper method to remove change listener for all entries . */ public synchronized boolean removeChangeListener ( ChangeListener listener ) { } }
if ( bEnableListener && listener != null ) { eventSource . removeListener ( listener ) ; return true ; } return false ;
public class DependencyRandomIndexingMain { /** * Begin processing with { @ code FlyingHermit } . */ public static void main ( String [ ] args ) { } }
DependencyRandomIndexingMain drim = new DependencyRandomIndexingMain ( ) ; try { drim . run ( args ) ; } catch ( Throwable t ) { t . printStackTrace ( ) ; }
public class ContentSceneController { /** * Called on system resume . The top content scene in theh stack moves to visible state - - see * { @ link ContentScene # show ( ) } */ public void resume ( ) { } }
new ExecutionChain ( mGvrContext ) . runOnMainThread ( new Runnable ( ) { @ Override public void run ( ) { if ( ! mContentSceneViewStack . isEmpty ( ) ) { ContentScene contentScene = mContentSceneViewStack . peek ( ) ; contentScene . show ( ) ; WidgetLib . getTouchManager ( ) . setFlingHandler ( contentScene . getFlingHandler ( ) ) ; } } } ) . execute ( ) ;
public class VersionParser { /** * Parses the build metadata . * @ param build the build metadata string to parse * @ return a valid build metadata object * @ throws IllegalArgumentException if the input string is { @ code NULL } or empty * @ throws ParseException when there is a grammar error * @ throws UnexpectedCharacterException when encounters an unexpected character type */ static MetadataVersion parseBuild ( String build ) { } }
VersionParser parser = new VersionParser ( build ) ; return parser . parseBuild ( ) ;
public class ConstraintNetwork {
    /**
     * Add an interval to this graph.
     *
     * @param i an interval.
     * @return <code>true</code> if successful, <code>false</code> if the
     *         interval could not be added. If there was a problem adding the
     *         interval, then the constraint network may be in an inconsistent
     *         state (e.g., part of the interval got added).
     */
    synchronized boolean addInterval(Interval i) {
        // Reject null, duplicates, and set-insertion failures up front.
        if (i == null || containsInterval(i) || !intervals.add(i)) {
            return false;
        }
        // Each interval contributes two graph nodes: its start and finish points.
        Object iStart = i.getStart();
        Object iFinish = i.getFinish();
        directedGraph.add(iStart);
        directedGraph.add(iFinish);
        // Duration constraints: forward edge carries the max length, the
        // reverse edge the negated min length (standard STN encoding).
        Weight mindur = i.getSpecifiedMinimumLength();
        Weight maxdur = i.getSpecifiedMaximumLength();
        directedGraph.setEdge(iStart, iFinish, maxdur);
        directedGraph.setEdge(iFinish, iStart, mindur.invertSign());
        // Absolute start-time window, anchored at the timeZero reference node.
        Weight minstart = i.getSpecifiedMinimumStart();
        Weight maxstart = i.getSpecifiedMaximumStart();
        directedGraph.setEdge(timeZero, iStart, maxstart);
        directedGraph.setEdge(iStart, timeZero, minstart.invertSign());
        // Absolute finish-time window, also anchored at timeZero.
        Weight minfinish = i.getSpecifiedMinimumFinish();
        Weight maxfinish = i.getSpecifiedMaximumFinish();
        directedGraph.setEdge(timeZero, iFinish, maxfinish);
        directedGraph.setEdge(iFinish, timeZero, minfinish.invertSign());
        // Invalidate all cached derived values; they must be recomputed now
        // that the network changed.
        calcMinDuration = null;
        calcMaxDuration = null;
        calcMinFinish = null;
        calcMaxFinish = null;
        calcMinStart = null;
        calcMaxStart = null;
        shortestDistancesFromTimeZeroSource = null;
        shortestDistancesFromTimeZeroDestination = null;
        return true;
    }
}
public class CrossRateCalculator {
    /**
     * Calculate the cross rate, use this only if required.
     *
     * @param targetPair the currency pair we want Bid/Ask for
     * @param fx1 one rate involving either targetPair.ccy1 or targetPair.ccy2
     * @param fx2 one rate involving either targetPair.ccy1 or targetPair.ccy2
     * @param precision required in case we need to divide rates
     * @param ranking link to algorithm to determine if the targetPair will be market convention or not
     * @return a new instance of FxRate
     * @throws IllegalArgumentException if fx1 and fx2 do not share a common cross currency or either currencies in the targetPair
     */
    public static FxRate calculateCross(final CurrencyPair targetPair, final FxRate fx1, final FxRate fx2, final int precision, final int precisionForInverseFxRate, final MajorCurrencyRanking ranking, final int bidRounding, final int askRounding, CurrencyProvider currencyProvider) {
        // Find the currency the two quoted pairs have in common; it acts as
        // the bridge between them.
        final Optional<String> crossCcy = fx1.getCurrencyPair().findCommonCcy(fx2.getCurrencyPair());
        final String xCcy = crossCcy.orElseThrow(() -> new IllegalArgumentException("The 2 FXRates do not share a ccy " + fx1.getCurrencyPair() + " " + fx2.getCurrencyPair()));
        // NOTE(review): crossCcy.isPresent() is always true here — orElseThrow
        // above would already have thrown — so only the containsCcy check matters.
        if (crossCcy.isPresent() && targetPair.containsCcy(crossCcy.get())) {
            throw new IllegalArgumentException("The target currency pair " + targetPair + " contains the common ccy " + crossCcy.get());
        }
        final String fx1Ccy1 = fx1.getCurrencyPair().getCcy1();
        final String fx2Ccy1 = fx2.getCurrencyPair().getCcy1();
        final String fx1Ccy2 = fx1.getCurrencyPair().getCcy2();
        final String fx2Ccy2 = fx2.getCurrencyPair().getCcy2();
        // what if it is both ccy2?
        // Divide when the common currency sits on the same side (both ccy1 or
        // both ccy2) of the two pairs; otherwise delegate to the other path.
        final boolean shouldDivide = fx1Ccy1.equals(xCcy) && fx2Ccy1.equals(xCcy) || fx1Ccy2.equals(xCcy) && fx2Ccy2.equals(xCcy);
        FxRateImpl crossRate = null;
        if (shouldDivide) {
            // Pick which quote is the numerator based on which pair carries the
            // target's ccy1; the other becomes the denominator.
            final FxRate numeratorFx = targetPair.getCcy1().equals(fx2Ccy2) || targetPair.getCcy1().equals(fx1Ccy1) ? fx1 : fx2;
            final FxRate denominatorFx = numeratorFx == fx1 ? fx2 : fx1;
            LOG.debug("CALC {} / {}", numeratorFx, denominatorFx);
            // Cross bid = num.bid / den.ask and cross ask = num.ask / den.bid,
            // keeping the resulting spread conservative.
            BigDecimal bid = BigDecimalUtil.divide(precision, numeratorFx.getBid(), denominatorFx.getAsk(), bidRounding);
            BigDecimal ask = BigDecimalUtil.divide(precision, numeratorFx.getAsk(), denominatorFx.getBid(), askRounding);
            crossRate = new FxRateImpl(targetPair, xCcy, ranking.isMarketConvention(targetPair), bid, ask, currencyProvider);
        } else {
            // NOTE(review): naming is confusing — this branch (shouldDivide ==
            // false) calls calculateWithDivide; confirm the helper's semantics.
            crossRate = calculateWithDivide(targetPair, fx1, fx2, precision, precisionForInverseFxRate, ranking, bidRounding, askRounding, currencyProvider, xCcy, fx1Ccy2, fx2Ccy2);
        }
        LOG.debug("X RATE {}", crossRate);
        LOG.debug(crossRate.getDescription());
        return crossRate;
    }
}
public class Env {
    /**
     * Pops one frame off this environment's stack, releasing the reference
     * counts the frame held (to avoid dangling pointers stretching lifetimes).
     */
    void pop(Env global) {
        assert _sp > _display[_tod]; // Do not over-pop current scope
        _sp--;
        // Release the references held by the popped slot; subRef returns the
        // value to store back (presumably null once the refcount drops —
        // confirm against Env.subRef).
        _fcn[_sp] = global.subRef(_fcn[_sp]);
        _ary[_sp] = global.subRef(_ary[_sp], _key[_sp]);
        // Sanity-check refcounts on the bottom frame when one exists.
        assert _sp == 0 || _ary[0] == null || check_refcnt(_ary[0].anyVec());
    }
}
public class Loader { /** * Sets the MetaStore URI in the given Configuration , if there is a host in * the match arguments . If there is no host , then the conf is not changed . * @ param conf a Configuration that will be used to connect to the MetaStore * @ param match URIPattern match results */ private static void setMetaStoreURI ( Configuration conf , Map < String , String > match ) { } }
try { // If the host is set , construct a new MetaStore URI and set the property // in the Configuration . Otherwise , do not change the MetaStore URI . String host = match . get ( URIPattern . HOST ) ; if ( host != null && ! NOT_SET . equals ( host ) ) { int port ; try { port = Integer . parseInt ( match . get ( URIPattern . PORT ) ) ; } catch ( NumberFormatException e ) { port = UNSPECIFIED_PORT ; } conf . set ( HIVE_METASTORE_URI_PROP , new URI ( "thrift" , null , host , port , null , null , null ) . toString ( ) ) ; } } catch ( URISyntaxException ex ) { throw new DatasetOperationException ( "Could not build metastore URI" , ex ) ; }
public class DatasourceJBossASClient {
    /**
     * Returns a ModelNode that can be used to create an XA datasource. Callers are free to tweak the datasource request
     * that is returned, if they so choose, before asking the client to execute the request.
     *
     * @param name name of the XA datasource
     * @param blockingTimeoutWaitMillis see datasource documentation for meaning of this setting
     * @param driverName see datasource documentation for meaning of this setting
     * @param xaDataSourceClass see datasource documentation for meaning of this setting
     * @param exceptionSorterClassName see datasource documentation for meaning of this setting
     * @param idleTimeoutMinutes see datasource documentation for meaning of this setting
     * @param minPoolSize see datasource documentation for meaning of this setting
     * @param maxPoolSize see datasource documentation for meaning of this setting
     * @param noRecovery optional, left unset if null
     * @param noTxSeparatePool optional, left unset if null
     * @param preparedStatementCacheSize see datasource documentation for meaning of this setting
     * @param recoveryPluginClassName optional, left unset if null
     * @param securityDomain see datasource documentation for meaning of this setting
     * @param staleConnectionCheckerClassName optional, left unset if null
     * @param transactionIsolation see datasource documentation for meaning of this setting
     * @param validConnectionCheckerClassName see datasource documentation for meaning of this setting
     * @param xaDatasourceProperties see datasource documentation for meaning of this setting
     * @return the request that can be used to create the XA datasource
     */
    public ModelNode createNewXADatasourceRequest(String name, int blockingTimeoutWaitMillis, String driverName, String xaDataSourceClass, String exceptionSorterClassName, int idleTimeoutMinutes, int minPoolSize, int maxPoolSize, Boolean noRecovery, Boolean noTxSeparatePool, int preparedStatementCacheSize, String recoveryPluginClassName, String securityDomain, String staleConnectionCheckerClassName, String transactionIsolation, String validConnectionCheckerClassName, Map<String, String> xaDatasourceProperties) {
        String jndiName = "java:jboss/datasources/" + name;
        // Build the datasource definition as a DMR-syntax string, then parse it.
        String dmrTemplate = "" + "{" + "\"xa-datasource-class\" => \"%s\"" + ", \"blocking-timeout-wait-millis\" => %dL " + ", \"driver-name\" => \"%s\" " + ", \"exception-sorter-class-name\" => \"%s\" " + ", \"idle-timeout-minutes\" => %dL " + ", \"jndi-name\" => \"%s\" " + ", \"jta\" => true " + ", \"min-pool-size\" => %d " + ", \"max-pool-size\" => %d " + ", \"no-recovery\" => %b " + ", \"no-tx-separate-pool\" => %b " + ", \"prepared-statements-cache-size\" => %dL " + ", \"recovery-plugin-class-name\" => \"%s\" " + ", \"security-domain\" => \"%s\" " + ", \"stale-connection-checker-class-name\" => \"%s\" " + ", \"transaction-isolation\" => \"%s\" " + ", \"use-java-context\" => true " + ", \"valid-connection-checker-class-name\" => \"%s\" " + "}";
        String dmr = String.format(dmrTemplate, xaDataSourceClass, blockingTimeoutWaitMillis, driverName, exceptionSorterClassName, idleTimeoutMinutes, jndiName, minPoolSize, maxPoolSize, noRecovery, noTxSeparatePool, preparedStatementCacheSize, recoveryPluginClassName, securityDomain, staleConnectionCheckerClassName, transactionIsolation, validConnectionCheckerClassName);
        Address addr = Address.root().add(SUBSYSTEM, SUBSYSTEM_DATASOURCES, XA_DATA_SOURCE, name);
        final ModelNode request1 = ModelNode.fromString(dmr);
        request1.get(OPERATION).set(ADD);
        request1.get(ADDRESS).set(addr.getAddressNode());
        // if no xa datasource properties, no need to create a batch request, there is only one ADD request to make
        if (xaDatasourceProperties == null || xaDatasourceProperties.size() == 0) {
            return request1;
        }
        // create a batch of requests - the first is the main one, the rest create each conn property
        ModelNode[] batch = new ModelNode[1 + xaDatasourceProperties.size()];
        batch[0] = request1;
        int n = 1;
        for (Map.Entry<String, String> entry : xaDatasourceProperties.entrySet()) {
            // Each XA datasource property is its own child ADD request.
            addr = Address.root().add(SUBSYSTEM, SUBSYSTEM_DATASOURCES, XA_DATA_SOURCE, name, XA_DATASOURCE_PROPERTIES, entry.getKey());
            final ModelNode requestN = new ModelNode();
            requestN.get(OPERATION).set(ADD);
            requestN.get(ADDRESS).set(addr.getAddressNode());
            setPossibleExpression(requestN, VALUE, entry.getValue());
            batch[n++] = requestN;
        }
        ModelNode result = createBatchRequest(batch);
        // remove unset args
        // Optional settings that were null are stripped from the main (step 0)
        // request so the server applies its defaults instead of literal "null".
        if (null == noRecovery) {
            result.get("steps").get(0).remove("no-recovery");
        }
        if (null == noTxSeparatePool) {
            result.get("steps").get(0).remove("no-tx-separate-pool");
        }
        if (null == recoveryPluginClassName) {
            result.get("steps").get(0).remove("recovery-plugin-class-name");
        }
        if (null == staleConnectionCheckerClassName) {
            result.get("steps").get(0).remove("stale-connection-checker-class-name");
        }
        return result;
    }
}
public class ge_p1p1_to_p2 {
    /**
     * r = p
     *
     * Converts an Ed25519 group element from the "completed" (P1xP1)
     * representation to the projective (P2) representation, mirroring ref10's
     * ge_p1p1_to_p2: X3 = X*T, Y3 = Y*Z, Z3 = Z*T.
     */
    public static void ge_p1p1_to_p2(ge_p2 r, ge_p1p1 p) {
        fe_mul.fe_mul(r.X, p.X, p.T); // X3 = X * T
        fe_mul.fe_mul(r.Y, p.Y, p.Z); // Y3 = Y * Z
        fe_mul.fe_mul(r.Z, p.Z, p.T); // Z3 = Z * T
    }
}
public class PlaybackService {
    /**
     * Play a track.
     * This method ensures that the media player will be in the right state to be able to play a new
     * datasource.
     *
     * @param track track url.
     */
    private void playTrack(SoundCloudTrack track) {
        pauseTimer();
        try {
            // acquire lock on wifi.
            // NOTE(review): the lock is acquired before the risky calls below;
            // confirm it is released on the IOException path elsewhere.
            mWifiLock.acquire();
            mIsPaused = false;
            mIsPausedAfterAudioFocusChanged = false;
            mHasAlreadyPlayed = true;
            // 1 - UPDATE ALL VISUAL CALLBACK FIRST TO IMPROVE USER EXPERIENCE
            updateNotification();
            // update playback state as well as meta data.
            mMediaSession.setPlaybackState(MediaSessionWrapper.PLAYBACK_STATE_PLAYING);
            // start loading of the artwork.
            loadArtwork(this, SoundCloudArtworkHelper.getArtworkUrl(track, SoundCloudArtworkHelper.XXXLARGE));
            // broadcast events
            Intent intent = new Intent(PlaybackListener.ACTION_ON_TRACK_PLAYED);
            intent.putExtra(PlaybackListener.EXTRA_KEY_TRACK, track);
            mLocalBroadcastManager.sendBroadcast(intent);
            Intent bufferingStart = new Intent(PlaybackListener.ACTION_ON_BUFFERING_STARTED);
            mLocalBroadcastManager.sendBroadcast(bufferingStart);
            // 2 - THEN PREPARE THE TRACK STREAMING
            // set media player to stop state in order to be able to call prepare.
            mMediaPlayer.reset();
            // set new data source
            mMediaPlayer.setDataSource(track.getStreamUrl() + SOUND_CLOUD_CLIENT_ID_PARAM + mSoundCloundClientId);
            // Try to gain the audio focus before preparing and starting the media player.
            if (mAudioManager.requestAudioFocus(this, AudioManager.STREAM_MUSIC, AudioManager.AUDIOFOCUS_GAIN) == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
                // prepare asynchronously the stream to be able to handle new action on the
                // service thread such as a pause command.
                mIsPreparing = true;
                mMediaPlayer.prepareAsync();
            }
        } catch (IOException e) {
            // Data source could not be opened; playback is abandoned for this track.
            Log.e(TAG, "File referencing not exist : " + track);
        }
    }
}
public class RowCombiningTimeAndDimsIterator { /** * Clear the info about which rows ( in which original iterators and which row nums within them ) were combined on * the previous step . */ private void clearCombinedRowsInfo ( ) { } }
for ( int originalIteratorIndex = indexesOfCurrentlyCombinedOriginalIterators . nextSetBit ( 0 ) ; originalIteratorIndex >= 0 ; originalIteratorIndex = indexesOfCurrentlyCombinedOriginalIterators . nextSetBit ( originalIteratorIndex + 1 ) ) { minCurrentlyCombinedRowNumByOriginalIteratorIndex [ originalIteratorIndex ] = MIN_CURRENTLY_COMBINED_ROW_NUM_UNSET_VALUE ; } indexesOfCurrentlyCombinedOriginalIterators . clear ( ) ;
public class GsonCache { /** * Get an object from Reservoir with the given key . This a blocking IO operation . * @ param key the key string . * @ param classOfT the Class type of the expected return object . * @ return the object of the given type if it exists . */ public static < T > T read ( String key , Class < T > classOfT ) throws Exception { } }
String json = cache . getString ( key ) . getString ( ) ; T value = new Gson ( ) . fromJson ( json , classOfT ) ; if ( value == null ) { throw new NullPointerException ( ) ; } return value ;
public class RestfulObjectsSpecEventSerializer { /** * region > supporting methods */ JsonRepresentation asEventRepr ( EventMetadata metadata , final JsonRepresentation payloadRepr ) { } }
final JsonRepresentation eventRepr = JsonRepresentation . newMap ( ) ; final JsonRepresentation metadataRepr = JsonRepresentation . newMap ( ) ; eventRepr . mapPut ( "metadata" , metadataRepr ) ; metadataRepr . mapPut ( "id" , metadata . getId ( ) ) ; metadataRepr . mapPut ( "transactionId" , metadata . getTransactionId ( ) ) ; metadataRepr . mapPut ( "sequence" , metadata . getSequence ( ) ) ; metadataRepr . mapPut ( "eventType" , metadata . getEventType ( ) ) ; metadataRepr . mapPut ( "user" , metadata . getUser ( ) ) ; metadataRepr . mapPut ( "timestamp" , metadata . getTimestamp ( ) ) ; eventRepr . mapPut ( "payload" , payloadRepr ) ; return eventRepr ;
public class InjectorUtils {
    /**
     * Builds a new Pubsub client and returns it.
     *
     * <p>Uses Application Default Credentials, scoping them to the Pub/Sub
     * scopes when required. User-level (non service account) credentials are
     * rejected with a hard exit because their Pub/Sub quota is insufficient.</p>
     */
    public static Pubsub getClient(final HttpTransport httpTransport, final JsonFactory jsonFactory) throws IOException {
        checkNotNull(httpTransport);
        checkNotNull(jsonFactory);
        GoogleCredential credential = GoogleCredential.getApplicationDefault(httpTransport, jsonFactory);
        if (credential.createScopedRequired()) {
            credential = credential.createScoped(PubsubScopes.all());
        }
        // A non-null client authentication indicates user-level OAuth credentials
        // rather than a service account — refuse to run with them.
        if (credential.getClientAuthentication() != null) {
            System.out.println("\n***Warning! You are not using service account credentials to " + "authenticate.\nYou need to use service account credentials for this example," + "\nsince user-level credentials do not have enough pubsub quota,\nand so you will run " + "out of PubSub quota very quickly.\nSee " + "https://developers.google.com/identity/protocols/application-default-credentials.");
            System.exit(1);
        }
        // Wrap requests with retry handling before building the client.
        HttpRequestInitializer initializer = new RetryHttpInitializerWrapper(credential);
        return new Pubsub.Builder(httpTransport, jsonFactory, initializer).setApplicationName(APP_NAME).build();
    }
}
public class Mapper { /** * Returns the entries { @ link Gather # loop ( Decision ) } * @ param decision * @ return */ public Gather < Entry < K , V > > entryLoop ( Decision < Entry < K , V > > decision ) { } }
return entryGather ( ) . loop ( decision ) ;