signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class PhotonHTMLHelper {
  /**
   * Merge external CSS and JS contents into aggregated resource bundles for improved
   * browser performance. All source nodes are taken from the head and all resulting
   * bundle nodes are written back to the head, preserving relative order around
   * non-mergable nodes.
   *
   * @param aRequestScope
   *        Current request scope. Never <code>null</code>.
   * @param aHead
   *        The HTML head object. Never <code>null</code>.
   * @param bMergeCSS
   *        <code>true</code> to aggregate CSS entries.
   * @param bMergeJS
   *        <code>true</code> to aggregate JS entries.
   * @param aWSRBMgr
   *        The resource bundle provider. May not be <code>null</code>.
   */
  public static void mergeExternalCSSAndJSNodes (@Nonnull final IRequestWebScopeWithoutResponse aRequestScope,
                                                 @Nonnull final HCHead aHead,
                                                 final boolean bMergeCSS,
                                                 final boolean bMergeJS,
                                                 @Nonnull final IWebSiteResourceBundleProvider aWSRBMgr)
  {
    if (!bMergeCSS && !bMergeJS)
    {
      // Nothing to do
      return;
    }

    // Whether regular (non-minified) resources should be used for bundling
    final boolean bRegular = HCSettings.isUseRegularResources ();

    if (bMergeCSS)
    {
      // Extract all CSS nodes for merging (they are removed from the head here)
      final ICommonsList <IHCNode> aCSSNodes = new CommonsArrayList <> ();
      aHead.getAllAndRemoveAllCSSNodes (aCSSNodes);

      // Accumulates consecutive mergable CSS resources until a non-mergable node is hit
      final ICommonsList <WebSiteResourceWithCondition> aCSSs = new CommonsArrayList <> ();
      for (final IHCNode aNode : aCSSNodes)
      {
        boolean bStartMerge = true;
        if (HCCSSNodeDetector.isDirectCSSFileNode (aNode))
        {
          final ICSSPathProvider aPathProvider = ((HCLink) aNode).getPathProvider ();
          if (aPathProvider != null)
          {
            // Mergable file-based CSS node - queue it and continue collecting
            aCSSs.add (WebSiteResourceWithCondition.createForCSS (aPathProvider, bRegular));
            bStartMerge = false;
          }
        }
        if (bStartMerge)
        {
          // Non-mergable node encountered - flush the queued resources as bundles first
          if (!aCSSs.isEmpty ())
          {
            for (final WebSiteResourceBundleSerialized aBundle : aWSRBMgr.getResourceBundles (aCSSs, bRegular))
              aHead.addCSS (aBundle.createNode (aRequestScope));
            aCSSs.clear ();
          }
          // Add the current (non-mergable) node again to head after merging
          aHead.addCSS (aNode);
        }
      }
      // Add the remaining nodes (if any)
      if (!aCSSs.isEmpty ())
        for (final WebSiteResourceBundleSerialized aBundle : aWSRBMgr.getResourceBundles (aCSSs, bRegular))
          aHead.addCSS (aBundle.createNode (aRequestScope));
    }

    if (bMergeJS)
    {
      // Extract all JS nodes for merging (they are removed from the head here)
      final ICommonsList <IHCNode> aJSNodes = new CommonsArrayList <> ();
      aHead.getAllAndRemoveAllJSNodes (aJSNodes);

      // Accumulates consecutive mergable JS resources until a non-mergable node is hit
      final ICommonsList <WebSiteResourceWithCondition> aJSs = new CommonsArrayList <> ();
      for (final IHCNode aNode : aJSNodes)
      {
        boolean bStartMerge = true;
        if (HCJSNodeDetector.isDirectJSFileNode (aNode))
        {
          final IJSPathProvider aPathProvider = ((HCScriptFile) aNode).getPathProvider ();
          if (aPathProvider != null)
          {
            // Mergable file-based JS node - queue it and continue collecting
            aJSs.add (WebSiteResourceWithCondition.createForJS (aPathProvider, bRegular));
            bStartMerge = false;
          }
        }
        if (bStartMerge)
        {
          // Non-mergable node encountered - flush the queued resources as bundles first
          if (!aJSs.isEmpty ())
          {
            for (final WebSiteResourceBundleSerialized aBundle : aWSRBMgr.getResourceBundles (aJSs, bRegular))
              aHead.addJS (aBundle.createNode (aRequestScope));
            aJSs.clear ();
          }
          // Add the current (non-mergable) node again to head after merging
          aHead.addJS (aNode);
        }
      }
      // Add the remaining nodes (if any)
      if (!aJSs.isEmpty ())
        for (final WebSiteResourceBundleSerialized aBundle : aWSRBMgr.getResourceBundles (aJSs, bRegular))
          aHead.addJS (aBundle.createNode (aRequestScope));
    }
  }
}
public class JettyStarter {
  /**
   * Set the port to be used to run the application. Defaults to
   * {@value #DEFAULT_PORT}.
   *
   * @param nPort
   *        The port to be used. Must be &gt; 0.
   * @return this for chaining
   */
  @Nonnull
  public final JettyStarter setPort (@Nonnegative final int nPort)
  {
    // Rejects nPort <= 0 (presumably with an IllegalArgumentException - TODO confirm ValueEnforcer semantics)
    ValueEnforcer.isGT0 (nPort, "Port");
    m_nPort = nPort;
    return this;
  }
}
public class AbstractCasWebflowConfigurer { /** * Gets expression string from action . * @ param act the act * @ return the expression string from action */ public Expression getExpressionStringFromAction ( final EvaluateAction act ) { } }
val field = ReflectionUtils . findField ( act . getClass ( ) , "expression" ) ; ReflectionUtils . makeAccessible ( field ) ; return ( Expression ) ReflectionUtils . getField ( field , act ) ;
public class ScriptContext { /** * Sets a { @ link DataSet } entry from a configuration property . * @ param configProperty * the configuration key * @ param dataSetKey * the { @ link DataSet } key * @ param entryKey * the key of the { @ link DataSet } entry */ @ Cmd public void setToFormEntry ( final String configProperty , final String dataSetKey , final String entryKey ) { } }
String resolvedConfigProperty = resolveProperty ( configProperty ) ; String resolvedDataSetKey = resolveProperty ( dataSetKey ) ; String resolvedEntryKey = resolveProperty ( entryKey ) ; DataSet dataSet = dataSourceProvider . get ( ) . getCurrentDataSet ( resolvedDataSetKey ) ; if ( dataSet == null ) { throw new IllegalStateException ( "DataSet " + resolvedDataSetKey + " was null." ) ; } String value = dataSet . getValue ( resolvedEntryKey ) ; log . info ( "Setting property '{}' to '{}'" , resolvedConfigProperty , value ) ; config . put ( resolvedConfigProperty , value ) ;
public class QueryQuestionCommentController {
  /**
   * Creates new query question comment by delegating to the DAO.
   *
   * @param queryReply query reply
   * @param queryPage query page
   * @param parentComment parent comment
   * @param comment comment contents
   * @param hidden whether comment should be hidden
   * @param creator creator
   * @param created create time
   * @return created comment
   */
  public QueryQuestionComment createQueryQuestionComment(QueryReply queryReply, QueryPage queryPage, QueryQuestionComment parentComment, String comment, Boolean hidden, User creator, Date created) {
    // creator/created are passed twice - presumably also used as last-modifier /
    // last-modified for a freshly created comment (TODO confirm DAO signature)
    return queryQuestionCommentDAO.create(queryReply, queryPage, parentComment, comment, hidden, creator, created, creator, created);
  }
}
public class CmsClientAliasImportResult { /** * Extracts alias import results from a JSON array . < p > * @ param array the JSON array * @ return the alias import results from the array */ public static List < CmsClientAliasImportResult > parseArray ( JSONArray array ) { } }
List < CmsClientAliasImportResult > result = new ArrayList < CmsClientAliasImportResult > ( ) ; for ( int i = 0 ; i < array . size ( ) ; i ++ ) { JSONValue lineVal = array . get ( i ) ; JSONObject lineObj = ( JSONObject ) lineVal ; CmsClientAliasImportResult singleResult = parse ( lineObj ) ; result . add ( singleResult ) ; } return result ;
public class CodecCollector { /** * Creates the stats . * @ param statsSpanList * the stats span list * @ param positionsData * the positions data * @ param spansNumberData * the spans number data * @ param docSet * the doc set * @ throws IOException * Signals that an I / O exception has occurred . */ private static void createStats ( List < ComponentSpan > statsSpanList , Map < Integer , Integer > positionsData , Map < MtasSpanQuery , Map < Integer , Integer > > spansNumberData , Integer [ ] docSet ) throws IOException { } }
if ( statsSpanList != null ) { for ( ComponentSpan span : statsSpanList ) { if ( span . parser . needArgumentsNumber ( ) > span . queries . length ) { throw new IOException ( "function " + span . parser + " expects (at least) " + span . parser . needArgumentsNumber ( ) + " queries" ) ; } // collect Map < Integer , long [ ] > args = computeArguments ( spansNumberData , span . queries , docSet ) ; if ( span . dataType . equals ( CodecUtil . DATA_TYPE_LONG ) ) { // try to call functionParser as little as possible if ( span . statsType . equals ( CodecUtil . STATS_BASIC ) && ( span . minimumLong == null ) && ( span . maximumLong == null ) && ( span . functions == null || ( span . functionBasic ( ) && span . functionSumRule ( ) && ! span . functionNeedPositions ( ) ) ) ) { // initialise int length = span . parser . needArgumentsNumber ( ) ; long [ ] valueSum = new long [ length ] ; long valuePositions = 0 ; // collect if ( docSet . length > 0 ) { long [ ] tmpArgs ; for ( int docId : docSet ) { tmpArgs = args . get ( docId ) ; valuePositions += ( positionsData == null ) ? 0 : positionsData . get ( docId ) ; if ( tmpArgs != null ) { for ( int i = 0 ; i < length ; i ++ ) { valueSum [ i ] += tmpArgs [ i ] ; } } } long valueLong ; span . dataCollector . initNewList ( 1 ) ; try { valueLong = span . parser . getValueLong ( valueSum , valuePositions ) ; span . dataCollector . add ( valueLong , docSet . length ) ; } catch ( IOException e ) { log . debug ( e ) ; span . dataCollector . error ( e . getMessage ( ) ) ; } if ( span . functions != null ) { for ( SubComponentFunction function : span . functions ) { function . dataCollector . initNewList ( 1 ) ; if ( function . dataType . equals ( CodecUtil . DATA_TYPE_LONG ) ) { try { valueLong = function . parserFunction . getValueLong ( valueSum , valuePositions ) ; function . dataCollector . add ( valueLong , docSet . length ) ; } catch ( IOException e ) { log . debug ( e ) ; function . dataCollector . error ( e . 
getMessage ( ) ) ; } } else if ( function . dataType . equals ( CodecUtil . DATA_TYPE_DOUBLE ) ) { try { double valueDouble = function . parserFunction . getValueDouble ( valueSum , valuePositions ) ; function . dataCollector . add ( valueDouble , docSet . length ) ; } catch ( IOException e ) { log . debug ( e ) ; function . dataCollector . error ( e . getMessage ( ) ) ; } } else { throw new IOException ( "can't handle function dataType " + function . dataType ) ; } function . dataCollector . closeNewList ( ) ; } } span . dataCollector . closeNewList ( ) ; } } else { // collect if ( docSet . length > 0 ) { int number = 0 ; int positions ; long valueLong ; double valueDouble ; long [ ] values = new long [ docSet . length ] ; long [ ] [ ] functionValuesLong = null ; double [ ] [ ] functionValuesDouble = null ; span . dataCollector . initNewList ( 1 ) ; if ( span . functions != null ) { functionValuesLong = new long [ span . functions . size ( ) ] [ ] ; functionValuesDouble = new double [ span . functions . size ( ) ] [ ] ; for ( int i = 0 ; i < span . functions . size ( ) ; i ++ ) { SubComponentFunction function = span . functions . get ( i ) ; if ( function . dataType . equals ( CodecUtil . DATA_TYPE_LONG ) ) { functionValuesLong [ i ] = new long [ docSet . length ] ; functionValuesDouble [ i ] = null ; } else if ( function . dataType . equals ( CodecUtil . DATA_TYPE_DOUBLE ) ) { functionValuesLong [ i ] = null ; functionValuesDouble [ i ] = new double [ docSet . length ] ; } function . dataCollector . initNewList ( 1 ) ; } } for ( int docId : docSet ) { if ( positionsData == null ) { positions = 0 ; } else { positions = ( positionsData . get ( docId ) == null ? 0 : positionsData . get ( docId ) ) ; } valueLong = span . parser . getValueLong ( args . get ( docId ) , positions ) ; if ( ( ( span . minimumLong == null ) || ( valueLong >= span . minimumLong ) ) && ( ( span . maximumLong == null ) || ( valueLong <= span . 
maximumLong ) ) ) { values [ number ] = valueLong ; if ( span . functions != null ) { for ( int i = 0 ; i < span . functions . size ( ) ; i ++ ) { SubComponentFunction function = span . functions . get ( i ) ; try { if ( function . dataType . equals ( CodecUtil . DATA_TYPE_LONG ) ) { valueLong = function . parserFunction . getValueLong ( args . get ( docId ) , positions ) ; functionValuesLong [ i ] [ number ] = valueLong ; } else if ( function . dataType . equals ( CodecUtil . DATA_TYPE_DOUBLE ) ) { valueDouble = function . parserFunction . getValueDouble ( args . get ( docId ) , positions ) ; functionValuesDouble [ i ] [ number ] = valueDouble ; } } catch ( IOException e ) { log . debug ( e ) ; function . dataCollector . error ( e . getMessage ( ) ) ; } } } number ++ ; } } if ( number > 0 ) { span . dataCollector . add ( values , number ) ; if ( span . functions != null ) { for ( int i = 0 ; i < span . functions . size ( ) ; i ++ ) { SubComponentFunction function = span . functions . get ( i ) ; if ( function . dataType . equals ( CodecUtil . DATA_TYPE_LONG ) ) { function . dataCollector . add ( functionValuesLong [ i ] , number ) ; } else if ( function . dataType . equals ( CodecUtil . DATA_TYPE_DOUBLE ) ) { function . dataCollector . add ( functionValuesDouble [ i ] , number ) ; } } } } span . dataCollector . closeNewList ( ) ; if ( span . functions != null ) { for ( SubComponentFunction function : span . functions ) { function . dataCollector . closeNewList ( ) ; } } } } } else { throw new IOException ( "unexpected dataType " + span . dataType ) ; } } }
public class ProgressEvent { /** * Returns the unique event code identifying the type of event this object * represents . * @ return The unique event code that identifies what type of specific type * of event this object represents . * @ deprecated Use { @ link # getEventType ( ) } instead . */ @ Deprecated public int getEventCode ( ) { } }
Integer legacyCode = legacyEventCodes . get ( eventType ) ; // Returns - 1 if the event type does not have a legacy event code return legacyCode == null ? - 1 : legacyCode ;
public class Tokenizer { /** * Gets the remaining string tokens until an EOL / EOF is seen , concatenates * them together , and converts the base64 encoded data to a byte array . * @ param required If true , an exception will be thrown if no strings remain ; * otherwise null be be returned . * @ return The byte array containing the decoded strings , or null if there * were no strings to decode . * @ throws TextParseException The input was invalid . * @ throws IOException An I / O error occurred . */ public byte [ ] getBase64 ( boolean required ) throws IOException { } }
String s = remainingStrings ( ) ; if ( s == null ) { if ( required ) throw exception ( "expected base64 encoded string" ) ; else return null ; } byte [ ] array = base64 . fromString ( s ) ; if ( array == null ) throw exception ( "invalid base64 encoding" ) ; return array ;
public class QuerysImpl {
  /**
   * Execute an Analytics query.
   * Executes an Analytics query for data. [Here](https://dev.applicationinsights.io/documentation/Using-the-API/Query)
   * is an example for using POST with an Analytics query.
   *
   * @param appId ID of the application. This is Application ID from the API Access settings blade in the Azure portal.
   * @param body The Analytics query. Learn more about the [Analytics query syntax](https://azure.microsoft.com/documentation/articles/app-insights-analytics-reference/)
   * @throws IllegalArgumentException thrown if parameters fail the validation
   * @throws ErrorResponseException thrown if the request is rejected by server
   * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
   * @return the QueryResults object if successful.
   */
  public QueryResults execute(String appId, QueryBody body) {
    // Synchronous wrapper: blocks on the async service call and unwraps the body
    return executeWithServiceResponseAsync(appId, body).toBlocking().single().body();
  }
}
public class JCudaDriver {
  /**
   * Launches a CUDA function (maps to the driver API call
   * <code>cuLaunchKernel</code>).
   * <p>
   * Invokes the kernel <code>f</code> on a <code>gridDimX</code> x
   * <code>gridDimY</code> x <code>gridDimZ</code> grid of blocks, where each
   * block contains <code>blockDimX</code> x <code>blockDimY</code> x
   * <code>blockDimZ</code> threads. <code>sharedMemBytes</code> sets the
   * amount of dynamic shared memory available to each thread block, and the
   * launch may optionally be associated with a stream via a non-zero
   * <code>hStream</code>.
   * <p>
   * Kernel parameters can be specified in one of two ways:
   * <ol>
   * <li>Via <code>kernelParams</code>: for a kernel with N parameters, an
   * array of N pointers, each pointing to a region of memory from which the
   * actual kernel parameter will be copied. Parameter sizes and offsets are
   * taken from the kernel's image.</li>
   * <li>Via <code>extra</code>: a NULL- or CU_LAUNCH_PARAM_END-terminated
   * list of setting names and values, e.g. CU_LAUNCH_PARAM_BUFFER_POINTER
   * followed by a buffer that packs all parameters, and
   * CU_LAUNCH_PARAM_BUFFER_SIZE followed by a pointer to a size_t with the
   * buffer size. The application is then responsible for parameter size,
   * alignment and padding.</li>
   * </ol>
   * Specifying parameters through both <code>kernelParams</code> and
   * <code>extra</code> (i.e. both non-NULL) yields
   * CUDA_ERROR_INVALID_VALUE. Launching overwrites any block shape, shared
   * size and parameter info previously set for <code>f</code> through the
   * deprecated cuFuncSetBlockShape/cuFuncSetSharedSize/cuParamSet* APIs.
   * The kernel must either have been compiled with toolchain version 3.2 or
   * later (so it contains parameter information) or take no parameters;
   * otherwise CUDA_ERROR_INVALID_IMAGE is returned.
   *
   * @param f Kernel to launch
   * @param gridDimX Width of grid in blocks
   * @param gridDimY Height of grid in blocks
   * @param gridDimZ Depth of grid in blocks
   * @param blockDimX X dimension of each thread block
   * @param blockDimY Y dimension of each thread block
   * @param blockDimZ Z dimension of each thread block
   * @param sharedMemBytes Dynamic shared-memory size per thread block in bytes
   * @param hStream Stream identifier (may be null for the default stream)
   * @param kernelParams Array of pointers to kernel parameters
   * @param extra Extra options, as described above
   *
   * @return CUDA_SUCCESS, CUDA_ERROR_DEINITIALIZED, CUDA_ERROR_NOT_INITIALIZED,
   * CUDA_ERROR_INVALID_CONTEXT, CUDA_ERROR_INVALID_HANDLE,
   * CUDA_ERROR_INVALID_IMAGE, CUDA_ERROR_INVALID_VALUE,
   * CUDA_ERROR_LAUNCH_FAILED, CUDA_ERROR_LAUNCH_OUT_OF_RESOURCES,
   * CUDA_ERROR_LAUNCH_TIMEOUT, CUDA_ERROR_LAUNCH_INCOMPATIBLE_TEXTURING,
   * CUDA_ERROR_SHARED_OBJECT_INIT_FAILED
   *
   * @see JCudaDriver#cuCtxGetCacheConfig
   * @see JCudaDriver#cuCtxSetCacheConfig
   * @see JCudaDriver#cuFuncSetCacheConfig
   * @see JCudaDriver#cuFuncGetAttribute
   */
  public static int cuLaunchKernel(
      CUfunction f,
      int gridDimX,
      int gridDimY,
      int gridDimZ,
      int blockDimX,
      int blockDimY,
      int blockDimZ,
      int sharedMemBytes,
      CUstream hStream,
      Pointer kernelParams,
      Pointer extra) {
    // Delegate to the native binding; checkResult converts error codes to exceptions
    // when exception-checking is enabled
    return checkResult(cuLaunchKernelNative(f, gridDimX, gridDimY, gridDimZ, blockDimX, blockDimY, blockDimZ, sharedMemBytes, hStream, kernelParams, extra));
  }
}
public class MapUtil {
  /**
   * Builds a correctly typed HashMap, inferring the key/value types from the
   * assignment target, and initializes it with the given parallel arrays.
   *
   * @param keys the keys, must not be null
   * @param values the values, must not be null and must have the same length as keys
   * @return a mutable HashMap mapping keys[i] to values[i]
   * @throws NullPointerException if keys or values is null
   * @throws IllegalArgumentException if the arrays differ in length
   */
  public static <K, V> HashMap<K, V> newHashMap(final K[] keys, final V[] values) {
    if (keys == null || values == null) {
      throw new NullPointerException("keys and values must not be null");
    }
    if (keys.length != values.length) {
      // Same exception type and message format as the previous Validate.isTrue call
      throw new IllegalArgumentException(
          String.format("keys.length is %d but values.length is %d", keys.length, values.length));
    }
    // Size for the default 0.75 load factor so no rehash occurs while filling
    HashMap<K, V> map = new HashMap<K, V>((int) (keys.length / 0.75f) + 1);
    for (int i = 0; i < keys.length; i++) {
      map.put(keys[i], values[i]);
    }
    return map;
  }
}
public class ProcessorDef {
  /**
   * Adds a &lt;compilerarg&gt; or &lt;linkerarg&gt;.
   *
   * @param param
   *            command line argument, must not be null
   * @throws NullPointerException
   *             if param is null
   * @throws BuildException
   *             if this definition is a reference
   */
  protected void addConfiguredProcessorParam(final ProcessorParam param) throws NullPointerException, BuildException {
    if (param == null) {
      throw new NullPointerException("param");
    }
    // A definition that refers to another definition must not carry its own children
    if (isReference()) {
      throw noChildrenAllowed();
    }
    this.processorParams.addElement(param);
  }
}
public class FieldUtils { /** * Verify that input values are within specified bounds . * @ param value the value to check * @ param lowerBound the lower bound allowed for value * @ param upperBound the upper bound allowed for value * @ throws IllegalFieldValueException if value is not in the specified bounds */ public static void verifyValueBounds ( String fieldName , int value , int lowerBound , int upperBound ) { } }
if ( ( value < lowerBound ) || ( value > upperBound ) ) { throw new IllegalFieldValueException ( fieldName , Integer . valueOf ( value ) , Integer . valueOf ( lowerBound ) , Integer . valueOf ( upperBound ) ) ; }
public class TrunkUpdater { /** * Add the requested post parameters to the Request . * @ param request Request to add post params to */ private void addPostParams ( final Request request ) { } }
if ( friendlyName != null ) { request . addPostParam ( "FriendlyName" , friendlyName ) ; } if ( domainName != null ) { request . addPostParam ( "DomainName" , domainName ) ; } if ( disasterRecoveryUrl != null ) { request . addPostParam ( "DisasterRecoveryUrl" , disasterRecoveryUrl . toString ( ) ) ; } if ( disasterRecoveryMethod != null ) { request . addPostParam ( "DisasterRecoveryMethod" , disasterRecoveryMethod . toString ( ) ) ; } if ( recording != null ) { request . addPostParam ( "Recording" , recording . toString ( ) ) ; } if ( secure != null ) { request . addPostParam ( "Secure" , secure . toString ( ) ) ; } if ( cnamLookupEnabled != null ) { request . addPostParam ( "CnamLookupEnabled" , cnamLookupEnabled . toString ( ) ) ; }
public class SsecAccess {
  /**
   * Execute a login attempt, given a user name and password. If a user has already logged in, a logout will be called
   * first.
   *
   * @param userId The unique user ID.
   * @param password The user's password.
   * @param tokenHandler actual handler which is used to change the token
   */
  public static void login(final String userId, final String password, final TokenChangedHandler tokenHandler) {
    // NOTE(review): this guards on the NEW userId being non-null, not on whether a
    // user is currently logged in as the javadoc states - confirm intended behavior
    if (userId != null) {
      logout();
    }
    loginOnly(userId, password, tokenHandler);
  }
}
public class Misc { /** * Convert byte array to a " hexa " - string . * @ param data Byte array to convert . * @ return " Hexa - string " representation . */ static String toHexString ( byte [ ] data ) { } }
char [ ] chArray = new char [ data . length * 2 ] ; int pos = 0 ; for ( byte b : data ) { chArray [ pos ++ ] = HEX_CHARS [ ( b >> 4 ) & 0x0f ] ; chArray [ pos ++ ] = HEX_CHARS [ b & 0x0f ] ; } return new String ( chArray ) ;
public class PdfContentByte {
  /**
   * Sets the fill color to a spot color.
   *
   * @param sp the spot color
   * @param tint the tint for the spot color. 0 is no color and 1
   *        is 100% color
   */
  public void setColorFill(PdfSpotColor sp, float tint) {
    checkWriter();
    // Register the spot color with the writer; its details become the current fill color state
    state.colorDetails = writer.addSimple(sp);
    PageResources prs = getPageResources();
    PdfName name = state.colorDetails.getColorName();
    // The page resources may return a substitute name to avoid clashes
    name = prs.addColor(name, state.colorDetails.getIndirectReference());
    // Emit the PDF content-stream operators: "<name> cs" selects the color
    // space, "<tint> scn" sets the fill tint
    content.append(name.getBytes()).append(" cs ").append(tint).append(" scn").append_i(separator);
  }
}
public class IndexFieldTypePollerPeriodical {
  /**
   * Removes the index field type data for each deleted index.
   *
   * @param event index deletion event carrying the names of the deleted indices
   */
  @Subscribe
  public void handleIndexDeletion(final IndicesDeletedEvent event) {
    event.indices().forEach(indexName -> {
      LOG.debug("Removing field type information for deleted index <{}>", indexName);
      // Drop the persisted field-type record so stale mappings are not served
      dbService.delete(indexName);
    });
  }
}
public class CmsDriverManager { /** * Tries to add sub - resources of moved folders to the publish list and throws an exception if the publish list still does * not contain some sub - resources of the moved folders . < p > * @ param cms the current CMS context * @ param dbc the current database context * @ param pubList the publish list * @ throws CmsException if something goes wrong */ protected void ensureSubResourcesOfMovedFoldersPublished ( CmsObject cms , CmsDbContext dbc , CmsPublishList pubList ) throws CmsException { } }
List < CmsResource > topMovedFolders = pubList . getTopMovedFolders ( cms ) ; Iterator < CmsResource > folderIt = topMovedFolders . iterator ( ) ; while ( folderIt . hasNext ( ) ) { CmsResource folder = folderIt . next ( ) ; addSubResources ( dbc , pubList , folder ) ; } List < CmsResource > missingSubResources = pubList . getMissingSubResources ( cms , topMovedFolders ) ; if ( missingSubResources . isEmpty ( ) ) { return ; } StringBuffer pathBuffer = new StringBuffer ( ) ; for ( CmsResource missing : missingSubResources ) { pathBuffer . append ( missing . getRootPath ( ) ) ; pathBuffer . append ( " " ) ; } throw new CmsVfsException ( Messages . get ( ) . container ( Messages . RPT_CHILDREN_OF_MOVED_FOLDER_NOT_PUBLISHED_1 , pathBuffer . toString ( ) ) ) ;
public class Application {
  /**
   * <p><span class="changed_added_2_0">Register</span> a new mapping
   * of behavior id to the name of the corresponding
   * {@link Behavior} class. This allows subsequent calls
   * to <code>createBehavior()</code> to serve as a factory for
   * {@link Behavior} instances.</p>
   *
   * @param behaviorId The behavior id to be registered
   * @param behaviorClass The fully qualified class name of the
   *        corresponding {@link Behavior} implementation
   * @throws NullPointerException if <code>behaviorId</code>
   *         or <code>behaviorClass</code> is <code>null</code>
   * @since 2.0
   */
  public void addBehavior(String behaviorId, String behaviorClass) {
    // Delegates to the wrapped default application. NOTE(review): silently a
    // no-op when no default application is set, even though the javadoc only
    // documents NPE for null arguments - confirm this matches the JSF wrapper contract
    if (defaultApplication != null) {
      defaultApplication.addBehavior(behaviorId, behaviorClass);
    }
  }
}
public class NumberList { /** * This assumes both maps contain the same keys . If they don ' t then keys will be lost . * @ param currentMap * @ param previousMap * @ return */ public static NumberList delta ( Map < String , Object > currentMap , Map < String , Object > previousMap ) { } }
LinkedHashMap < String , Long > values = new LinkedHashMap < String , Long > ( currentMap . size ( ) ) ; if ( currentMap . size ( ) != previousMap . size ( ) ) { throw new IllegalArgumentException ( "Maps must have the same keys" ) ; } for ( Entry < String , Object > k : currentMap . entrySet ( ) ) { Object v = k . getValue ( ) ; Number current = getNumber ( v ) ; Object p = previousMap . get ( k . getKey ( ) ) ; Number previous = null ; if ( p == null ) { previous = 0 ; } else { previous = getNumber ( p ) ; } long d = ( current . longValue ( ) - previous . longValue ( ) ) ; values . put ( k . getKey ( ) , d ) ; } return new NumberList ( values ) ;
public class Traits { /** * Returns true if the specified class node is annotated with the { @ link Trait } interface . * @ param cNode a class node * @ return true if the specified class node is annotated with the { @ link Trait } interface . */ public static boolean isAnnotatedWithTrait ( final ClassNode cNode ) { } }
List < AnnotationNode > traitAnn = cNode . getAnnotations ( Traits . TRAIT_CLASSNODE ) ; return traitAnn != null && ! traitAnn . isEmpty ( ) ;
public class CachingLinkGenerator { /** * Based on DGM toMapString , but with StringBuilder instead of StringBuffer */ protected void appendMapKey ( StringBuilder buffer , Map < String , Object > params ) { } }
if ( params == null || params . isEmpty ( ) ) { buffer . append ( EMPTY_MAP_STRING ) ; buffer . append ( OPENING_BRACKET ) ; } else { buffer . append ( OPENING_BRACKET ) ; Map map = new LinkedHashMap < > ( params ) ; final String requestControllerName = getRequestStateLookupStrategy ( ) . getControllerName ( ) ; if ( map . get ( UrlMapping . ACTION ) != null && map . get ( UrlMapping . CONTROLLER ) == null && map . get ( RESOURCE_PREFIX ) == null ) { Object action = map . remove ( UrlMapping . ACTION ) ; map . put ( UrlMapping . CONTROLLER , requestControllerName ) ; map . put ( UrlMapping . ACTION , action ) ; } if ( map . get ( UrlMapping . NAMESPACE ) == null && map . get ( UrlMapping . CONTROLLER ) == requestControllerName ) { String namespace = getRequestStateLookupStrategy ( ) . getControllerNamespace ( ) ; if ( GrailsStringUtils . isNotEmpty ( namespace ) ) { map . put ( UrlMapping . NAMESPACE , namespace ) ; } } boolean first = true ; for ( Object o : map . entrySet ( ) ) { Map . Entry entry = ( Map . Entry ) o ; Object value = entry . getValue ( ) ; if ( value == null ) continue ; first = appendCommaIfNotFirst ( buffer , first ) ; Object key = entry . getKey ( ) ; if ( RESOURCE_PREFIX . equals ( key ) ) { value = getCacheKeyValueForResource ( value ) ; } appendKeyValue ( buffer , map , key , value ) ; } } buffer . append ( CLOSING_BRACKET ) ;
public class HdfsStatsKeyConverter { /** * Returns the byte encoded representation of a HdfsStatsKey * @ param hdfsStats the HdfsStatsKey to serialize * @ return the byte encoded representation of the HdfsStatsKey */ @ Override public byte [ ] toBytes ( HdfsStatsKey hdfsStatsKey ) { } }
if ( hdfsStatsKey == null || hdfsStatsKey . getQualifiedPathKey ( ) == null ) { return HdfsConstants . EMPTY_BYTES ; } else { if ( StringUtils . isBlank ( hdfsStatsKey . getQualifiedPathKey ( ) . getNamespace ( ) ) ) { // hadoop1 or non federated namespace return ByteUtil . join ( HdfsConstants . SEP_BYTES , Bytes . toBytes ( Long . toString ( hdfsStatsKey . getEncodedRunId ( ) ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getCluster ( ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getPath ( ) ) ) ; } else { // include federated namespace return ByteUtil . join ( HdfsConstants . SEP_BYTES , Bytes . toBytes ( Long . toString ( hdfsStatsKey . getEncodedRunId ( ) ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getCluster ( ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getPath ( ) ) , Bytes . toBytes ( hdfsStatsKey . getQualifiedPathKey ( ) . getNamespace ( ) ) ) ; } }
public class Utility { /** * Replace the { } resources in this string . * @ param reg * @ param map A map of key / values * @ param strResource * @ return */ public static String replaceResources ( String string , ResourceBundle reg , Map < String , Object > map , PropertyOwner propertyOwner , boolean systemProperties ) { } }
if ( string != null ) if ( string . indexOf ( '{' ) == - 1 ) return string ; return Utility . replaceResources ( new StringBuilder ( string ) , reg , map , propertyOwner , systemProperties ) . toString ( ) ;
/**
 * Generates an "AND"-prefixed less-than ({@code <}) SQL fragment.
 *
 * @param field the database column name
 * @param value the comparison value
 * @return this ZealotKhala instance (for chaining)
 */
public ZealotKhala andLessThan(String field, Object value) {
    // delegate to the shared builder: AND prefix, '<' suffix, condition enabled
    return this.doNormal(ZealotConst.AND_PREFIX, field, value, ZealotConst.LT_SUFFIX, true);
}
public class ProtoNetworkBuilder { /** * Processes an { @ link AnnotationGroup } for citations , evidence , and * user - defined annotations . * @ param protoNetwork { @ link ProtoNetwork } , the proto network * @ param annotationMap { @ link Map } , the annotations map to add to * @ param ag { @ link AnnotationGroup } , the annotation group to process */ protected void handleAnnotationGroup ( ProtoNetwork protoNetwork , Map < String , Annotation > annotationMap , AnnotationGroup ag ) { } }
if ( ag != null ) { // handle citation if ( ag . getCitation ( ) != null ) { handleCitationAnnotations ( protoNetwork , ag . getCitation ( ) , annotationMap ) ; } // handle evidence if ( ag . getEvidence ( ) != null ) { handleEvidenceAnnotations ( protoNetwork , ag . getEvidence ( ) , annotationMap ) ; } // handle user annotations ( which were already defined ) if ( hasItems ( ag . getAnnotations ( ) ) ) { for ( Annotation a : ag . getAnnotations ( ) ) { annotationMap . put ( a . getDefinition ( ) . getId ( ) , a ) ; } } }
public class TransientPortletEntityDao { /** * / * ( non - Javadoc ) * @ see org . apereo . portal . portlet . dao . IPortletEntityDao # createPortletEntity ( org . apereo . portal . portlet . om . IPortletDefinitionId , java . lang . String , int ) */ @ Override public IPortletEntity createPortletEntity ( IPortletDefinitionId portletDefinitionId , String layoutNodeId , int userId ) { } }
if ( layoutNodeId . startsWith ( TransientUserLayoutManagerWrapper . SUBSCRIBE_PREFIX ) ) { final String transientLayoutNodeId = layoutNodeId ; layoutNodeId = this . getPersistentLayoutNodeId ( portletDefinitionId ) ; final IPortletEntity portletEntity = this . delegatePortletEntityDao . createPortletEntity ( portletDefinitionId , layoutNodeId , userId ) ; return new TransientPortletEntity ( portletEntity , transientLayoutNodeId ) ; } return this . delegatePortletEntityDao . createPortletEntity ( portletDefinitionId , layoutNodeId , userId ) ;
/**
 * It converts a {@link Calendar} into a {@link DateTime} in the given {@link DateTimeZone}.
 *
 * @param dateTimeZone the time zone ({@link DateTimeZone}) to be used
 * @return the {@link DateTime} created from the input and arguments
 */
public static final <T extends Calendar> Function<T, DateTime> calendarToDateTime(DateTimeZone dateTimeZone) {
    // thin facade over the FnDateTime factory
    return FnDateTime.calendarToDateTime(dateTimeZone);
}
/**
 * Marshalls the given request object into the protocol representation.
 *
 * @param sendUsersMessageRequest the request to marshall; must not be null
 * @param protocolMarshaller the marshaller receiving each bound field
 * @throws SdkClientException if the request is null or any field fails to marshall
 */
public void marshall(SendUsersMessageRequest sendUsersMessageRequest, ProtocolMarshaller protocolMarshaller) {
    if (sendUsersMessageRequest == null) {
        throw new SdkClientException("Invalid argument passed to marshall(...)");
    }
    try {
        // each field is written with its pre-built marshalling binding
        protocolMarshaller.marshall(sendUsersMessageRequest.getContext(), CONTEXT_BINDING);
        protocolMarshaller.marshall(sendUsersMessageRequest.getMessageConfiguration(), MESSAGECONFIGURATION_BINDING);
        protocolMarshaller.marshall(sendUsersMessageRequest.getTraceId(), TRACEID_BINDING);
        protocolMarshaller.marshall(sendUsersMessageRequest.getUsers(), USERS_BINDING);
    } catch (Exception e) {
        // wrap any failure, preserving the original cause
        throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
    }
}
public class MasterProtocol { /** * Reinitialize loopAddresses with all hosts : all servers in randomize order without connected * host . * @ param listener current listener * @ param loopAddresses the list to reinitialize */ private static void resetHostList ( Listener listener , Deque < HostAddress > loopAddresses ) { } }
// if all servers have been connected without result // add back all servers List < HostAddress > servers = new ArrayList < > ( ) ; servers . addAll ( listener . getUrlParser ( ) . getHostAddresses ( ) ) ; Collections . shuffle ( servers ) ; loopAddresses . clear ( ) ; loopAddresses . addAll ( servers ) ;
public class PCapPacketImpl { /** * { @ inheritDoc } */ @ Override public long getArrivalTime ( ) { } }
final long multiplier = pcapGlobalHeader . timestampsInNs ( ) ? 1000000000 : 1000000 ; return this . pcapHeader . getTimeStampSeconds ( ) * multiplier + this . pcapHeader . getTimeStampMicroOrNanoSeconds ( ) ;
public class HadoopBlockLocation { /** * Looks for a domain suffix in a FQDN and strips it if present . * @ param originalHostname * the original hostname , possibly an FQDN * @ return the stripped hostname without the domain suffix */ private static String stripHostname ( final String originalHostname ) { } }
// Check if the hostname domains the domain separator character final int index = originalHostname . indexOf ( DOMAIN_SEPARATOR ) ; if ( index == - 1 ) { return originalHostname ; } // Make sure we are not stripping an IPv4 address final Matcher matcher = IPV4_PATTERN . matcher ( originalHostname ) ; if ( matcher . matches ( ) ) { return originalHostname ; } if ( index == 0 ) { throw new IllegalStateException ( "Hostname " + originalHostname + " starts with a " + DOMAIN_SEPARATOR ) ; } return originalHostname . substring ( 0 , index ) ;
/**
 * Creates the SSL ServerSocket bound to the given host and port.
 *
 * <p>When a key store is configured, an SSLContext is initialized from it;
 * otherwise an anonymous server factory is used. The resulting server socket
 * is then configured with enabled/forbidden cipher suites, enabled protocols,
 * and client-auth mode before being wrapped.
 *
 * @param host the address to bind to, or null for the wildcard address
 * @param port the port to listen on
 * @return the wrapped SSL server socket
 * @throws IOException if the socket cannot be created
 * @throws GeneralSecurityException if SSL initialization fails
 */
public ServerSocketBar create(InetAddress host, int port) throws IOException, GeneralSecurityException {
    SSLServerSocketFactory ssFactory = null;
    if (_keyStore != null) {
        SSLContext sslContext = SSLContext.getInstance(_sslContext);
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(keyManagerFactory());
        kmf.init(_keyStore, keyStorePassword().toCharArray());
        sslContext.init(kmf.getKeyManagers(), null, null);
        /*
        if (_cipherSuites != null)
          sslContext.createSSLEngine().setEnabledCipherSuites(_cipherSuites);
        if (_protocols != null)
          sslContext.createSSLEngine().setEnabledProtocols(_protocols);
        */
        // NOTE(review): this engine is configured and then discarded — setting
        // protocols on it does not affect the factory or the server socket
        // created below (protocols are set again on the socket). Looks like
        // dead code; confirm before removing.
        SSLEngine engine = sslContext.createSSLEngine();
        engine.setEnabledProtocols(enabledProtocols(engine.getSupportedProtocols()));
        ssFactory = sslContext.getServerSocketFactory();
    } else {
        // no key store configured: fall back to anonymous cipher suites
        ssFactory = createAnonymousServerFactory(host, port);
    }
    ServerSocket serverSocket;
    int listen = 100; // accept backlog
    if (host == null)
        serverSocket = ssFactory.createServerSocket(port, listen);
    else
        serverSocket = ssFactory.createServerSocket(port, listen, host);
    SSLServerSocket sslServerSocket = (SSLServerSocket) serverSocket;
    // explicit whitelist takes effect first...
    if (_cipherSuites != null) {
        sslServerSocket.setEnabledCipherSuites(_cipherSuites);
    }
    // ...then forbidden suites are filtered out of whatever is enabled
    if (_cipherSuitesForbidden != null) {
        String[] cipherSuites = sslServerSocket.getEnabledCipherSuites();
        if (cipherSuites == null)
            cipherSuites = sslServerSocket.getSupportedCipherSuites();
        ArrayList<String> cipherList = new ArrayList<String>();
        for (String cipher : cipherSuites) {
            if (!isCipherForbidden(cipher, _cipherSuitesForbidden)) {
                cipherList.add(cipher);
            }
        }
        cipherSuites = new String[cipherList.size()];
        cipherList.toArray(cipherSuites);
        sslServerSocket.setEnabledCipherSuites(cipherSuites);
    }
    sslServerSocket.setEnabledProtocols(enabledProtocols(sslServerSocket.getSupportedProtocols()));
    // client authentication: "required" forces it, "optional" requests it
    if ("required".equals(_verifyClient))
        sslServerSocket.setNeedClientAuth(true);
    else if ("optional".equals(_verifyClient))
        sslServerSocket.setWantClientAuth(true);
    return new ServerSocketWrapper(serverSocket);
}
public class PrefixedPropertiesPersister { /** * Load from json . * @ param props * the props * @ param rd * the rd * @ throws IOException * Signals that an I / O exception has occurred . */ public void loadFromYAML ( final Properties props , final Reader rd ) throws IOException { } }
try { ( ( PrefixedProperties ) props ) . loadFromYAML ( rd ) ; } catch ( final NoSuchMethodError err ) { throw new IOException ( "Cannot load properties JSON file - not using PrefixedProperties: " + err . getMessage ( ) ) ; }
public class StratifiedSampling { /** * Samples nh ids from each strata based on their Frequency Table * @ param strataFrequencyTable * @ param nh * @ param withReplacement * @ return */ public static TransposeDataCollection weightedProbabilitySampling ( AssociativeArray2D strataFrequencyTable , AssociativeArray nh , boolean withReplacement ) { } }
TransposeDataCollection sampledIds = new TransposeDataCollection ( ) ; for ( Map . Entry < Object , AssociativeArray > entry : strataFrequencyTable . entrySet ( ) ) { Object strata = entry . getKey ( ) ; Number sampleN = ( ( Number ) nh . get ( strata ) ) ; if ( sampleN == null ) { continue ; } sampledIds . put ( strata , SimpleRandomSampling . weightedSampling ( entry . getValue ( ) , sampleN . intValue ( ) , withReplacement ) ) ; } return sampledIds ;
/**
 * Converts an IfcWindowStyleConstructionEnum value to its string form.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 *
 * @generated
 */
public String convertIfcWindowStyleConstructionEnumToString(EDataType eDataType, Object instanceValue) {
    // generated EMF converter: null stays null, otherwise the enum's toString
    return instanceValue == null ? null : instanceValue.toString();
}
/**
 * Greater than or equal to operation: elementwise x >= y.<br>
 * If x and y arrays have equal shape, the output shape is the same as these inputs.<br>
 * Note: supports broadcasting if x and y have different shapes and are broadcastable.<br>
 * Returns an array with values 1 where condition is satisfied, or value 0 otherwise.
 *
 * @param x Input 1
 * @param y Input 2
 * @return Output SDVariable with values 0 and 1 based on where the condition is satisfied
 */
public SDVariable gte(SDVariable x, SDVariable y) {
    // delegate to the named variant with no explicit variable name
    return gte(null, x, y);
}
/**
 * A list of <code><a>SendMessageBatchRequestEntry</a></code> items.
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setEntries(java.util.Collection)} or {@link #withEntries(java.util.Collection)}
 * if you want to override the existing values.
 *
 * @param entries the entries to append
 * @return this request, for call chaining
 */
public SendMessageBatchRequest withEntries(SendMessageBatchRequestEntry... entries) {
    // lazily create the backing list, pre-sized to the varargs length
    if (this.entries == null) {
        setEntries(new com.amazonaws.internal.SdkInternalList<SendMessageBatchRequestEntry>(entries.length));
    }
    for (SendMessageBatchRequestEntry ele : entries) {
        this.entries.add(ele);
    }
    return this;
}
public class DocFileFactory { /** * Get the appropriate factory , based on the file manager given in the * configuration . * @ param configuration the configuration for this doclet * @ return the factory associated with this configuration */ public static synchronized DocFileFactory getFactory ( Configuration configuration ) { } }
DocFileFactory f = configuration . docFileFactory ; if ( f == null ) { JavaFileManager fm = configuration . getFileManager ( ) ; if ( fm instanceof StandardJavaFileManager ) { f = new StandardDocFileFactory ( configuration ) ; } else { throw new IllegalStateException ( ) ; } configuration . docFileFactory = f ; } return f ;
/**
 * Get the specified item by its key/id.
 *
 * @param columnFamily column family of the item
 * @param id id/key of the item
 * @param itemClass item's class
 * @return new instance with the item's columns propagated
 * @throws Exception on query or mapping errors
 */
public <T, K> T get(ColumnFamily<K, String> columnFamily, K id, Class<T> itemClass) throws Exception {
    // resolve (or build) the column mapping for the item class
    Mapping<T> mapping = getMapping(itemClass);
    // fetch all columns of the row identified by id
    ColumnList<String> result = keyspace.prepareQuery(columnFamily).getKey(id).execute().getResult();
    // materialize a new instance populated from the column list
    return mapping.newInstance(result);
}
/**
 * This is a utility method used by subclasses to handle standard parts of
 * setting up and running an Animator: it sets the {@link #getDuration()
 * duration} and the {@link #getStartDelay() startDelay}, starts the
 * animation, and, when the animator ends, calls {@link #end()}.
 *
 * @param animator The Animator to be run during this transition, or null to
 *        end the transition immediately.
 * @hide
 */
protected void animate(@Nullable Animator animator) {
    // TODO: maybe pass auto-end as a boolean parameter?
    if (animator == null) {
        end(); // no animation: finish the transition right away
    } else {
        // only propagate duration/delay/interpolator when explicitly configured
        if (getDuration() >= 0) {
            animator.setDuration(getDuration());
        }
        if (getStartDelay() >= 0) {
            // transition delay is added on top of any delay already on the animator
            animator.setStartDelay(getStartDelay() + animator.getStartDelay());
        }
        if (getInterpolator() != null) {
            animator.setInterpolator(getInterpolator());
        }
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(@NonNull Animator animation) {
                // auto-end the transition, then detach to avoid duplicate callbacks
                end();
                animation.removeListener(this);
            }
        });
        animator.start();
    }
}
public class AbstractDocumentQuery { /** * Specifies a boost weight to the last where clause . * The higher the boost factor , the more relevant the term will be . * boosting factor where 1.0 is default , less than 1.0 is lower weight , greater than 1.0 is higher weight * http : / / lucene . apache . org / java / 2_4_0 / queryparsersyntax . html # Boosting % 20a % 20Term * @ param boost Boost value */ @ Override public void _boost ( double boost ) { } }
if ( boost == 1.0 ) { return ; } List < QueryToken > tokens = getCurrentWhereTokens ( ) ; if ( tokens . isEmpty ( ) ) { throw new IllegalStateException ( "Missing where clause" ) ; } QueryToken whereToken = tokens . get ( tokens . size ( ) - 1 ) ; if ( ! ( whereToken instanceof WhereToken ) ) { throw new IllegalStateException ( "Missing where clause" ) ; } if ( boost < 0.0 ) { throw new IllegalArgumentException ( "Boost factor must be a non-negative number" ) ; } ( ( WhereToken ) whereToken ) . getOptions ( ) . setBoost ( boost ) ;
/**
 * Method for reading content as a JSON Tree (of type that configured
 * {@link TreeCodec}, see {@link #with(TreeCodec)}) supports.
 *
 * @param source a JsonParser, or any source a parser can be constructed from
 * @return the parsed tree
 * @throws IOException on read failure
 * @throws JSONObjectException on malformed content
 */
@SuppressWarnings("unchecked")
public <T extends TreeNode> TreeNode treeFrom(Object source) throws IOException, JSONObjectException {
    if (_treeCodec == null) {
        _noTreeCodec("read TreeNode"); // throws — no codec configured
    }
    if (source instanceof JsonParser) {
        // caller-supplied parser: caller owns its lifecycle, so no close here
        JsonParser p = _initForReading((JsonParser) source);
        T result = (T) _treeCodec.readTree(p);
        p.clearCurrentToken();
        return result;
    }
    // we created the parser, so we are responsible for closing it
    JsonParser p = _parser(source);
    try {
        _initForReading(_config(p));
        T result = (T) _treeCodec.readTree(p);
        // null out the local before the success-path close so the catch block
        // does not attempt a second close on the same parser
        JsonParser p0 = p;
        p = null;
        _close(p0, null);
        return result;
    } catch (Exception e) {
        _close(p, e);
        // NOTE(review): presumably _close(p, e) rethrows, making this return
        // unreachable — confirm against the helper's contract.
        return null;
    }
}
/**
 * Reads a digital signature file and verifies the message against the
 * signature using the embedded public key.
 *
 * @param signfile path of the signature file to read
 * @return true if verification succeeds, false otherwise
 */
public static boolean validateSign(String signfile) {
    // read the public key (stored at slot 2 of the file)
    PublicKey mypubkey = (PublicKey) getObjFromFile(signfile, 2);
    // read the signature bytes (slot 1)
    byte[] signed = (byte[]) getObjFromFile(signfile, 1);
    // read the signed message (slot 3)
    String info = (String) getObjFromFile(signfile, 3);
    try {
        // create a Signature instance and verify with public key + signature
        Signature signetcheck = Signature.getInstance("DSA");
        // initialize for verification with the public key
        signetcheck.initVerify(mypubkey);
        // feed the message bytes to be verified
        // NOTE(review): info.getBytes() uses the platform default charset —
        // signer and verifier must agree on encoding; confirm.
        signetcheck.update(info.getBytes());
        System.out.println(info);
        // verify the supplied signature
        return signetcheck.verify(signed);
    } catch (Exception e) {
        // NOTE(review): failure is swallowed into a false result after printing
        // the stack trace; callers cannot distinguish "invalid" from "error".
        e.printStackTrace();
        return false;
    }
}
public class FileUtils { /** * save bytes into file * @ param bytes * @ param filePath * @ param fileName * @ throws IOException */ public static void save ( byte [ ] bytes , String filePath , String fileName ) throws IOException { } }
Path path = Paths . get ( filePath , fileName ) ; mkirDirs ( path . getParent ( ) ) ; String pathStr = path . toString ( ) ; File file = new File ( pathStr ) ; write ( bytes , file ) ;
/**
 * Lists all the steps of the target BMR cluster.
 *
 * @param clusterId The ID of the target BMR cluster.
 * @param marker The start record of steps.
 * @param maxKeys The maximum number of steps returned.
 * @return The response containing a list of the BMR steps owned by the cluster.
 *         The steps' records start from the marker and the size of list is
 *         limited below maxKeys.
 */
public ListStepsResponse listSteps(String clusterId, String marker, int maxKeys) {
    // convenience overload: build the request object and delegate
    return listSteps(new ListStepsRequest().withClusterId(clusterId).withMaxKeys(maxKeys).withMarker(marker));
}
public class ServletSupport { /** * Obtains binary resource from classpath and writes it to http response . * @ param response * @ param path file name of the resource to write to http servlet response * @ param contentType */ public static void writeClassPathResource ( HttpServletResponse response , String path , String contentType ) throws IOException { } }
if ( path . startsWith ( "/" ) ) { path = path . substring ( 1 ) ; } // System . out . println ( new LogEntry ( " trying to retrieve " + path + " from classloader " ) ; InputStream input = ServletSupport . class . getClassLoader ( ) . getResourceAsStream ( path ) ; if ( input != null ) { response . setContentType ( contentType ) ; OutputStream out = response . getOutputStream ( ) ; int available = input . available ( ) ; while ( available > 0 ) { byte [ ] buf = new byte [ available ] ; input . read ( buf ) ; out . write ( buf ) ; available = input . available ( ) ; } } else { System . out . println ( new LogEntry ( "resource " + path + " not found" ) ) ; }
/**
 * Create an instance of {@link JAXBElement}{@code <}{@link LineStringPropertyType}{@code >}.
 *
 * @param value Java instance representing xml element's value.
 * @return the new instance of {@link JAXBElement}{@code <}{@link LineStringPropertyType}{@code >}
 */
@XmlElementDecl(namespace = "http://www.opengis.net/gml", name = "lineStringMember")
public JAXBElement<LineStringPropertyType> createLineStringMember(LineStringPropertyType value) {
    // generated JAXB factory method: wraps the value with the element QName
    return new JAXBElement<LineStringPropertyType>(_LineStringMember_QNAME, LineStringPropertyType.class, null, value);
}
/**
 * {@inheritDoc}
 *
 * <p>Length-less variant: delegates with {@code Long.MAX_VALUE} so the whole
 * reader is consumed.
 */
public void updateCharacterStream(int columnIndex, Reader value) throws SQLException {
    updateCharacterStream(columnIndex, value, Long.MAX_VALUE);
}
/**
 * This method is intended for internal use only. Returns the marshaled request
 * configured with additional parameters to enable operation dry-run.
 */
@Override
public Request<DescribePlacementGroupsRequest> getDryRunRequest() {
    // marshall normally, then flag the request as a dry run
    Request<DescribePlacementGroupsRequest> request = new DescribePlacementGroupsRequestMarshaller().marshall(this);
    request.addParameter("DryRun", Boolean.toString(true));
    return request;
}
/**
 * Handler for commands sent from the stream. Dispatches each known command
 * type to its dedicated handler; unknown types indicate a programming error.
 *
 * @param ctx the channel context
 * @param msg the command object to dispatch
 * @param promise completed when the command has been processed
 * @throws Exception propagated from the individual command handlers
 */
@Override
public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception {
    if (msg instanceof CreateStreamCommand) {
        createStream((CreateStreamCommand) msg, promise);
    } else if (msg instanceof SendGrpcFrameCommand) {
        sendGrpcFrame(ctx, (SendGrpcFrameCommand) msg, promise);
    } else if (msg instanceof CancelClientStreamCommand) {
        cancelStream(ctx, (CancelClientStreamCommand) msg, promise);
    } else if (msg instanceof SendPingCommand) {
        sendPingFrame(ctx, (SendPingCommand) msg, promise);
    } else if (msg instanceof GracefulCloseCommand) {
        gracefulClose(ctx, (GracefulCloseCommand) msg, promise);
    } else if (msg instanceof ForcefulCloseCommand) {
        forcefulClose(ctx, (ForcefulCloseCommand) msg, promise);
    } else if (msg == NOOP_MESSAGE) {
        // no-op sentinel: flush an empty buffer so the promise still completes
        ctx.write(Unpooled.EMPTY_BUFFER, promise);
    } else {
        // reaching here means an unsupported command type was queued
        throw new AssertionError("Write called for unexpected type: " + msg.getClass().getName());
    }
}
/**
 * Determine the next time (in milliseconds) that is 'included' by the Calendar
 * after the given time. Return the original value if timeStamp is included.
 * Return 0 if all days are excluded.
 * Note that this Calendar only has full-day precision.
 */
@Override
public long getNextIncludedTime(final long nTimeStamp) {
    if (m_bExcludeAll == true) {
        return 0; // every weekday excluded: no included time exists
    }
    // Call base calendar implementation first; it may push the timestamp forward
    long timeStamp = nTimeStamp;
    final long baseTime = super.getNextIncludedTime(timeStamp);
    if ((baseTime > 0) && (baseTime > timeStamp)) {
        timeStamp = baseTime;
    }
    // Get a calendar positioned at 00:00:00 of the (possibly advanced) day
    final Calendar cl = getStartOfDayJavaCalendar(timeStamp);
    int wday = cl.get(Calendar.DAY_OF_WEEK);
    if (!isDayExcluded(wday)) {
        return timeStamp; // the day is included: return the original value
    }
    // advance day by day until an included weekday is found
    // (terminates within 7 iterations because m_bExcludeAll was false above)
    while (isDayExcluded(wday) == true) {
        cl.add(Calendar.DATE, 1);
        wday = cl.get(Calendar.DAY_OF_WEEK);
    }
    return cl.getTime().getTime();
}
public class CmsLoginManager { /** * Resets lock from user . < p > * @ param username to reset lock for */ public void resetUserTempDisable ( String username ) { } }
Set < CmsUserData > data = TEMP_DISABLED_USER . get ( username ) ; if ( data == null ) { return ; } for ( CmsUserData userData : data ) { userData . reset ( ) ; } TEMP_DISABLED_USER . remove ( username ) ;
public class SparkJobContext { /** * Appliesc compopnent indices via the component metadata to enable proper functioning of the * { @ link # getComponentByKey ( String ) } and { @ link # getComponentKey ( ComponentJob ) } methods . * @ param analysisJobBuilder * @ param currentComponentIndex */ private void applyComponentIndexForKeyLookups ( final AnalysisJobBuilder analysisJobBuilder , final AtomicInteger currentComponentIndex ) { } }
final Collection < ComponentBuilder > componentBuilders = analysisJobBuilder . getComponentBuilders ( ) ; for ( final ComponentBuilder componentBuilder : componentBuilders ) { componentBuilder . setMetadataProperty ( METADATA_PROPERTY_COMPONENT_INDEX , Integer . toString ( currentComponentIndex . getAndIncrement ( ) ) ) ; } final List < AnalysisJobBuilder > childJobBuilders = analysisJobBuilder . getConsumedOutputDataStreamsJobBuilders ( ) ; for ( final AnalysisJobBuilder childJobBuilder : childJobBuilders ) { applyComponentIndexForKeyLookups ( childJobBuilder , currentComponentIndex ) ; }
/**
 * Serializes an Xid to a string: formatId, Base64(globalTransactionId) and
 * Base64(branchQualifier), joined with '_'. Inverse of the companion
 * string-to-xid routine.
 *
 * @param xid the transaction id to serialize
 * @return the encoded string form
 */
public static String xidToString(Xid xid) {
    // DONT_BREAK_LINES keeps the Base64 output on a single line so '_' remains
    // an unambiguous separator
    return xid.getFormatId() + "_" + Base64.encodeBytes(xid.getGlobalTransactionId(), Base64.DONT_BREAK_LINES) + "_" + Base64.encodeBytes(xid.getBranchQualifier(), Base64.DONT_BREAK_LINES);
}
public class Position { /** * Calculates the two - dimensional great circle distance ( haversine ) * @ param other position to which we calculate the distance * @ return distance between the this and other position in meters */ public Double haversine ( Position other ) { } }
double lon0r = toRadians ( this . longitude ) ; double lat0r = toRadians ( this . latitude ) ; double lon1r = toRadians ( other . longitude ) ; double lat1r = toRadians ( other . latitude ) ; double a = pow ( sin ( ( lat1r - lat0r ) / 2.0 ) , 2 ) ; double b = cos ( lat0r ) * cos ( lat1r ) * pow ( sin ( ( lon1r - lon0r ) / 2.0 ) , 2 ) ; return 6371000.0 * 2 * asin ( sqrt ( a + b ) ) ;
/**
 * Gets the directory where the attachments (embeddings) are stored.
 *
 * @return directory for attachments, under the report directory
 */
public File getEmbeddingDirectory() {
    // <reportDir>/<BASE_DIRECTORY>/<EMBEDDINGS_DIRECTORY>
    return new File(getReportDirectory().getAbsolutePath(), ReportBuilder.BASE_DIRECTORY + File.separatorChar + Configuration.EMBEDDINGS_DIRECTORY);
}
/**
 * Declares internal data structures based on the input image pyramid: per-layer
 * derivative images for the previous and current frames, a Gaussian pyramid for
 * the previous image, and one KLT feature per track slot.
 *
 * @param image the input pyramid whose layer count/sizes drive the allocation
 */
protected void declareDataStructures(PyramidDiscrete<I> image) {
    numPyramidLayers = image.getNumLayers();
    // reflective array creation is required because D is a generic image type
    previousDerivX = (D[]) Array.newInstance(derivType, image.getNumLayers());
    previousDerivY = (D[]) Array.newInstance(derivType, image.getNumLayers());
    currentDerivX = (D[]) Array.newInstance(derivType, image.getNumLayers());
    currentDerivY = (D[]) Array.newInstance(derivType, image.getNumLayers());
    // allocate one derivative image pair (x and y) per pyramid layer, sized to that layer
    for (int i = 0; i < image.getNumLayers(); i++) {
        int w = image.getWidth(i);
        int h = image.getHeight(i);
        previousDerivX[i] = GeneralizedImageOps.createSingleBand(derivType, w, h);
        previousDerivY[i] = GeneralizedImageOps.createSingleBand(derivType, w, h);
        currentDerivX[i] = GeneralizedImageOps.createSingleBand(derivType, w, h);
        currentDerivY[i] = GeneralizedImageOps.createSingleBand(derivType, w, h);
    }
    // Gaussian pyramid mirroring the input pyramid's scales, for the previous frame
    Class imageClass = image.getImageType().getImageClass();
    previousImage = FactoryPyramid.discreteGaussian(image.getScales(), -1, 1, false, ImageType.single(imageClass));
    previousImage.initialize(image.getInputWidth(), image.getInputHeight());
    // pre-create one pyramid KLT feature per track slot
    for (int i = 0; i < tracks.length; i++) {
        Track t = new Track();
        t.klt = new PyramidKltFeature(numPyramidLayers, featureRadius);
        tracks[i] = t;
    }
}
/**
 * Parses the given substring of {@code uri} into this HttpURI, replacing any
 * previous state.
 *
 * @param uri the string containing the URI
 * @param offset start index of the URI within the string
 * @param length number of characters belonging to the URI
 */
public void parse(String uri, int offset, int length) {
    clear(); // discard any previously parsed state
    int end = offset + length;
    _uri = uri.substring(offset, end);
    // run the state machine over the selected character range
    parse(State.START, uri, offset, end);
}
/**
 * If a list of annotations contains a reference to java.lang.Deprecated,
 * set the DEPRECATED flag.
 * If the annotation is marked forRemoval=true, also set DEPRECATED_REMOVAL.
 */
private void handleDeprecatedAnnotations(List<JCAnnotation> annotations, Symbol sym) {
    // walk the javac cons-list of annotations
    for (List<JCAnnotation> al = annotations; !al.isEmpty(); al = al.tail) {
        JCAnnotation a = al.head;
        if (a.annotationType.type == syms.deprecatedType) {
            sym.flags_field |= (Flags.DEPRECATED | Flags.DEPRECATED_ANNOTATION);
            // look for an explicit forRemoval=... element on the annotation
            a.args.stream()
                    .filter(e -> e.hasTag(ASSIGN))
                    .map(e -> (JCAssign) e)
                    .filter(assign -> TreeInfo.name(assign.lhs) == names.forRemoval)
                    .findFirst()
                    .ifPresent(assign -> {
                        // only a literal 'true' marks the symbol for removal
                        JCExpression rhs = TreeInfo.skipParens(assign.rhs);
                        if (rhs.hasTag(LITERAL) && Boolean.TRUE.equals(((JCLiteral) rhs).getValue())) {
                            sym.flags_field |= DEPRECATED_REMOVAL;
                        }
                    });
        }
    }
}
public class ContentRest { /** * see ContentResource . copyContent ( ) * @ return 201 response indicating content copied successfully */ private Response copyContent ( String spaceID , String contentID , String storeID , String sourceStoreID , String copySource ) { } }
StringBuilder msg = new StringBuilder ( "copying content from (" ) ; msg . append ( copySource ) ; msg . append ( ") to (" ) ; msg . append ( spaceID ) ; msg . append ( " / " ) ; msg . append ( contentID ) ; msg . append ( ", " ) ; msg . append ( storeID ) ; msg . append ( ")" ) ; log . info ( msg . toString ( ) ) ; try { return doCopyContent ( spaceID , contentID , storeID , sourceStoreID , copySource ) ; } catch ( InvalidIdException e ) { return responseBad ( msg . toString ( ) , e , BAD_REQUEST ) ; } catch ( InvalidRequestException e ) { return responseBad ( msg . toString ( ) , e , BAD_REQUEST ) ; } catch ( ResourceNotFoundException e ) { return responseNotFound ( msg . toString ( ) , e , NOT_FOUND ) ; } catch ( ResourceStateException e ) { return responseBad ( msg . toString ( ) , e , CONFLICT ) ; } catch ( ResourceException e ) { return responseBad ( msg . toString ( ) , e , INTERNAL_SERVER_ERROR ) ; } catch ( Exception e ) { return responseBad ( msg . toString ( ) , e , INTERNAL_SERVER_ERROR ) ; }
/**
 * Sets the elevation of the button bar, which is shown when using the activity
 * as a wizard.
 *
 * @param elevation The elevation, which should be set, in dp as an {@link Integer}
 *        value. The elevation must be at least 0 and at maximum 16
 */
public final void setButtonBarElevation(final int elevation) {
    // validate the dp range before storing
    Condition.INSTANCE.ensureAtLeast(elevation, 0, "The elevation must be at least 0");
    Condition.INSTANCE.ensureAtMaximum(elevation, ElevationUtil.MAX_ELEVATION, "The elevation must be at maximum " + ElevationUtil.MAX_ELEVATION);
    this.buttonBarElevation = elevation;
    // push the new elevation to the view immediately
    adaptButtonBarElevation();
}
public class FormattedWriter { /** * Write out a one - line XML tag with a Object datatype , for instance & lttag & gtobject & lt / tag & lt * @ param tag The name of the tag to be written * @ param value The data value to be written * @ throws IOException If an I / O error occurs while attempting to write the characters */ public final void taggedValue ( String tag , Object value ) throws IOException { } }
startTag ( tag ) ; if ( value == null ) { write ( "null" ) ; } else { write ( value . toString ( ) ) ; } endTag ( tag ) ;
public class MemorySession { /** * To get the application Name and Session ID for logging purposes . This will remain the same throughout * this session ' s lifetime , so we just need to get this string once per session . * For a backedSession , this needs to be called during initSession as well . */ public String getAppNameAndID ( ) { } }
StringBuffer sb = new StringBuffer ( " AppName=" ) ; sb . append ( _appName ) ; sb . append ( "; Id=" ) . append ( _sessionId ) ; return sb . toString ( ) ;
public class TmsConfigurationService { /** * Transform a TMS layer description object into a raster layer info object . * @ param tileMap * The TMS layer description object . * @ return The raster layer info object as used by Geomajas . */ public RasterLayerInfo asLayerInfo ( TileMap tileMap ) { } }
RasterLayerInfo layerInfo = new RasterLayerInfo ( ) ; layerInfo . setCrs ( tileMap . getSrs ( ) ) ; layerInfo . setDataSourceName ( tileMap . getTitle ( ) ) ; layerInfo . setLayerType ( LayerType . RASTER ) ; layerInfo . setMaxExtent ( asBbox ( tileMap . getBoundingBox ( ) ) ) ; layerInfo . setTileHeight ( tileMap . getTileFormat ( ) . getHeight ( ) ) ; layerInfo . setTileWidth ( tileMap . getTileFormat ( ) . getWidth ( ) ) ; List < ScaleInfo > zoomLevels = new ArrayList < ScaleInfo > ( tileMap . getTileSets ( ) . getTileSets ( ) . size ( ) ) ; for ( TileSet tileSet : tileMap . getTileSets ( ) . getTileSets ( ) ) { zoomLevels . add ( asScaleInfo ( tileSet ) ) ; } layerInfo . setZoomLevels ( zoomLevels ) ; return layerInfo ;
public class BaseEncoding {
    /**
     * Decodes the specified character sequence, and returns the resulting {@code byte[]}.
     * This is the inverse operation to {@link #encode(byte[])}.
     *
     * @throws DecodingException if the input is not a valid encoded string according to this
     *     encoding
     */
    final byte[] decodeChecked(CharSequence chars) throws DecodingException {
        // Strip trailing padding characters before decoding.
        chars = padding().trimTrailingFrom(chars);
        ByteInput decodedInput = decodingStream(asCharInput(chars));
        // Over-allocate to the maximum possible decoded size; actual length is tracked in index.
        byte[] tmp = new byte[maxDecodedSize(chars.length())];
        int index = 0;
        try {
            for (int i = decodedInput.read(); i != -1; i = decodedInput.read()) {
                tmp[index++] = (byte) i;
            }
        } catch (DecodingException badInput) {
            // Propagate invalid-input failures to the caller unchanged.
            throw badInput;
        } catch (IOException impossible) {
            // The underlying input is an in-memory CharSequence, so no real I/O can fail.
            throw new AssertionError(impossible);
        }
        // Trim the buffer down to the bytes actually produced.
        return extract(tmp, index);
    }
}
public class WebSocketService { /** * Connect to a WebSocket server . * @ throws ConnectException thrown if failed to connect to the server via WebSocket protocol */ public void connect ( ) throws ConnectException { } }
try { connectToWebSocket ( ) ; setWebSocketListener ( ) ; } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; log . warn ( "Interrupted while connecting via WebSocket protocol" ) ; }
public class CmsJspNavBuilder { /** * This method builds a complete navigation tree with entries of all branches * from the specified folder . < p > * @ param folder folder the root folder of the navigation tree * @ param endLevel the end level of the navigation * @ return list of navigation elements , in depth first order */ public List < CmsJspNavElement > getSiteNavigation ( String folder , int endLevel ) { } }
return getSiteNavigation ( folder , Visibility . navigation , endLevel ) ;
public class KerasDense { /** * Set weights for layer . * @ param weights Dense layer weights */ @ Override public void setWeights ( Map < String , INDArray > weights ) throws InvalidKerasConfigurationException { } }
this . weights = new HashMap < > ( ) ; if ( weights . containsKey ( conf . getKERAS_PARAM_NAME_W ( ) ) ) this . weights . put ( DefaultParamInitializer . WEIGHT_KEY , weights . get ( conf . getKERAS_PARAM_NAME_W ( ) ) ) ; else throw new InvalidKerasConfigurationException ( "Parameter " + conf . getKERAS_PARAM_NAME_W ( ) + " does not exist in weights" ) ; if ( hasBias ) { if ( weights . containsKey ( conf . getKERAS_PARAM_NAME_B ( ) ) ) this . weights . put ( DefaultParamInitializer . BIAS_KEY , weights . get ( conf . getKERAS_PARAM_NAME_B ( ) ) ) ; else throw new InvalidKerasConfigurationException ( "Parameter " + conf . getKERAS_PARAM_NAME_B ( ) + " does not exist in weights" ) ; } removeDefaultWeights ( weights , conf ) ;
public class ServerBuilder { /** * Configures SSL or TLS of the default { @ link VirtualHost } from the specified { @ code keyCertChainFile } , * { @ code keyFile } and { @ code keyPassword } . * @ throws IllegalStateException if the default { @ link VirtualHost } has been set via * { @ link # defaultVirtualHost ( VirtualHost ) } already */ public ServerBuilder tls ( File keyCertChainFile , File keyFile , @ Nullable String keyPassword ) throws SSLException { } }
defaultVirtualHostBuilderUpdated ( ) ; defaultVirtualHostBuilder . tls ( keyCertChainFile , keyFile , keyPassword ) ; return this ;
public class Tools { /** * 处理浮点数 , 保留小数点后两位 , 不足的补0 * @ param count 浮点数 * @ return 处理结果 , 例如 : 1.00、1.10 */ public static String dealDouble ( double count ) { } }
if ( count < 0.005 && count > - 0.005 ) { return "0.00" ; } String result = NUMFORMAT . format ( count ) ; return result . startsWith ( "." ) ? "0" + result : result ;
public class LinearSearch {
    /**
     * Search for the minimum element in the array.
     *
     * @param byteArray array that we are searching in
     * @return the minimum element in the array
     * @throws IllegalArgumentException if the array has no elements
     */
    public static byte searchMin(byte[] byteArray) {
        if (byteArray.length == 0) {
            throw new IllegalArgumentException("The array you provided does not have any elements");
        }
        // Single linear pass; comparing the first element with itself is harmless.
        byte smallest = byteArray[0];
        for (byte candidate : byteArray) {
            if (candidate < smallest) {
                smallest = candidate;
            }
        }
        return smallest;
    }
}
public class TimedMetadataInsertionMarshaller { /** * Marshall the given parameter object . */ public void marshall ( TimedMetadataInsertion timedMetadataInsertion , ProtocolMarshaller protocolMarshaller ) { } }
if ( timedMetadataInsertion == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( timedMetadataInsertion . getId3Insertions ( ) , ID3INSERTIONS_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FlowTypeCheck { /** * From an arbitrary type , extract the array type it represents which is either * readable or writeable depending on the context . */ public SemanticType . Array extractArrayType ( SemanticType type , Environment environment , ReadWriteTypeExtractor . Combinator < SemanticType . Array > combinator , SyntacticItem item ) { } }
if ( type != null ) { SemanticType . Array sourceArrayT = rwTypeExtractor . apply ( type , environment , combinator ) ; if ( sourceArrayT == null ) { syntaxError ( item , EXPECTED_ARRAY ) ; } else { return sourceArrayT ; } } return null ;
public class CommerceNotificationTemplateUserSegmentRelLocalServiceBaseImpl { /** * Returns the number of rows matching the dynamic query . * @ param dynamicQuery the dynamic query * @ param projection the projection to apply to the query * @ return the number of rows matching the dynamic query */ @ Override public long dynamicQueryCount ( DynamicQuery dynamicQuery , Projection projection ) { } }
return commerceNotificationTemplateUserSegmentRelPersistence . countWithDynamicQuery ( dynamicQuery , projection ) ;
public class SSTableRewriter { /** * attempts to append the row , if fails resets the writer position */ public RowIndexEntry tryAppend ( AbstractCompactedRow row ) { } }
writer . mark ( ) ; try { return append ( row ) ; } catch ( Throwable t ) { writer . resetAndTruncate ( ) ; throw t ; }
public class LoggingLogProvider { /** * documentation inherited from interface */ public void log ( int level , String moduleName , String message ) { } }
getLogger ( moduleName ) . log ( getLevel ( level ) , message ) ;
public class ClickEvent { /** * Creates a click event that changes to a page . * @ param page the page to change to * @ return a click event */ public static @ NonNull ClickEvent changePage ( final @ NonNull String page ) { } }
return new ClickEvent ( Action . CHANGE_PAGE , page ) ;
public class snmpalarm { /** * Use this API to update snmpalarm resources . */ public static base_responses update ( nitro_service client , snmpalarm resources [ ] ) throws Exception { } }
base_responses result = null ; if ( resources != null && resources . length > 0 ) { snmpalarm updateresources [ ] = new snmpalarm [ resources . length ] ; for ( int i = 0 ; i < resources . length ; i ++ ) { updateresources [ i ] = new snmpalarm ( ) ; updateresources [ i ] . trapname = resources [ i ] . trapname ; updateresources [ i ] . thresholdvalue = resources [ i ] . thresholdvalue ; updateresources [ i ] . normalvalue = resources [ i ] . normalvalue ; updateresources [ i ] . time = resources [ i ] . time ; updateresources [ i ] . state = resources [ i ] . state ; updateresources [ i ] . severity = resources [ i ] . severity ; updateresources [ i ] . logging = resources [ i ] . logging ; } result = update_bulk_request ( client , updateresources ) ; } return result ;
public class AzureClient {
    /**
     * Polls from the location header and updates the polling state with the
     * polling response for a PUT operation.
     *
     * @param pollingState the polling state for the current operation
     * @param <T> the return type of the caller
     * @return an observable emitting the updated polling state
     */
    private <T> Observable<PollingState<T>> updateStateFromLocationHeaderOnPutAsync(final PollingState<T> pollingState) {
        return pollAsync(pollingState.locationHeaderLink(), pollingState.loggingContext())
                .flatMap(new Func1<Response<ResponseBody>, Observable<PollingState<T>>>() {
                    @Override
                    public Observable<PollingState<T>> call(Response<ResponseBody> response) {
                        int statusCode = response.code();
                        if (statusCode == 202) {
                            // 202 Accepted: operation still running; record the response
                            // and keep the state in progress.
                            pollingState.withResponse(response);
                            pollingState.withStatus(AzureAsyncOperation.IN_PROGRESS_STATUS, statusCode);
                        } else if (statusCode == 200 || statusCode == 201) {
                            // 200/201: operation finished; parse the terminal PUT/PATCH response.
                            try {
                                pollingState.updateFromResponseOnPutPatch(response);
                            } catch (CloudException | IOException e) {
                                // Surface parse/service failures through the Rx error channel.
                                return Observable.error(e);
                            }
                        }
                        // Other status codes leave the polling state untouched.
                        return Observable.just(pollingState);
                    }
                });
    }
}
public class StringMan {
    /**
     * Concats elements in array between <tt>fromIndex</tt>, inclusive, to <tt>toIndex</tt>,
     * inclusive, inserting <tt>separator</tt> between them.
     *
     * @param array     the source elements
     * @param separator the string inserted between consecutive elements
     * @param fromIndex index of the first element, inclusive
     * @param toIndex   index of the last element, inclusive
     * @return the joined string (empty when the range is empty)
     */
    public static String concat(String[] array, String separator, int fromIndex, int toIndex) {
        StringBuilder buf = new StringBuilder();
        for (int i = fromIndex; i <= toIndex; i++) {
            // Bug fix: decide separator insertion by position rather than by
            // "buf.length() > 0" -- the old check silently dropped separators
            // following leading empty-string elements.
            if (i > fromIndex) {
                buf.append(separator);
            }
            buf.append(array[i]);
        }
        return buf.toString();
    }
}
public class FileBlockInfo { /** * < code > optional . alluxio . grpc . BlockInfo blockInfo = 1 ; < / code > */ public alluxio . grpc . BlockInfoOrBuilder getBlockInfoOrBuilder ( ) { } }
return blockInfo_ == null ? alluxio . grpc . BlockInfo . getDefaultInstance ( ) : blockInfo_ ;
public class ExtensionHttpSessions {
    /**
     * Gets the http session tokens set for the first site matching a given Context.
     *
     * @param context the context
     * @return the http session tokens set for the context, or {@code null} when no site matches
     */
    public HttpSessionTokensSet getHttpSessionTokensSetForContext(Context context) {
        // TODO: Proper implementation. Hack for now: probe each known site as an
        // http URL and return the first one that falls inside the context.
        for (Entry<String, HttpSessionTokensSet> entry : this.sessionTokens.entrySet()) {
            String candidateSite = "http://" + entry.getKey();
            if (context.isInContext(candidateSite)) {
                return entry.getValue();
            }
        }
        return null;
    }
}
public class AmazonEC2Client { /** * Deletes the specified EBS volume . The volume must be in the < code > available < / code > state ( not attached to an * instance ) . * The volume can remain in the < code > deleting < / code > state for several minutes . * For more information , see < a * href = " https : / / docs . aws . amazon . com / AWSEC2 / latest / UserGuide / ebs - deleting - volume . html " > Deleting an Amazon EBS * Volume < / a > in the < i > Amazon Elastic Compute Cloud User Guide < / i > . * @ param deleteVolumeRequest * Contains the parameters for DeleteVolume . * @ return Result of the DeleteVolume operation returned by the service . * @ sample AmazonEC2 . DeleteVolume * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / ec2-2016-11-15 / DeleteVolume " target = " _ top " > AWS API * Documentation < / a > */ @ Override public DeleteVolumeResult deleteVolume ( DeleteVolumeRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDeleteVolume ( request ) ;
public class XMLUnit { /** * Compare XML documents provided by two Reader classes * @ param control Control document * @ param test Document to test * @ return Diff object describing differences in documents * @ throws SAXException * @ throws IOException */ public static Diff compareXML ( String control , Reader test ) throws SAXException , IOException { } }
return new Diff ( new StringReader ( control ) , test ) ;
public class SessionTransaction { /** * - - - - - public static methods - - - - - */ public static RuntimeException translateClientException ( final ClientException cex ) { } }
switch ( cex . code ( ) ) { case "Neo.ClientError.Schema.ConstraintValidationFailed" : throw new ConstraintViolationException ( cex , cex . code ( ) , cex . getMessage ( ) ) ; // add handlers / translated exceptions for ClientExceptions here . . } // wrap exception if no other cause could be found throw new UnknownClientException ( cex , cex . code ( ) , cex . getMessage ( ) ) ;
public class RoundRobinOperatorStateRepartitioner {
    /**
     * Collects union states from the given parallelSubtaskStates.
     *
     * @param parallelSubtaskStates the operator state handles of every parallel subtask
     * @return map from state name to the (handle, meta-info) pairs of all UNION-distributed
     *     partitions found across the subtasks
     */
    private Map<String, List<Tuple2<StreamStateHandle, OperatorStateHandle.StateMetaInfo>>> collectUnionStates(
            List<List<OperatorStateHandle>> parallelSubtaskStates) {
        Map<String, List<Tuple2<StreamStateHandle, OperatorStateHandle.StateMetaInfo>>> unionStates =
                new HashMap<>(parallelSubtaskStates.size());
        for (List<OperatorStateHandle> subTaskState : parallelSubtaskStates) {
            for (OperatorStateHandle operatorStateHandle : subTaskState) {
                // Subtasks may report no state at all.
                if (operatorStateHandle == null) {
                    continue;
                }
                final Set<Map.Entry<String, OperatorStateHandle.StateMetaInfo>> partitionOffsetEntries =
                        operatorStateHandle.getStateNameToPartitionOffsets().entrySet();
                // Keep only UNION-distributed states; group their locations by state name.
                partitionOffsetEntries.stream()
                        .filter(entry -> entry.getValue().getDistributionMode().equals(OperatorStateHandle.Mode.UNION))
                        .forEach(entry -> {
                            // Pre-size the list with a rough upper bound on expected entries.
                            List<Tuple2<StreamStateHandle, OperatorStateHandle.StateMetaInfo>> stateLocations =
                                    unionStates.computeIfAbsent(entry.getKey(),
                                            k -> new ArrayList<>(parallelSubtaskStates.size() * partitionOffsetEntries.size()));
                            stateLocations.add(Tuple2.of(operatorStateHandle.getDelegateStateHandle(), entry.getValue()));
                        });
            }
        }
        return unionStates;
    }
}
public class Base64 {
    /**
     * Serializes an object and returns the Base64-encoded version of that serialized object.
     *
     * <p>As of v2.3, if the object cannot be serialized or there is another error, the method
     * will throw a java.io.IOException. <b>This is new to v2.3!</b> In earlier versions, it
     * just returned a null value, but in retrospect that's a pretty poor way to handle it.</p>
     *
     * <p>The object is not GZip-compressed before being encoded unless the GZIP option is given.</p>
     *
     * Example options:
     * <pre>
     *   GZIP: gzip-compresses object before encoding it.
     *   DO_BREAK_LINES: break lines at 76 characters
     * </pre>
     * Example: <code>encodeObject(myObj, Base64.GZIP)</code> or
     * <code>encodeObject(myObj, Base64.GZIP | Base64.DO_BREAK_LINES)</code>
     *
     * @param serializableObject The object to encode
     * @param options Specified options
     * @return The Base64-encoded object
     * @see Base64#GZIP
     * @see Base64#DO_BREAK_LINES
     * @throws java.io.IOException if there is an error
     * @since 2.0
     */
    public static String encodeObject(java.io.Serializable serializableObject, int options)
            throws java.io.IOException {
        if (serializableObject == null) {
            throw new NullPointerException("Cannot serialize a null object.");
        } // end if: null
        // Streams
        ReadableByteArrayOutputStream baos = null;
        java.io.OutputStream b64os = null;
        java.io.ObjectOutputStream oos = null;
        try {
            // Pipeline: ObjectOutputStream -> (GZIP) -> Base64 -> ByteArrayOutputStream
            // Note that the optional GZIPping is handled by Base64.OutputStream.
            baos = new ReadableByteArrayOutputStream();
            b64os = new Base64.OutputStream(baos, ENCODE | options);
            oos = new java.io.ObjectOutputStream(b64os);
            oos.writeObject(serializableObject);
        } // end try
        catch (java.io.IOException e) {
            // Catch it and then throw it immediately so that
            // the finally{} block is called for cleanup.
            throw e;
        } // end catch
        finally {
            // Best-effort close of every stream; close failures are deliberately ignored
            // because the encoded data has already been captured in baos.
            try { oos.close(); } catch (Exception e) { }
            try { b64os.close(); } catch (Exception e) { }
            try { baos.close(); } catch (Exception e) { }
        } // end finally
        // Return value according to relevant encoding.
        try {
            return baos.getString(Charset.forName(PREFERRED_ENCODING));
        } // end try
        catch (IllegalCharsetNameException uue) {
            // Fall back to some Java default
            return new String(baos.toByteArray());
        } // end catch
    }
}
public class NumbersToWords { /** * Convert . * @ param number the number * @ return the string */ public static String convert ( final long number ) { } }
// 0 to 999 999 999 999 if ( number == 0 ) { return "zero" ; } String snumber = Long . toString ( number ) ; // pad with " 0" final String mask = "000000000000" ; final DecimalFormat df = new DecimalFormat ( mask ) ; snumber = df . format ( number ) ; // XXXnnnnn final int billions = Integer . parseInt ( snumber . substring ( 0 , 3 ) ) ; // nnnXXXnnnnn final int millions = Integer . parseInt ( snumber . substring ( 3 , 6 ) ) ; // nnnnnXXXnnn final int hundredThousands = Integer . parseInt ( snumber . substring ( 6 , 9 ) ) ; // nnnnnXXX final int thousands = Integer . parseInt ( snumber . substring ( 9 , 12 ) ) ; String tradBillions ; switch ( billions ) { case 0 : tradBillions = "" ; break ; case 1 : tradBillions = convertLessThanOneThousand ( billions ) + " Billion " ; break ; default : tradBillions = convertLessThanOneThousand ( billions ) + " Billion " ; } String result = tradBillions ; String tradMillions ; switch ( millions ) { case 0 : tradMillions = "" ; break ; case 1 : tradMillions = convertLessThanOneThousand ( millions ) + " Million " ; break ; default : tradMillions = convertLessThanOneThousand ( millions ) + " Million " ; } result = result + tradMillions ; String tradHundredThousands ; switch ( hundredThousands ) { case 0 : tradHundredThousands = "" ; break ; case 1 : tradHundredThousands = "One Thousand " ; break ; default : tradHundredThousands = convertLessThanOneThousand ( hundredThousands ) + " Thousand " ; } result = result + tradHundredThousands ; String tradThousand ; tradThousand = convertLessThanOneThousand ( thousands ) ; result = result + tradThousand ; // remove extra spaces ! final String replaceAll = result . replaceAll ( "^\\s+" , "" ) . replaceAll ( "\\b\\s{2,}\\b" , " " ) ; return replaceAll . trim ( ) ; // return replaceAll . replace ( " " , " _ " ) ;
public class TransactionalProtocolHandlers { /** * Create a transactional protocol client . * @ param channelAssociation the channel handler * @ return the transactional protocol client */ public static TransactionalProtocolClient createClient ( final ManagementChannelHandler channelAssociation ) { } }
final TransactionalProtocolClientImpl client = new TransactionalProtocolClientImpl ( channelAssociation ) ; channelAssociation . addHandlerFactory ( client ) ; return client ;
public class Inet6AddressImpl {
    /**
     * Resolves a hostname to its IP addresses using a cache.
     *
     * @param host the hostname to resolve
     * @param netId the network to perform resolution upon
     * @return the IP addresses of the host
     * @throws UnknownHostException if resolution fails (including cached negative results)
     */
    private static InetAddress[] lookupHostByName(String host, int netId) throws UnknownHostException {
        // Name resolution is network I/O; let the thread policy veto it.
        BlockGuard.getThreadPolicy().onNetwork();
        // Do we have a result cached?
        Object cachedResult = addressCache.get(host, netId);
        if (cachedResult != null) {
            if (cachedResult instanceof InetAddress[]) {
                // A cached positive result.
                return (InetAddress[]) cachedResult;
            } else {
                // A cached negative result.
                throw new UnknownHostException((String) cachedResult);
            }
        }
        try {
            StructAddrinfo hints = new StructAddrinfo();
            hints.ai_flags = AI_ADDRCONFIG;
            hints.ai_family = AF_UNSPEC;
            // If we don't specify a socket type, every address will appear twice, once
            // for SOCK_STREAM and one for SOCK_DGRAM. Since we do not return the family
            // anyway, just pick one.
            hints.ai_socktype = SOCK_STREAM;
            InetAddress[] addresses = NetworkOs.getaddrinfo(host, hints);
            // TODO: should getaddrinfo set the hostname of the InetAddresses it returns?
            for (InetAddress address : addresses) {
                address.holder().hostName = host;
            }
            // Cache the positive result for subsequent lookups.
            addressCache.put(host, netId, addresses);
            return addresses;
        } catch (GaiException gaiException) {
            // If the failure appears to have been a lack of INTERNET permission, throw a clear
            // SecurityException to aid in debugging this common mistake.
            // http://code.google.com/p/android/issues/detail?id=15722
            if (gaiException.getCause() instanceof ErrnoException) {
                if (((ErrnoException) gaiException.getCause()).errno == EACCES) {
                    throw new SecurityException("Permission denied (missing INTERNET permission?)", gaiException);
                }
            }
            // Otherwise, throw an UnknownHostException, caching the negative result.
            String detailMessage = "Unable to resolve host \"" + host + "\": " + NetworkOs.gai_strerror(gaiException.error);
            addressCache.putUnknownHost(host, netId, detailMessage);
            throw gaiException.rethrowAsUnknownHostException(detailMessage);
        }
    }
}
public class PyExpressionGenerator {
    /**
     * Generates Python code for the given SARL assert expression.
     *
     * @param assertStatement the assert statement
     * @param it the target for the generated content
     * @param context the context
     * @return the statement
     */
    protected XExpression _generate(SarlAssertExpression assertStatement, IAppendable it,
            IExtraLanguageGeneratorContext context) {
        // Only emit a runtime assert for non-static asserts that actually have a condition.
        final boolean haveAssert = !assertStatement.isIsStatic() && assertStatement.getCondition() != null;
        if (haveAssert) {
            // Wrap the condition in an immediately-invoked lambda:
            //   assert (lambda: <condition>)()
            it.append("assert (lambda:"); //$NON-NLS-1$
            it.increaseIndentation().newLine();
            generate(assertStatement.getCondition(), it, context);
            it.decreaseIndentation().newLine();
            it.append(")()"); //$NON-NLS-1$
        }
        if (context.getExpectedExpressionType() != null) {
            // The enclosing context expects a value: return the type's default value.
            if (haveAssert) {
                it.newLine();
            }
            it.append("return ").append(toDefaultValue(context.getExpectedExpressionType().toJavaCompliantTypeReference())); //$NON-NLS-1$
        }
        return assertStatement;
    }
}
public class FnShort { /** * Determines whether the target object and the specified object are NOT equal * by calling the < tt > equals < / tt > method on the target object . * @ param object the { @ link Short } to compare to the target * @ return false if both objects are equal , true if not . */ public static final Function < Short , Boolean > notEq ( final Short object ) { } }
return ( Function < Short , Boolean > ) ( ( Function ) FnObject . notEq ( object ) ) ;
public class BetweenFormater {
    /**
     * Formats the duration ({@code betweenMs}) as a human-readable string, emitting
     * units from day down to millisecond, limited by the configured level and by
     * the allowed number of emitted units.
     *
     * @return the formatted string, e.g. "1天2小时"; "0" plus the level's unit name
     *     when nothing was emitted
     */
    public String format() {
        final StringBuilder sb = new StringBuilder();
        if (betweenMs > 0) {
            // Decompose the millisecond duration into day/hour/minute/second/millisecond
            // parts, each reduced by the contribution of the larger units.
            long day = betweenMs / DateUnit.DAY.getMillis();
            long hour = betweenMs / DateUnit.HOUR.getMillis() - day * 24;
            long minute = betweenMs / DateUnit.MINUTE.getMillis() - day * 24 * 60 - hour * 60;
            long second = betweenMs / DateUnit.SECOND.getMillis() - ((day * 24 + hour) * 60 + minute) * 60;
            long millisecond = betweenMs - (((day * 24 + hour) * 60 + minute) * 60 + second) * 1000;
            final int level = this.level.ordinal();
            // Number of unit segments emitted so far; bounded by isLevelCountValid.
            int levelCount = 0;
            // A unit is emitted only when non-zero, within the configured level,
            // and while the segment budget allows.
            if (isLevelCountValid(levelCount) && 0 != day && level >= Level.DAY.ordinal()) {
                sb.append(day).append(Level.DAY.name);
                levelCount++;
            }
            if (isLevelCountValid(levelCount) && 0 != hour && level >= Level.HOUR.ordinal()) {
                sb.append(hour).append(Level.HOUR.name);
                levelCount++;
            }
            if (isLevelCountValid(levelCount) && 0 != minute && level >= Level.MINUTE.ordinal()) {
                sb.append(minute).append(Level.MINUTE.name);
                levelCount++;
            }
            if (isLevelCountValid(levelCount) && 0 != second && level >= Level.SECOND.ordinal()) {
                sb.append(second).append(Level.SECOND.name);
                levelCount++;
            }
            if (isLevelCountValid(levelCount) && 0 != millisecond && level >= Level.MILLSECOND.ordinal()) {
                sb.append(millisecond).append(Level.MILLSECOND.name);
                levelCount++;
            }
        }
        // Nothing emitted (zero/negative duration or all parts filtered): fall back
        // to "0" followed by the configured level's unit name.
        if (StrUtil.isEmpty(sb)) {
            sb.append(0).append(this.level.name);
        }
        return sb.toString();
    }
}
public class CmsSitemapView { /** * Sets the visibility of the normal siteamp header ( the header can be hidden so that the Vaadin code can display its own header ) . < p > * @ param visible true if the normal header should be visible */ public void setHeaderVisible ( boolean visible ) { } }
String style = I_CmsSitemapLayoutBundle . INSTANCE . sitemapCss ( ) . headerContainerVaadinMode ( ) ; if ( visible ) { m_headerContainer . removeStyleName ( style ) ; } else { m_headerContainer . addStyleName ( style ) ; }
public class AutoImplementASTTransformation {
    /**
     * Returns all methods including abstract super/interface methods, but only if not
     * overridden by a concrete declared/inherited method.
     *
     * @param cNode the class whose effective method set is collected
     * @return map from method descriptor (without return type) to the corrected MethodNode
     */
    private static Map<String, MethodNode> getAllCorrectedMethodsMap(ClassNode cNode) {
        Map<String, MethodNode> result = new HashMap<String, MethodNode>();
        // Seed with the class's own declared methods; they win over inherited ones.
        for (MethodNode mn : cNode.getMethods()) {
            result.put(methodDescriptorWithoutReturnType(mn), mn);
        }
        // Walk up the superclass chain, visiting each class and its interfaces.
        ClassNode next = cNode;
        while (true) {
            Map<String, ClassNode> genericsSpec = createGenericsSpec(next);
            for (MethodNode mn : next.getMethods()) {
                MethodNode correctedMethod = correctToGenericsSpec(genericsSpec, mn);
                // Skip cNode itself here -- its methods were already seeded above.
                if (next != cNode) {
                    ClassNode correctedClass = correctToGenericsSpecRecurse(genericsSpec, next);
                    MethodNode found = getDeclaredMethodCorrected(genericsSpec, correctedMethod, correctedClass);
                    if (found != null) {
                        String td = methodDescriptorWithoutReturnType(found);
                        // A concrete method already collected takes precedence over this one.
                        if (result.containsKey(td) && !result.get(td).isAbstract()) {
                            continue;
                        }
                        result.put(td, found);
                    }
                }
            }
            // Breadth-first traversal over the (transitive) interfaces of this level,
            // accumulating generics substitutions along the way.
            List<ClassNode> interfaces = new ArrayList<ClassNode>(Arrays.asList(next.getInterfaces()));
            Map<String, ClassNode> updatedGenericsSpec = new HashMap<String, ClassNode>(genericsSpec);
            while (!interfaces.isEmpty()) {
                ClassNode origInterface = interfaces.remove(0);
                if (!origInterface.equals(ClassHelper.OBJECT_TYPE)) {
                    updatedGenericsSpec = createGenericsSpec(origInterface, updatedGenericsSpec);
                    ClassNode correctedInterface = correctToGenericsSpecRecurse(updatedGenericsSpec, origInterface);
                    for (MethodNode nextMethod : correctedInterface.getMethods()) {
                        MethodNode correctedMethod = correctToGenericsSpec(genericsSpec, nextMethod);
                        MethodNode found = getDeclaredMethodCorrected(updatedGenericsSpec, correctedMethod, correctedInterface);
                        if (found != null) {
                            String td = methodDescriptorWithoutReturnType(found);
                            // Again: never replace an already-collected concrete method.
                            if (result.containsKey(td) && !result.get(td).isAbstract()) {
                                continue;
                            }
                            result.put(td, found);
                        }
                    }
                    // Queue super-interfaces for the same treatment.
                    interfaces.addAll(Arrays.asList(correctedInterface.getInterfaces()));
                }
            }
            ClassNode superClass = next.getUnresolvedSuperClass();
            if (superClass == null) {
                break;
            }
            next = correctToGenericsSpecRecurse(updatedGenericsSpec, superClass);
        }
        return result;
    }
}
public class DesignSpec { /** * Creates a new { @ link DesignSpec } instance from a resource ID using a { @ link View } * that will provide the { @ link DesignSpec } ' s intrinsic dimensions . * @ param view The { @ link View } who will own the new { @ link DesignSpec } instance . * @ param resId The resource ID pointing to a raw JSON resource . * @ return The newly created { @ link DesignSpec } instance . */ public static DesignSpec fromResource ( View view , int resId ) { } }
final Resources resources = view . getResources ( ) ; final DesignSpec spec = new DesignSpec ( resources , view ) ; if ( resId == 0 ) { return spec ; } final JSONObject json ; try { json = RawResource . getAsJSON ( resources , resId ) ; } catch ( IOException e ) { throw new IllegalStateException ( "Could not read design spec resource" , e ) ; } final float density = resources . getDisplayMetrics ( ) . density ; spec . setBaselineGridCellSize ( density * json . optInt ( JSON_KEY_BASELINE_GRID_CELL_SIZE , DEFAULT_BASELINE_GRID_CELL_SIZE_DIP ) ) ; spec . setBaselineGridVisible ( json . optBoolean ( JSON_KEY_BASELINE_GRID_VISIBLE , DEFAULT_BASELINE_GRID_VISIBLE ) ) ; spec . setKeylinesVisible ( json . optBoolean ( JSON_KEY_KEYLINES_VISIBLE , DEFAULT_KEYLINES_VISIBLE ) ) ; spec . setSpacingsVisible ( json . optBoolean ( JSON_KEY_SPACINGS_VISIBLE , DEFAULT_SPACINGS_VISIBLE ) ) ; spec . setBaselineGridColor ( Color . parseColor ( json . optString ( JSON_KEY_BASELINE_GRID_COLOR , DEFAULT_BASELINE_GRID_COLOR ) ) ) ; spec . setKeylinesColor ( Color . parseColor ( json . optString ( JSON_KEY_KEYLINES_COLOR , DEFAULT_KEYLINE_COLOR ) ) ) ; spec . setSpacingsColor ( Color . parseColor ( json . optString ( JSON_KEY_SPACINGS_COLOR , DEFAULT_SPACING_COLOR ) ) ) ; final JSONArray keylines = json . optJSONArray ( JSON_KEY_KEYLINES ) ; if ( keylines != null ) { final int keylineCount = keylines . length ( ) ; for ( int i = 0 ; i < keylineCount ; i ++ ) { try { final JSONObject keyline = keylines . getJSONObject ( i ) ; spec . addKeyline ( keyline . getInt ( JSON_KEY_OFFSET ) , From . valueOf ( keyline . getString ( JSON_KEY_FROM ) . toUpperCase ( ) ) ) ; } catch ( JSONException e ) { continue ; } } } final JSONArray spacings = json . optJSONArray ( JSON_KEY_SPACINGS ) ; if ( spacings != null ) { final int spacingCount = spacings . length ( ) ; for ( int i = 0 ; i < spacingCount ; i ++ ) { try { final JSONObject spacing = spacings . getJSONObject ( i ) ; spec . addSpacing ( spacing . 
getInt ( JSON_KEY_OFFSET ) , spacing . getInt ( JSON_KEY_SIZE ) , From . valueOf ( spacing . getString ( JSON_KEY_FROM ) . toUpperCase ( ) ) ) ; } catch ( JSONException e ) { continue ; } } } return spec ;
public class ResourceIndexImpl { /** * Gets a localized URIReference based on the given Node . */ private URIReference getLocalizedResource ( Node n ) throws GraphElementFactoryException { } }
if ( n instanceof URIReference ) { URIReference u = ( URIReference ) n ; return _connector . getElementFactory ( ) . createResource ( u . getURI ( ) ) ; } else { throw new RuntimeException ( "Error localizing triple; " + n . getClass ( ) . getName ( ) + " is not a URIReference" ) ; }