signature
stringlengths
43
39.1k
implementation
stringlengths
0
450k
public class Environment { /** * Runs a shell command in the system and provides a StringBuffer * with the output of the command . * @ param cmd an array of string that form the command to run * @ return a StringBuffer that contains the output of the command */ public static StringBuffer runCommand ( String [ ] cmd ) { } }
StringBuffer retval = new StringBuffer ( MAX_OUTPUT_LENGTH ) ; Process p ; try { p = Runtime . getRuntime ( ) . exec ( cmd ) ; InputStream tmp = p . getInputStream ( ) ; p . waitFor ( ) ; int c ; while ( ( c = tmp . read ( ) ) != - 1 ) retval . append ( ( char ) c ) ; } catch ( IOException e ) { e . printStackTrace ( ) ; } catch ( InterruptedException e ) { e . printStackTrace ( ) ; } return retval ;
public class WordVectorSerializer {

    /**
     * This method loads a full w2v model, previously saved with the writeFullModel call.
     *
     * Deprecation note: please consider using readWord2VecModel() or loadStaticModel() instead.
     *
     * @param path path to a previously stored w2v json model
     * @return Word2Vec instance
     * @deprecated Use readWord2VecModel() or loadStaticModel() method instead
     */
    @Deprecated
    public static Word2Vec loadFullModel(@NonNull String path) throws FileNotFoundException {
        /*
            // TODO: implementation is in process
            We need to restore:
                 1. WeightLookupTable, including syn0 and syn1 matrices
                 2. VocabCache + mark it as SPECIAL, to avoid accidental word removals
         */
        BasicLineIterator iterator = new BasicLineIterator(new File(path));

        // first 3 lines should be processed separately: configuration JSON,
        // serialized expTable and serialized negative-sampling table.
        String confJson = iterator.nextSentence();
        log.info("Word2Vec conf. JSON: " + confJson);
        VectorsConfiguration configuration = VectorsConfiguration.fromJson(confJson);

        // actually we dont need expTable, since it produces exact results on
        // subsequent runs untill you dont modify expTable size :)
        String eTable = iterator.nextSentence();
        double[] expTable; // NOTE(review): declared but never assigned/used in this method

        String nTable = iterator.nextSentence();
        if (configuration.getNegative() > 0) {
            // TODO: we probably should parse negTable, but it's not required until vocab
            // changes are introduced. Since on the predefined vocab it will produce exact
            // nTable, the same goes for expTable btw.
        }

        /*
            Since we're restoring vocab from previously serialized model, we can expect
            minWordFrequency appliance in its vocabulary, so it should NOT be truncated.
            That's why i'm setting minWordFrequency to configuration value, but applying
            SPECIAL to each word, to avoid truncation
         */
        VocabularyHolder holder = new VocabularyHolder.Builder()
                .minWordFrequency(configuration.getMinWordFrequency())
                .hugeModelExpected(configuration.isHugeModelExpected())
                .scavengerActivationThreshold(configuration.getScavengerActivationThreshold())
                .scavengerRetentionDelay(configuration.getScavengerRetentionDelay())
                .build();

        AtomicInteger counter = new AtomicInteger(0);
        AbstractCache<VocabWord> vocabCache = new AbstractCache.Builder<VocabWord>().build();

        // First pass over the remaining lines: rebuild the vocabulary, one
        // JSON-serialized word per line.
        while (iterator.hasNext()) {
            //    log.info("got line: " + iterator.nextSentence());
            String wordJson = iterator.nextSentence();
            VocabularyWord word = VocabularyWord.fromJson(wordJson);
            word.setSpecial(true);

            VocabWord vw = new VocabWord(word.getCount(), word.getWord());

            // NOTE(review): this index is immediately overwritten by the Huffman idx on
            // the next line, so the getAndIncrement() result is discarded — presumably
            // intentional (indexes come from the serialized Huffman tree); confirm.
            vw.setIndex(counter.getAndIncrement());

            vw.setIndex(word.getHuffmanNode().getIdx());
            vw.setCodeLength(word.getHuffmanNode().getLength());
            vw.setPoints(arrayToList(word.getHuffmanNode().getPoint(), word.getHuffmanNode().getLength()));
            vw.setCodes(arrayToList(word.getHuffmanNode().getCode(), word.getHuffmanNode().getLength()));

            vocabCache.addToken(vw);
            vocabCache.addWordToIndex(vw.getIndex(), vw.getLabel());
            vocabCache.putVocabWord(vw.getWord());
        }

        // at this moment vocab is restored, and it's time to rebuild Huffman tree
        // since word counters are equal, huffman tree will be equal too
        //holder.updateHuffmanCodes();

        // we definitely don't need UNK word in this scenarion
        //holder.transferBackToVocabCache(vocabCache, false);

        // now, it's time to transfer syn0/syn1/syn1neg values
        InMemoryLookupTable lookupTable = (InMemoryLookupTable) new InMemoryLookupTable.Builder()
                .negative(configuration.getNegative())
                .useAdaGrad(configuration.isUseAdaGrad())
                .lr(configuration.getLearningRate())
                .cache(vocabCache)
                .vectorLength(configuration.getLayersSize())
                .build();

        // we create all arrays
        lookupTable.resetWeights(true);

        iterator.reset();

        // we should skip 3 lines from file (the conf/expTable/negTable header)
        iterator.nextSentence();
        iterator.nextSentence();
        iterator.nextSentence();

        // now, for each word from vocabHolder we'll just transfer actual values
        while (iterator.hasNext()) {
            String wordJson = iterator.nextSentence();
            VocabularyWord word = VocabularyWord.fromJson(wordJson);

            // syn0 transfer: copy the serialized vector into the row allocated
            // for this word by resetWeights() above.
            INDArray syn0 = lookupTable.getSyn0().getRow(vocabCache.indexOf(word.getWord()));
            syn0.assign(Nd4j.create(word.getSyn0()));

            // syn1 transfer
            // syn1 values are being accessed via tree points, but since our goal is just
            // deserialization - we can just push it row by row
            INDArray syn1 = lookupTable.getSyn1().getRow(vocabCache.indexOf(word.getWord()));
            syn1.assign(Nd4j.create(word.getSyn1()));

            // syn1Neg transfer (only present when negative sampling was used)
            if (configuration.getNegative() > 0) {
                INDArray syn1Neg = lookupTable.getSyn1Neg().getRow(vocabCache.indexOf(word.getWord()));
                syn1Neg.assign(Nd4j.create(word.getSyn1Neg()));
            }
        }

        // Reassemble the model without resetting the restored weights.
        Word2Vec vec = new Word2Vec.Builder(configuration)
                .vocabCache(vocabCache)
                .lookupTable(lookupTable)
                .resetModel(false)
                .build();

        vec.setModelUtils(new BasicModelUtils());

        return vec;
    }
}
public class VariantCustom { /** * Support method to add a new param to this custom variant * @ param name the param name * @ param value the value of this parameter * @ param type the type of this parameter */ public void addParam ( String name , String value , int type ) { } }
// Current size usually is equal to the position params . add ( new NameValuePair ( type , name , value , params . size ( ) ) ) ;
public class GridFTPClient {

    /**
     * Performs extended store (adjusted store mode).
     *
     * @param remoteFileName file name to store
     * @param offset the offset added to the file pointer before storing
     *               the blocks of the file
     * @param source source for the data to transfer
     * @param mListener marker listener
     */
    public void extendedPut(String remoteFileName, long offset, DataSource source, MarkerListener mListener)
        throws IOException, ServerException, ClientException {
        // servers support GridFTP?
        checkGridFTPSupport();
        // all parameters set correctly (or still unset)?
        checkTransferParamsPut();

        // Arm the local server with the data source before issuing the command.
        localServer.retrieve(source);

        // ESTO with "A <offset>" requests adjusted store mode: the server adds
        // the offset to the file pointer before writing the transferred blocks.
        controlChannel.write(new Command("ESTO", "A " + offset + " " + remoteFileName));

        // Drive the transfer on this thread, forwarding markers to the listener.
        transferRunSingleThread(localServer.getControlChannel(), mListener);
    }
}
public class HashTableIterator { /** * / * ( non - Javadoc ) * @ see org . kie . util . Iterator # next ( ) */ public Object next ( ) { } }
if ( this . entry != null ) { this . entry = this . entry . getNext ( ) ; } // if no entry keep skipping rows until we come to the end , or find one that is populated while ( this . entry == null && this . row < this . length ) { this . entry = this . table [ this . row ] ; this . row ++ ; } return this . entry ;
public class BranchRemoteInterface {

    /**
     * Method for handling the RESTful POST operations to Branch Servers.
     * Internally calls abstract method {@link #doRestfulPost(String, JSONObject)}.
     *
     * @param url The url end point
     * @param body {@link JSONObject} with parameters for the POST call
     * @param tag {@link String} Tag for identifying the request for analytical or debugging purpose
     * @param branchKey {@link String} Branch key
     * @return {@link ServerResponse} object representing the result of the RESTful POST to Branch Server
     */
    public final ServerResponse make_restful_post(JSONObject body, String url, String tag, String branchKey) {
        long reqStartTime = System.currentTimeMillis();
        // Never forward a null body; substitute an empty JSON object.
        body = body != null ? body : new JSONObject();
        if (!addCommonParams(body, branchKey)) {
            // Common params could not be attached — the Branch key is invalid.
            return new ServerResponse(tag, BranchError.ERR_BRANCH_KEY_INVALID);
        }
        PrefHelper.Debug("posting to " + url);
        PrefHelper.Debug("Post value = " + body.toString());

        try {
            BranchResponse response = doRestfulPost(url, body);
            return processEntityForJSON(response.responseData, response.responseCode, tag);
        } catch (BranchRemoteException branchError) {
            if (branchError.branchErrorCode == BranchError.ERR_BRANCH_REQ_TIMED_OUT) {
                return new ServerResponse(tag, BranchError.ERR_BRANCH_REQ_TIMED_OUT);
            } else { // All other errors are considered as connectivity error
                return new ServerResponse(tag, BranchError.ERR_BRANCH_NO_CONNECTIVITY);
            }
        } finally {
            // Always record the round-trip time as instrumentation data,
            // regardless of success or failure.
            if (Branch.getInstance() != null) {
                int brttVal = (int) (System.currentTimeMillis() - reqStartTime);
                Branch.getInstance().addExtraInstrumentationData(
                        tag + "-" + Defines.Jsonkey.Branch_Round_Trip_Time.getKey(), String.valueOf(brttVal));
            }
        }
    }
}
public class ArchiveService { /** * Get a mapping of original to destination files which a covered by an assembly . This can be used * to watch the source files for changes in order to update the target ( either by recreating a docker image * or by copying it into a running container ) * @ param imageConfig image config for which to get files . The build - and assembly configuration in this image * config must not be null . * @ param mojoParameters needed for tracking the assembly * @ return mapping of assembly files * @ throws MojoExecutionException */ public AssemblyFiles getAssemblyFiles ( ImageConfiguration imageConfig , MojoParameters mojoParameters ) throws MojoExecutionException { } }
String name = imageConfig . getName ( ) ; try { return dockerAssemblyManager . getAssemblyFiles ( name , imageConfig . getBuildConfiguration ( ) , mojoParameters , log ) ; } catch ( InvalidAssemblerConfigurationException | ArchiveCreationException | AssemblyFormattingException e ) { throw new MojoExecutionException ( "Cannot extract assembly files for image " + name + ": " + e , e ) ; }
public class ArgTokenizer {

    /**
     * Parses the next token of this tokenizer.
     *
     * On return, {@code sval} holds the token text (or null if end of input
     * was reached in leading whitespace) and {@code isQuoted} indicates
     * whether the token was a quoted string.
     */
    public void nextToken() {
        byte ct[] = ctype;
        int c;
        int lctype;
        sval = null;
        isQuoted = false;

        // Skip leading whitespace; bail out at end of input.
        do {
            c = read();
            if (c < 0) {
                return;
            }
            // Chars >= 256 fall outside the ctype table and are classified separately.
            lctype = (c < 256) ? ct[c] : unicode2ctype(c);
        } while (lctype == CT_WHITESPACE);

        if (lctype == CT_ALPHA) {
            // Unquoted word token: accumulate CT_ALPHA chars, growing buf as needed.
            int i = 0;
            do {
                if (i >= buf.length) {
                    buf = Arrays.copyOf(buf, buf.length * 2);
                }
                buf[i++] = (char) c;
                c = read();
                lctype = c < 0 ? CT_WHITESPACE : (c < 256) ? ct[c] : unicode2ctype(c);
            } while (lctype == CT_ALPHA);
            if (c >= 0) --next; // push last back
            sval = String.copyValueOf(buf, 0, i);
            return;
        }

        if (lctype == CT_QUOTE) {
            // Quoted string token: consume until the matching quote, handling
            // backslash escapes (including up-to-3-digit octal escapes).
            int quote = c;
            int i = 0;
            /* Invariants (because \Octal needs a lookahead):
             *   (i)  c contains char value
             *   (ii) d contains the lookahead
             */
            int d = read();
            while (d >= 0 && d != quote) {
                if (d == '\\') {
                    c = read();
                    int first = c;   /* To allow \377, but not \477 */
                    if (c >= '0' && c <= '7') {
                        // Octal escape: up to three digits; a third digit is only
                        // consumed when the first digit is <= '3' (max \377).
                        c = c - '0';
                        int c2 = read();
                        if ('0' <= c2 && c2 <= '7') {
                            c = (c << 3) + (c2 - '0');
                            c2 = read();
                            if ('0' <= c2 && c2 <= '7' && first <= '3') {
                                c = (c << 3) + (c2 - '0');
                                d = read();
                            } else
                                d = c2;
                        } else
                            d = c2;
                    } else {
                        // Single-character escapes.
                        switch (c) {
                            case 'a':
                                c = 0x7;
                                break;
                            case 'b':
                                c = '\b';
                                break;
                            case 'f':
                                c = 0xC;
                                break;
                            case 'n':
                                c = '\n';
                                break;
                            case 'r':
                                c = '\r';
                                break;
                            case 't':
                                c = '\t';
                                break;
                            case 'v':
                                c = 0xB;
                                break;
                        }
                        d = read();
                    }
                } else {
                    c = d;
                    d = read();
                }
                if (i >= buf.length) {
                    buf = Arrays.copyOf(buf, buf.length * 2);
                }
                buf[i++] = (char) c;
            }

            // Only mark as quoted if the closing quote was actually found
            // (d < 0 means the input ended inside the string).
            if (d == quote) {
                isQuoted = true;
            }
            sval = String.copyValueOf(buf, 0, i);
        }
    }
}
public class MenuCreator {

    /**
     * Create the menu for the Alignment Panel representation of
     * Structural Alignments. The alignment can be in AFPChain format
     * or in the MultipleAlignment format.
     *
     * @param frame the frame the menu belongs to (used for the Close item)
     * @param actionListener listener attached to most menu items
     * @param afpChain pairwise alignment (may be null if msa is provided)
     * @param msa multiple alignment (may be null if afpChain is provided)
     * @return a JMenuBar with File, Edit, View and Help menus
     */
    public static JMenuBar getAlignmentPanelMenu(JFrame frame, ActionListener actionListener,
            AFPChain afpChain, MultipleAlignment msa) {
        JMenuBar menu = new JMenuBar();

        // --- File menu: save, print, close, exit -------------------------
        JMenu file = new JMenu("File");
        file.getAccessibleContext().setAccessibleDescription("File Menu");
        menu.add(file);

        // Icon is optional; fall back to a plain item when the resource is missing.
        ImageIcon saveicon = createImageIcon("/icons/filesave.png");
        JMenuItem saveF = null;
        if (saveicon != null)
            saveF = new JMenuItem("Save text display", saveicon);
        else
            saveF = new JMenuItem("Save text display");
        saveF.setMnemonic(KeyEvent.VK_S);
        // The save listener handles both alignment formats; text output mode.
        MySaveFileListener listener = new MySaveFileListener(afpChain, msa);
        listener.setTextOutput(true);
        saveF.addActionListener(listener);
        file.add(saveF);
        file.addSeparator();

        JMenuItem print = getPrintMenuItem();
        print.addActionListener(actionListener);
        file.add(print);
        file.addSeparator();

        JMenuItem closeI = MenuCreator.getCloseMenuItem(frame);
        file.add(closeI);
        JMenuItem exitI = MenuCreator.getExitMenuItem();
        file.add(exitI);

        // --- Edit menu: selection and coloring actions -------------------
        JMenu edit = new JMenu("Edit");
        edit.setMnemonic(KeyEvent.VK_E);
        menu.add(edit);

        JMenuItem eqrI = MenuCreator.getIcon(actionListener, SELECT_EQR);
        edit.add(eqrI);

        JMenuItem eqrcI = MenuCreator.getIcon(actionListener, EQR_COLOR);
        edit.add(eqrcI);

        JMenuItem simI = MenuCreator.getIcon(actionListener, SIMILARITY_COLOR);
        edit.add(simI);

        JMenuItem fatcatI = MenuCreator.getIcon(actionListener, FATCAT_BLOCK);
        edit.add(fatcatI);

        // --- View menu: display format toggles ---------------------------
        JMenu view = new JMenu("View");
        view.getAccessibleContext().setAccessibleDescription("View Menu");
        view.setMnemonic(KeyEvent.VK_V);
        menu.add(view);

        JMenuItem textI = MenuCreator.getIcon(actionListener, TEXT_ONLY);
        view.add(textI);

        JMenuItem fastaI = MenuCreator.getIcon(actionListener, FASTA_FORMAT);
        view.add(fastaI);

        JMenuItem pairsI = MenuCreator.getIcon(actionListener, PAIRS_ONLY);
        view.add(pairsI);

        JMenuItem textF = MenuCreator.getIcon(actionListener, FATCAT_TEXT);
        view.add(textF);

        // --- Help menu, pushed to the right edge via glue ----------------
        JMenu about = new JMenu("Help");
        about.setMnemonic(KeyEvent.VK_A);

        JMenuItem helpM = MenuCreator.getHelpMenuItem();
        about.add(helpM);

        JMenuItem aboutM = MenuCreator.getAboutMenuItem();
        about.add(aboutM);

        menu.add(Box.createGlue());
        menu.add(about);

        return menu;
    }
}
public class JMElasticsearchSearchAndCount {

    /**
     * Search all with target count search response.
     *
     * Convenience overload that delegates to the four-argument variant
     * with a null filter/query argument.
     *
     * @param indices the indices
     * @param types the types
     * @param aggregationBuilders the aggregation builders
     * @return the search response
     */
    public SearchResponse searchAllWithTargetCount(String[] indices, String[] types,
            AggregationBuilder[] aggregationBuilders) {
        return searchAllWithTargetCount(indices, types, null, aggregationBuilders);
    }
}
public class CollectionConverter { /** * 内部转换 * @ param value 值 * @ return 转换后的集合对象 */ protected Collection < ? > convertInternal ( Object value ) { } }
final Collection < Object > collection = CollectionUtil . create ( TypeUtil . getClass ( this . collectionType ) ) ; return CollUtil . addAll ( collection , value , this . elementType ) ;
public class AmazonSimpleEmailServiceClient { /** * Provides the sending limits for the Amazon SES account . * You can execute this operation no more than once per second . * @ param getSendQuotaRequest * @ return Result of the GetSendQuota operation returned by the service . * @ sample AmazonSimpleEmailService . GetSendQuota * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / email - 2010-12-01 / GetSendQuota " target = " _ top " > AWS API * Documentation < / a > */ @ Override public GetSendQuotaResult getSendQuota ( GetSendQuotaRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeGetSendQuota ( request ) ;
public class Utils {

    /**
     * Collapses a given node in a JTree.
     *
     * (NOTE(review): the original javadoc said "Expands", apparently copied
     * from an expand variant — this method calls tree.collapseRow.)
     *
     * @param tree  The JTree to collapse in.
     * @param model The TreeModel for tree.
     * @param node  The node within tree to collapse.
     * @param row   The displayed row in tree that represents node.
     * @param depth The depth to which the tree should be collapsed.
     *              Zero will just collapse node, a negative value will
     *              fully collapse the subtree, and a positive value will
     *              recursively collapse the tree to that depth relative
     *              to node.
     * @return the last row processed, so recursive calls can continue
     *         from the right position.
     */
    public static int collapseJTreeNode(javax.swing.JTree tree, javax.swing.tree.TreeModel model,
            Object node, int row, int depth) {
        if (node != null && !model.isLeaf(node)) {
            tree.collapseRow(row);
            if (depth != 0) {
                // Walk each child; stop early when we run past the visible rows.
                for (int index = 0; row + 1 < tree.getRowCount() && index < model.getChildCount(node); index++) {
                    row++;
                    Object child = model.getChild(node, index);
                    if (child == null) {
                        break;
                    }
                    javax.swing.tree.TreePath path;
                    // Advance row until it points at this child's visible path.
                    while ((path = tree.getPathForRow(row)) != null && path.getLastPathComponent() != child) {
                        row++;
                    }
                    if (path == null) {
                        break;
                    }
                    // Recurse one level shallower; row resumes after the child's subtree.
                    row = collapseJTreeNode(tree, model, child, row, depth - 1);
                }
            }
        }
        return row;
    }
}
public class appfwprofile_stats { /** * Use this API to fetch statistics of appfwprofile _ stats resource of given name . */ public static appfwprofile_stats get ( nitro_service service , String name ) throws Exception { } }
appfwprofile_stats obj = new appfwprofile_stats ( ) ; obj . set_name ( name ) ; appfwprofile_stats response = ( appfwprofile_stats ) obj . stat_resource ( service ) ; return response ;
public class JsonReader { /** * Skips the next value recursively . If it is an object or array , all nested * elements are skipped . This method is intended for use when the JSON token * stream contains unrecognized or unhandled values . */ public void skipValue ( ) throws IOException { } }
skipping = true ; try { int count = 0 ; do { JsonToken token = advance ( ) ; if ( token == JsonToken . BEGIN_ARRAY || token == JsonToken . BEGIN_OBJECT ) { count ++ ; } else if ( token == JsonToken . END_ARRAY || token == JsonToken . END_OBJECT ) { count -- ; } } while ( count != 0 ) ; } finally { skipping = false ; }
public class TrackerListGroupsResponse {

    /**
     * Parses the response content into a list of group states.
     *
     * @param in      the stream carrying the response body
     * @param charset charset used when decoding the body
     * @return the decoded group states
     * @throws IOException if the body cannot be decoded and its length
     *                     disagrees with the advertised content length
     */
    @Override
    public List<GroupState> decodeContent(InputStream in, Charset charset) throws IOException {
        // Read the packet body. Loop because InputStream.read() may return
        // fewer bytes than requested even when more data is coming.
        int expected = (int) getContentLength();
        byte[] bytes = new byte[expected];
        int contentSize = 0;
        while (contentSize < expected) {
            int read = in.read(bytes, contentSize, expected - contentSize);
            if (read < 0) {
                break; // stream ended before the advertised length was reached
            }
            contentSize += read;
        }

        if (contentSize != getContentLength()) {
            // Known fastdfs server-side bug: the advertised length can disagree
            // with the actual payload. Attempt to decode what we received anyway,
            // and only fail if decoding itself fails.
            try {
                return decode(bytes, charset);
            } catch (Exception e) {
                // Preserve the decode failure as the cause of the IOException.
                throw new IOException("读取到的数据长度与协议长度不符", e);
            }
        } else {
            return decode(bytes, charset);
        }
    }
}
public class FileUtils { /** * Ensure writeable directories . * If doesn ' t exist , we attempt creation . * @ param dirs List of Files to test . * @ return The passed < code > dirs < / code > . * @ exception IOException If passed directory does not exist and is not * createable , or directory is not writeable or is not a directory . */ public static List < File > ensureWriteableDirectory ( List < File > dirs ) throws IOException { } }
for ( Iterator < File > i = dirs . iterator ( ) ; i . hasNext ( ) ; ) { FileUtils . ensureWriteableDirectory ( i . next ( ) ) ; } return dirs ;
public class AWSMigrationHubClient { /** * Retrieves a list of all attributes associated with a specific migration task . * @ param describeMigrationTaskRequest * @ return Result of the DescribeMigrationTask operation returned by the service . * @ throws AccessDeniedException * You do not have sufficient access to perform this action . * @ throws InternalServerErrorException * Exception raised when there is an internal , configuration , or dependency error encountered . * @ throws ServiceUnavailableException * Exception raised when there is an internal , configuration , or dependency error encountered . * @ throws InvalidInputException * Exception raised when the provided input violates a policy constraint or is entered in the wrong format * or data type . * @ throws ResourceNotFoundException * Exception raised when the request references a resource ( ADS configuration , update stream , migration * task , etc . ) that does not exist in ADS ( Application Discovery Service ) or in Migration Hub ' s repository . * @ sample AWSMigrationHub . DescribeMigrationTask * @ see < a href = " http : / / docs . aws . amazon . com / goto / WebAPI / AWSMigrationHub - 2017-05-31 / DescribeMigrationTask " * target = " _ top " > AWS API Documentation < / a > */ @ Override public DescribeMigrationTaskResult describeMigrationTask ( DescribeMigrationTaskRequest request ) { } }
request = beforeClientExecution ( request ) ; return executeDescribeMigrationTask ( request ) ;
public class MtasSpanUniquePositionQuery { /** * ( non - Javadoc ) * @ see mtas . search . spans . util . MtasSpanQuery # rewrite ( org . apache . lucene . index . * IndexReader ) */ @ Override public MtasSpanQuery rewrite ( IndexReader reader ) throws IOException { } }
MtasSpanQuery newClause = clause . rewrite ( reader ) ; if ( ! newClause . equals ( clause ) ) { return new MtasSpanUniquePositionQuery ( newClause ) . rewrite ( reader ) ; } else { return super . rewrite ( reader ) ; }
public class ImageDrawing {

    /**
     * Drawing src bitmap to dest bitmap with rounded corners.
     *
     * Works by first painting an opaque rounded rectangle as a mask, then
     * blitting src with PorterDuff SRC_IN so only the masked area is kept.
     *
     * @param src source bitmap
     * @param dest destination bitmap
     * @param radius radius in destination bitmap scale
     * @param clearColor clear color
     */
    public static void drawRoundedCorners(Bitmap src, Bitmap dest, int radius, int clearColor) {
        clearBitmap(dest, clearColor);
        Canvas canvas = new Canvas(dest);

        // Reusable Rect/Paint instances come from thread-local work caches
        // to avoid per-call allocations.
        Rect sourceRect = WorkCache.RECT1.get();
        Rect destRect = WorkCache.RECT2.get();
        sourceRect.set(0, 0, src.getWidth(), src.getHeight());
        destRect.set(0, 0, dest.getWidth(), dest.getHeight());
        RectF roundRect = WorkCache.RECTF1.get();
        roundRect.set(0, 0, dest.getWidth(), dest.getHeight());

        Paint paint = WorkCache.PAINT.get();

        // 1) Draw the rounded-rect mask. The color is arbitrary (RED); only
        //    its alpha coverage matters for the SRC_IN step below.
        paint.reset();
        paint.setStyle(Paint.Style.FILL);
        paint.setColor(Color.RED);
        paint.setAntiAlias(true);
        canvas.drawRoundRect(roundRect, radius, radius, paint);

        // 2) Blit src scaled into dest; SRC_IN keeps src pixels only where
        //    the mask is opaque, producing the rounded corners.
        paint.reset();
        paint.setFilterBitmap(true);
        paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC_IN));
        canvas.drawBitmap(src, sourceRect, destRect, paint);

        // Detach the canvas from the bitmap.
        canvas.setBitmap(null);
    }
}
public class PublicanDocBookBuilder {

    /**
     * Builds the core publican.cfg file that is a basic requirement to build the publican book.
     *
     * @param buildData Information and data structures for the build.
     * @param publicanCfgTemplate The publican.cfg template to add content to.
     * @return The publican.cfg file filled with content from the Content Spec.
     */
    protected String buildCorePublicanCfgFile(final BuildData buildData, final String publicanCfgTemplate) {
        final ContentSpec contentSpec = buildData.getContentSpec();
        final Map<String, String> overrides = buildData.getBuildOptions().getOverrides();

        // Brand resolution order: primary override, alternate override,
        // content-spec brand, then the default brand.
        final String brandOverride = overrides.containsKey(CSConstants.BRAND_OVERRIDE)
                ? overrides.get(CSConstants.BRAND_OVERRIDE)
                : (overrides.containsKey(CSConstants.BRAND_ALT_OVERRIDE)
                        ? overrides.get(CSConstants.BRAND_ALT_OVERRIDE) : null);
        final String brand = brandOverride != null ? brandOverride
                : (contentSpec.getBrand() == null ? getDefaultBrand(buildData) : contentSpec.getBrand());

        // Setup publican.cfg
        String publicanCfg = publicanCfgTemplate.replaceAll(BuilderConstants.BRAND_REGEX, brand);
        // Replace the template's type line, stripping any "-Draft" suffix from the book type.
        publicanCfg = publicanCfg.replaceFirst("type\\s*:\\s*.*($|\\r\\n|\\n)",
                "type: " + contentSpec.getBookType().toString().replaceAll("-Draft", "") + "\n");
        publicanCfg = publicanCfg.replaceAll("xml_lang\\s*:\\s*.*?($|\\r\\n|\\n)",
                "xml_lang: " + buildData.getOutputLocale() + "\n");

        // Remove the image width
        publicanCfg = publicanCfg.replaceFirst("max_image_width\\s*:\\s*\\d+\\s*(\\r)?\\n", "");
        publicanCfg = publicanCfg.replaceFirst("toc_section_depth\\s*:\\s*\\d+\\s*(\\r)?\\n", "");

        // Minor formatting cleanup
        publicanCfg = publicanCfg.trim() + "\n";

        // Add the dtdver property
        if (buildData.getDocBookVersion() == DocBookVersion.DOCBOOK_50) {
            publicanCfg += "dtdver: \"5.0\"\n";
        }

        if (contentSpec.getPublicanCfg() != null) {
            // If the user publican.cfg doesn't contain a chunk_section_depth, then add a calculated one
            if (buildData.getBuildOptions().getCalculateChunkDepth()
                    && !contentSpec.getPublicanCfg().contains("chunk_section_depth")) {
                publicanCfg += "chunk_section_depth: " + calcChunkSectionDepth(buildData) + "\n";
            }

            // Remove the git_branch if the content spec contains a git_branch
            if (contentSpec.getPublicanCfg().contains("git_branch")) {
                publicanCfg = publicanCfg.replaceFirst("git_branch\\s*:\\s*.*(\\r)?(\\n)?", "");
            }
            publicanCfg += DocBookBuildUtilities.cleanUserPublicanCfg(contentSpec.getPublicanCfg());
        } else if (buildData.getBuildOptions().getCalculateChunkDepth()) {
            publicanCfg += "chunk_section_depth: " + calcChunkSectionDepth(buildData) + "\n";
        }

        if (buildData.getBuildOptions().getPublicanShowRemarks()) {
            // Remove any current show_remarks definitions
            if (publicanCfg.contains("show_remarks")) {
                publicanCfg = publicanCfg.replaceAll("show_remarks\\s*:\\s*\\d+\\s*(\\r)?(\\n)?", "");
            }
            publicanCfg += "show_remarks: 1\n";
        }

        // Add docname if it wasn't specified and the escaped title is valid
        Matcher m = null;
        if (BuilderConstants.VALID_PUBLICAN_DOCNAME_PATTERN.matcher(buildData.getEscapedBookTitle()).matches()) {
            m = DOCNAME_PATTERN.matcher(publicanCfg);
            if (!m.find()) {
                publicanCfg += "docname: " + buildData.getEscapedBookTitle().replaceAll("_", " ") + "\n";
            }
        }

        // Add product if it wasn't specified
        m = PRODUCT_PATTERN.matcher(publicanCfg);
        if (!m.find()) {
            publicanCfg += "product: " + escapeProduct(buildData.getOriginalBookProduct()) + "\n";
        }

        // Add the mainfile attribute
        publicanCfg += "mainfile: " + buildData.getRootBookFileName() + "\n";

        // Add a version if one wasn't specified; fall back from the book
        // version, to the version node text, to the builder default.
        m = VERSION_PATTERN.matcher(publicanCfg);
        if (!m.find()) {
            String version = contentSpec.getBookVersion();
            if (isNullOrEmpty(version)) {
                version = DocBookBuildUtilities.getKeyValueNodeText(buildData, contentSpec.getVersionNode());
            }
            if (isNullOrEmpty(version)) {
                version = BuilderConstants.DEFAULT_VERSION;
            }
            publicanCfg += "version: " + escapeVersion(version) + "\n";
        }

        return applyPublicanCfgOverrides(buildData, publicanCfg);
    }
}
public class VertxResponseWriter {

    /**
     * {@inheritDoc}
     *
     * Suspends the response, arming (or re-arming) a Vert.x timer that fires
     * the timeout handler after the given delay. A timeout of zero or less
     * suspends indefinitely (no timer is scheduled).
     */
    @Override
    public boolean suspend(long timeOut, TimeUnit timeUnit, TimeoutHandler timeoutHandler) {
        // TODO: If already suspended should return false according to documentation
        // Store the timeout handler
        this.timeoutHandler = timeoutHandler;

        // Cancel any existing timer; 0 is used as the "no timer" sentinel.
        if (suspendTimerId != 0) {
            vertx.cancelTimer(suspendTimerId);
            suspendTimerId = 0;
        }

        // If timeout <= 0, then it suspends indefinitely
        if (timeOut <= 0) {
            return true;
        }

        // Get milliseconds
        long ms = timeUnit.toMillis(timeOut);

        // Schedule timeout on the event loop. The id check guards against a
        // stale timer firing after suspend() was called again with a new timer.
        this.suspendTimerId = vertx.setTimer(ms, id -> {
            if (id == suspendTimerId) {
                VertxResponseWriter.this.timeoutHandler.onTimeout(VertxResponseWriter.this);
            }
        });

        return true;
    }
}
public class GroupValidatorSupport {

    /**
     * Determines whether the given validation hints (groups) apply to this
     * validator's configured groups.
     *
     * @param validationGroups the groups to test against
     * @return true if the hints match this validator's configuration
     */
    protected boolean containsValidationGroups(final List<Class<?>> validationGroups) {
        // When neither side specifies any group, the validator applies.
        if (getSettingGroups().isEmpty() && validationGroups.isEmpty()) {
            return true;
        }

        // When no hints are given, the validator applies if it is configured
        // with the default group.
        if (validationGroups.isEmpty()) {
            for (Class<?> settingGroup : getSettingGroups()) {
                if (DefaultGroup.class.isAssignableFrom(settingGroup)) {
                    return true;
                }
            }
        }

        for (Class<?> group : validationGroups) {
            // No configured groups: a default-group hint still matches.
            if (getSettingGroups().isEmpty() && DefaultGroup.class.isAssignableFrom(group)) {
                return true;
            }

            // Exact group match.
            if (getSettingGroups().contains(group)) {
                return true;
            }

            // Parent/child relationship check: a hint that extends a
            // configured group also matches.
            for (Class<?> parent : getSettingGroups()) {
                if (parent.isAssignableFrom(group)) {
                    return true;
                }
            }
        }

        return false;
    }
}
public class ColorUtils { /** * Calculate whether a color is light or dark , based on a commonly known * brightness formula . * @ see < a href = " http : / / en . wikipedia . org / wiki / HSV _ color _ space % 23Lightness " > http : / / en . wikipedia . org / wiki / HSV _ color _ space % 23Lightness < / a > */ public static boolean isColorDark ( int color ) { } }
return ( ( 30 * Color . red ( color ) + 59 * Color . green ( color ) + 11 * Color . blue ( color ) ) / 100 ) <= BRIGHTNESS_THRESHOLD ;
public class AbstractFavoritesController {

    /**
     * Configures FavoritesController to include a Marketplace portlet functional name in the Model,
     * which ultimately signals and enables the View to include a convenient link to Marketplace for
     * the user to add new favorites.
     *
     * <p>When set to null, signals the Favorites portlet to suppress links to Marketplace. Setting
     * to the empty String or to the literal value "null" (ignoring case) is equivalent to setting
     * to null.
     *
     * <p>This is for convenience in expressing no-marketplace-fname-available via an injected value
     * from a properties file. Defaults to the value of the property
     * "org.apereo.portal.portlets.favorites.MarketplaceFunctionalName", or null if that property is
     * not set.
     *
     * <p>The functional name can technically be the fname of any portlet. This allows Favorites to
     * support integration with Marketplace without requiring one, gracefully degrading when no
     * Marketplace is available.
     *
     * @param marketplaceFunctionalName String fname of a marketplace portlet, or null.
     */
    @Value("${org.apereo.portal.portlets.favorites.MarketplaceFunctionalName:null}")
    public void setMarketplaceFName(String marketplaceFunctionalName) {
        // Interpret null, blank, or the literal "null" (any case) as
        // signaling lack of a Marketplace functional name.
        final boolean blank = !StringUtils.hasText(marketplaceFunctionalName);
        final boolean literalNull = "null".equalsIgnoreCase(marketplaceFunctionalName);
        if (blank || literalNull) {
            marketplaceFunctionalName = null;
        }
        this.marketplaceFName = marketplaceFunctionalName;
    }
}
public class AbstractLinear {
    /**
     * Returns an image that simulates a glowing ring which could be used to visualize
     * a state of the gauge by a color. The LED might be too small if you are not in front
     * of the screen and so one could see the current state more easily.
     *
     * @param WIDTH width of the glow image in pixels
     * @param HEIGHT height of the glow image in pixels
     * @param GLOW_COLOR color of the glow effect
     * @param ON whether the glow is rendered in its active (on) state
     * @return an image that simulates a glowing ring
     */
    protected BufferedImage create_GLOW_Image(final int WIDTH, final int HEIGHT, final Color GLOW_COLOR, final boolean ON) {
        // Pure delegation to the shared glow factory; no caching done here.
        return GLOW_FACTORY.createLinearGlow(WIDTH, HEIGHT, GLOW_COLOR, ON);
    }
}
public class ShapeFittingOps { /** * Converts the list of indexes in a sequence into a list of { @ link PointIndex _ I32 } . * @ param sequence Sequence of points . * @ param indexes List of indexes in the sequence . * @ param output Output list of { @ link PointIndex _ I32 } . */ public static void indexToPointIndex ( List < Point2D_I32 > sequence , GrowQueue_I32 indexes , FastQueue < PointIndex_I32 > output ) { } }
output . reset ( ) ; for ( int i = 0 ; i < indexes . size ; i ++ ) { int index = indexes . data [ i ] ; Point2D_I32 p = sequence . get ( index ) ; PointIndex_I32 o = output . grow ( ) ; o . x = p . x ; o . y = p . y ; o . index = index ; }
public class TaskInstanceStrategyFactory { /** * Returns a subtask strategy instance based on an attribute value * which can consist of either the subtask strategy logical name , or an xml document . * @ param attributeValue * @ return the subtask strategy implementor instance */ public static SubTaskStrategy getSubTaskStrategy ( String attributeValue ) throws StrategyException { } }
TaskInstanceStrategyFactory factory = getInstance ( ) ; String className = factory . getStrategyClassName ( attributeValue , StrategyType . SubTaskStrategy ) ; SubTaskStrategy strategy = ( SubTaskStrategy ) factory . getStrategyInstance ( SubTaskStrategy . class , className , null ) ; return strategy ;
public class Jdt2Ecore { /** * Replies if the given type is a subclass of the second type . * < p > The type finder could be obtained with { @ link # toTypeFinder ( IJavaProject ) } . * @ param typeFinder the type finder to be used for finding the type definitions . * @ param subClass the name of the sub class . * @ param superClass the name of the expected super class . * @ return < code > true < / code > if it is a subclass . * @ throws JavaModelException if the Java model is invalid . * @ see # toTypeFinder ( IJavaProject ) */ public boolean isSubClassOf ( TypeFinder typeFinder , String subClass , String superClass ) throws JavaModelException { } }
final SuperTypeIterator typeIterator = new SuperTypeIterator ( typeFinder , false , subClass ) ; while ( typeIterator . hasNext ( ) ) { final IType type = typeIterator . next ( ) ; if ( Objects . equals ( type . getFullyQualifiedName ( ) , superClass ) ) { return true ; } } return false ;
public class StaticPathSpec {
    /**
     * Registers every path with the given {@link InputFormat} class on the job
     * configuration, appending to any "path;format" mappings already present.
     *
     * <p>Copied from MultipleInputs.addInputPath with slight modifications.
     *
     * @param job the job whose configuration is mutated
     * @param pathStrings input paths to register; NOTE(review): assumed non-empty —
     *                    {@code paths[paths.length - 1]} would throw otherwise; confirm callers
     * @param inputFormatClass the input format to associate with every path
     */
    private static void addInputPath(Job job, Iterable<String> pathStrings,
                                     Class<? extends InputFormat> inputFormatClass) {
        Configuration conf = job.getConfiguration();
        // Start from the existing comma-separated "path;format" mappings, if any.
        StringBuilder inputFormats = new StringBuilder(
                StringUtils.nullToEmptyNonDruidDataString(conf.get(MultipleInputs.DIR_FORMATS)));
        String[] paths = Iterables.toArray(pathStrings, String.class);
        // Append all but the last path directly to the DIR_FORMATS property.
        for (int i = 0; i < paths.length - 1; i++) {
            if (inputFormats.length() > 0) {
                inputFormats.append(',');
            }
            inputFormats.append(paths[i]).append(';').append(inputFormatClass.getName());
        }
        if (inputFormats.length() > 0) {
            conf.set(MultipleInputs.DIR_FORMATS, inputFormats.toString());
        }
        // add last one separately for possible initialization in MultipleInputs
        MultipleInputs.addInputPath(job, new Path(paths[paths.length - 1]), inputFormatClass);
    }
}
public class TypeValidator {
    /**
     * Expect that the property in an interface that this type implements is implemented and
     * correctly typed. Reports INTERFACE_METHOD_NOT_IMPLEMENTED when the property is absent,
     * and HIDDEN_INTERFACE_PROPERTY_MISMATCH when the found type is not a subtype of the
     * (template-resolved) required type.
     */
    private void expectInterfaceProperty(Node n, ObjectType instance,
                                         ObjectType implementedInterface, String prop) {
        StaticTypedSlot propSlot = instance.getSlot(prop);
        if (propSlot == null) {
            // Not implemented
            // NOTE(review): sourceName is computed but never used below — looks like dead code;
            // confirm before removing.
            String sourceName = n.getSourceFileName();
            sourceName = nullToEmpty(sourceName);
            registerMismatch(instance, implementedInterface,
                    report(JSError.make(n, INTERFACE_METHOD_NOT_IMPLEMENTED, prop,
                            implementedInterface.toString(), instance.toString())));
        } else {
            Node propNode = propSlot.getDeclaration() == null
                    ? null : propSlot.getDeclaration().getNode();
            // Fall back on the constructor node if we can't find a node for the property.
            propNode = propNode == null ? n : propNode;
            JSType found = propSlot.getType();
            found = found.restrictByNotNullOrUndefined();
            JSType required = implementedInterface.getImplicitPrototype().getPropertyType(prop);
            // Resolve template type variables of the interface before comparing.
            TemplateTypeMap typeMap = implementedInterface.getTemplateTypeMap();
            if (!typeMap.isEmpty()) {
                TemplateTypeMapReplacer replacer = new TemplateTypeMapReplacer(typeRegistry, typeMap);
                required = required.visit(replacer);
            }
            required = required.restrictByNotNullOrUndefined();
            if (!found.isSubtype(required, this.subtypingMode)) {
                // Implemented, but not correctly typed
                FunctionType constructor = implementedInterface.toObjectType().getConstructor();
                JSError err = JSError.make(propNode, HIDDEN_INTERFACE_PROPERTY_MISMATCH, prop,
                        instance.toString(),
                        constructor.getTopMostDefiningType(prop).toString(),
                        required.toString(), found.toString());
                registerMismatch(found, required, err);
                report(err);
            }
        }
    }
}
public class JSConsumerSet {
    /**
     * Add a new consumer to this set.
     *
     * @param lcp the consumer point to add
     */
    public void addConsumer(DispatchableConsumerPoint lcp) {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.entry(tc, "addConsumer", lcp);
        // WARNING: We mustn't hold the LCP lock of the consumer at this point
        // Guard the shared list so concurrent add/remove/iteration stay consistent.
        synchronized (consumerList) {
            consumerList.add(lcp);
        }
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
            SibTr.exit(tc, "addConsumer");
    }
}
public class FactoryDenseOpticalFlow { /** * The original Horn - Schunck algorithm . Only good for very small motions . * @ see HornSchunck * @ param config Configuration parameters . If null then default is used . * @ param imageType Type of input gray scale image * @ return dense optical flow */ public static < T extends ImageGray < T > , D extends ImageGray < D > > DenseOpticalFlow < T > hornSchunck ( @ Nullable ConfigHornSchunck config , Class < T > imageType ) { } }
if ( config == null ) config = new ConfigHornSchunck ( ) ; HornSchunck < T , D > alg ; if ( imageType == GrayU8 . class ) alg = ( HornSchunck ) new HornSchunck_U8 ( config . alpha , config . numIterations ) ; else if ( imageType == GrayF32 . class ) alg = ( HornSchunck ) new HornSchunck_F32 ( config . alpha , config . numIterations ) ; else throw new IllegalArgumentException ( "Unsupported image type " + imageType ) ; return new HornSchunck_to_DenseOpticalFlow < > ( alg , ImageType . single ( imageType ) ) ;
public class PdfLine { /** * Returns the number of space - characters in this line . * @ returna value */ int numberOfSpaces ( ) { } }
String string = toString ( ) ; int length = string . length ( ) ; int numberOfSpaces = 0 ; for ( int i = 0 ; i < length ; i ++ ) { if ( string . charAt ( i ) == ' ' ) { numberOfSpaces ++ ; } } return numberOfSpaces ;
public class HttpInboundServiceContextImpl {
    /**
     * Retrieve the next buffer of the request message's body. This will give
     * the buffer without any modifications, avoiding decompression or chunked
     * encoding removal.
     * A null buffer will be returned if there is no more data to get.
     * The caller is responsible for releasing these buffers when complete as the
     * HTTP Channel does not keep track of them.
     *
     * @return WsByteBuffer the next raw body buffer, or null when the body is exhausted
     * @throws IOException if a socket exception happens
     * @throws IllegalHttpBodyException if a malformed request body is present such that the
     *         server should send an HTTP 400 Bad Request back to the client
     */
    @Override
    public WsByteBuffer getRawRequestBodyBuffer() throws IOException, IllegalHttpBodyException {
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.entry(tc, "getRawRequestBodyBuffer(sync)");
        }
        // Flag raw mode (no decompression / dechunking), then reuse the standard body read.
        setRawBody(true);
        WsByteBuffer buffer = getRequestBodyBuffer();
        if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
            Tr.exit(tc, "getRawRequestBodyBuffer(sync): " + buffer);
        }
        return buffer;
    }
}
public class Common { /** * get the component ' s configuration */ public static Map getComponentMap ( DefaultTopologyAssignContext context , Integer task ) { } }
String componentName = context . getTaskToComponent ( ) . get ( task ) ; ComponentCommon componentCommon = ThriftTopologyUtils . getComponentCommon ( context . getSysTopology ( ) , componentName ) ; Map componentMap = ( Map ) JStormUtils . from_json ( componentCommon . get_json_conf ( ) ) ; if ( componentMap == null ) { componentMap = Maps . newHashMap ( ) ; } return componentMap ;
public class AbstractJavaMetadata { /** * Process a { @ link FieldDeclaration } to win information for an array type . * @ param fieldDeclaration - field declaration * @ return an ArrayTypeFieldMetadata , that contains information about an array type . */ protected FieldMetadata processArrayTypeFrom ( FieldDeclaration fieldDeclaration ) { } }
ArrayType arrayType = ( ArrayType ) fieldDeclaration . getType ( ) ; FieldMetadata arrayTypeFieldMetadata = null ; // the element type is never an array type Type type = arrayType . getElementType ( ) ; if ( type . isPrimitiveType ( ) ) { PrimitiveType primitiveType = ( PrimitiveType ) type ; arrayTypeFieldMetadata = FieldMetadata . arrayType ( primitiveType . getPrimitiveTypeCode ( ) . toString ( ) ) ; processModifiersAndVariablesOfFieldDeclaration ( fieldDeclaration , arrayTypeFieldMetadata ) ; arrayTypeFieldMetadata . setName ( getFieldName ( fieldDeclaration ) ) ; return arrayTypeFieldMetadata ; } // can ' t be an array type if ( type . isSimpleType ( ) ) { SimpleType simpleType = ( SimpleType ) type ; arrayTypeFieldMetadata = FieldMetadata . arrayType ( JavaMetadataUtil . getName ( simpleType . getName ( ) ) ) ; processModifiersAndVariablesOfFieldDeclaration ( fieldDeclaration , arrayTypeFieldMetadata ) ; arrayTypeFieldMetadata . setName ( getFieldName ( fieldDeclaration ) ) ; return arrayTypeFieldMetadata ; } return null ;
public class Handler { /** * Detect if the handler is a MiddlewareHandler instance . If yes , then register it . * @ param handler */ private static void registerMiddlewareHandler ( Object handler ) { } }
if ( handler instanceof MiddlewareHandler ) { // register the middleware handler if it is enabled . if ( ( ( MiddlewareHandler ) handler ) . isEnabled ( ) ) { ( ( MiddlewareHandler ) handler ) . register ( ) ; } }
public class NDArrayIndex { /** * Generates an interval from begin ( inclusive ) to end ( exclusive ) * @ param begin the begin * @ param stride the stride at which to increment * @ param end the end index * @ param inclusive whether the end should be inclusive or not * @ return the interval */ public static INDArrayIndex interval ( int begin , int stride , int end , boolean inclusive ) { } }
Preconditions . checkArgument ( begin <= end , "Beginning index (%s) in range must be less than or equal to end (%s)" , begin , end ) ; INDArrayIndex index = new IntervalIndex ( inclusive , stride ) ; index . init ( begin , end ) ; return index ;
public class LocationReferenceCheck { /** * Setter . * @ param pMethodCalls list of qualified method calls to cover */ public void setMethodCalls ( final String ... pMethodCalls ) { } }
final Set < String > newMethodCalls = new HashSet < > ( ) ; Collections . addAll ( newMethodCalls , pMethodCalls ) ; methodCalls = Collections . unmodifiableSet ( newMethodCalls ) ;
public class Matrix4d {
    /**
     * Computes the 2D range matrix of this (inverse view-projection) frustum's intersection
     * with the plane band y in [-sLower, -sUpper], projected through {@code projector}.
     * Returns null when the projected grid is not visible.
     *
     * @see org.joml.Matrix4dc#projectedGridRange(org.joml.Matrix4dc, double, double, org.joml.Matrix4d)
     */
    public Matrix4d projectedGridRange(Matrix4dc projector, double sLower, double sUpper, Matrix4d dest) {
        // Compute intersection with frustum edges and plane
        double minX = Double.POSITIVE_INFINITY, minY = Double.POSITIVE_INFINITY;
        double maxX = Double.NEGATIVE_INFINITY, maxY = Double.NEGATIVE_INFINITY;
        boolean intersection = false;
        // Iterate over the 12 edges of the NDC cube (4 per axis).
        for (int t = 0; t < 3 * 4; t++) {
            double c0X, c0Y, c0Z;
            double c1X, c1Y, c1Z;
            if (t < 4) {
                // all x edges
                c0X = -1; c1X = +1;
                c0Y = c1Y = ((t & 1) << 1) - 1.0;
                c0Z = c1Z = (((t >>> 1) & 1) << 1) - 1.0;
            } else if (t < 8) {
                // all y edges
                c0Y = -1; c1Y = +1;
                c0X = c1X = ((t & 1) << 1) - 1.0;
                c0Z = c1Z = (((t >>> 1) & 1) << 1) - 1.0;
            } else {
                // all z edges
                c0Z = -1; c1Z = +1;
                c0X = c1X = ((t & 1) << 1) - 1.0;
                c0Y = c1Y = (((t >>> 1) & 1) << 1) - 1.0;
            }
            // unproject corners (perspective divide by the transformed w)
            double invW = 1.0 / (m03 * c0X + m13 * c0Y + m23 * c0Z + m33);
            double p0x = (m00 * c0X + m10 * c0Y + m20 * c0Z + m30) * invW;
            double p0y = (m01 * c0X + m11 * c0Y + m21 * c0Z + m31) * invW;
            double p0z = (m02 * c0X + m12 * c0Y + m22 * c0Z + m32) * invW;
            invW = 1.0 / (m03 * c1X + m13 * c1Y + m23 * c1Z + m33);
            double p1x = (m00 * c1X + m10 * c1Y + m20 * c1Z + m30) * invW;
            double p1y = (m01 * c1X + m11 * c1Y + m21 * c1Z + m31) * invW;
            double p1z = (m02 * c1X + m12 * c1Y + m22 * c1Z + m32) * invW;
            double dirX = p1x - p0x;
            double dirY = p1y - p0y;
            double dirZ = p1z - p0z;
            double invDenom = 1.0 / dirY;
            // test for intersection against both plane offsets (sLower, sUpper)
            for (int s = 0; s < 2; s++) {
                double isectT = -(p0y + (s == 0 ? sLower : sUpper)) * invDenom;
                if (isectT >= 0.0 && isectT <= 1.0) {
                    intersection = true;
                    // project with projector matrix
                    double ix = p0x + isectT * dirX;
                    double iz = p0z + isectT * dirZ;
                    invW = 1.0 / (projector.m03() * ix + projector.m23() * iz + projector.m33());
                    double px = (projector.m00() * ix + projector.m20() * iz + projector.m30()) * invW;
                    double py = (projector.m01() * ix + projector.m21() * iz + projector.m31()) * invW;
                    // grow the 2D bounding range of the projected intersection points
                    minX = minX < px ? minX : px;
                    minY = minY < py ? minY : py;
                    maxX = maxX > px ? maxX : px;
                    maxY = maxY > py ? maxY : py;
                }
            }
        }
        if (!intersection)
            return null; // <- projected grid is not visible
        // Encode the computed [min, max] range as a scale/translate matrix.
        dest.set(maxX - minX, 0, 0, 0, 0, maxY - minY, 0, 0, 0, 0, 1, 0, minX, minY, 0, 1);
        dest.properties = PROPERTY_AFFINE;
        return dest;
    }
}
public class HttpRequest { /** * 获取请求URL最后的一个 / 后面的部分的short值 < br > * 例如请求URL / pipes / record / query / 2 < br > * 获取type参数 : short type = request . getRequstURILastPath ( ( short ) 0 ) ; / / type = 2 * @ param defvalue 默认short值 * @ return short值 */ public short getRequstURILastPath ( short defvalue ) { } }
String val = getRequstURILastPath ( ) ; if ( val . isEmpty ( ) ) return defvalue ; try { return Short . parseShort ( val ) ; } catch ( NumberFormatException e ) { return defvalue ; }
public class Cron4jJob {
    /**
     * Reschedules this job with a new cron expression, switching the underlying cron4j
     * task and either rescheduling the existing native id or scheduling a fresh one.
     * Revives the job when it was previously unscheduled.
     *
     * @throws IllegalArgumentException when cronExp is a non-cron expression
     */
    @Override
    public synchronized void reschedule(String cronExp, VaryingCronOpCall opLambda) {
        verifyCanRescheduleState();
        assertArgumentNotNull("cronExp", cronExp);
        assertArgumentNotNull("opLambda", opLambda);
        if (isNonCromExp(cronExp)) {
            throw new IllegalArgumentException("The cronExp for reschedule() should not be non-cron: " + toString());
        }
        if (unscheduled) {
            unscheduled = false; // can revive from unscheduled
        }
        // Remember the previous expression for logging before switching the task.
        final String existingCronExp = cron4jTask.getVaryingCron().getCronExp();
        cron4jTask.switchCron(cronExp, createCronOption(opLambda));
        final Cron4jScheduler cron4jScheduler = cron4jNow.getCron4jScheduler();
        cron4jId.ifPresent(id -> {
            if (JobChangeLog.isEnabled()) {
                JobChangeLog.log("#job ...Rescheduling {} as cron from '{}' to '{}'", jobKey, existingCronExp, cronExp);
            }
            if (isNativeScheduledId(cron4jScheduler, id)) {
                // Still known to the native scheduler: reschedule in place.
                cron4jScheduler.reschedule(id, cronExp);
            } else {
                // after descheduled
                cron4jId = scheduleNative(cronExp, cron4jScheduler);
            }
        }).orElse(() -> {
            // No native id yet (was non-cron): schedule freshly.
            if (JobChangeLog.isEnabled()) {
                JobChangeLog.log("#job ...Rescheduling {} as cron from non-cron to '{}'", jobKey, cronExp);
            }
            cron4jId = scheduleNative(cronExp, cron4jScheduler);
        });
    }
}
public class ProgrammaticWrappingProxyInstaller { /** * Wrap a annotation type element doc . * @ param source the source * @ return the wrapper . */ public AnnotationTypeElementDoc wrap ( AnnotationTypeElementDoc source ) { } }
if ( source == null || source instanceof Proxy < ? > || ! ( source instanceof AnnotationTypeElementDocImpl ) ) { return source ; } return new AnnotationTypeElementDocWrapper ( ( AnnotationTypeElementDocImpl ) source ) ;
public class NullabilityUtil {
    /**
     * Finds the symbol for the top-level class containing the given symbol.
     *
     * @param symbol the given symbol
     * @return symbol for the non-nested enclosing class
     */
    public static Symbol.ClassSymbol getOutermostClassSymbol(Symbol symbol) {
        // Walk outward from the immediate enclosing class; this handles anonymous classes.
        Symbol.ClassSymbol current = ASTHelpers.enclosingClass(symbol);
        while (current.getNestingKind().isNested()) {
            Symbol.ClassSymbol next = ASTHelpers.enclosingClass(current.owner);
            if (next == null) {
                // Can be null in weird cases like for array methods.
                break;
            }
            current = next;
        }
        return current;
    }
}
public class DeviceDefinitionVersion { /** * A list of devices in the definition version . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setDevices ( java . util . Collection ) } or { @ link # withDevices ( java . util . Collection ) } if you want to override * the existing values . * @ param devices * A list of devices in the definition version . * @ return Returns a reference to this object so that method calls can be chained together . */ public DeviceDefinitionVersion withDevices ( Device ... devices ) { } }
if ( this . devices == null ) { setDevices ( new java . util . ArrayList < Device > ( devices . length ) ) ; } for ( Device ele : devices ) { this . devices . add ( ele ) ; } return this ;
public class JUnit4 { /** * Adds a classpath source which contains the given resource . * TODO : [ GH - 213 ] this is extremely ugly ; separate the code required to run on the * forked JVM into an isolated bundle and either create it on - demand ( in temp . * files location ? ) or locate it in classpath somehow ( in a portable way ) . */ private org . apache . tools . ant . types . Path addSlaveClasspath ( ) { } }
org . apache . tools . ant . types . Path path = new org . apache . tools . ant . types . Path ( getProject ( ) ) ; String [ ] REQUIRED_SLAVE_CLASSES = { SlaveMain . class . getName ( ) , Strings . class . getName ( ) , MethodGlobFilter . class . getName ( ) , TeeOutputStream . class . getName ( ) } ; for ( String clazz : Arrays . asList ( REQUIRED_SLAVE_CLASSES ) ) { String resource = clazz . replace ( "." , "/" ) + ".class" ; File f = LoaderUtils . getResourceSource ( getClass ( ) . getClassLoader ( ) , resource ) ; if ( f != null ) { path . createPath ( ) . setLocation ( f ) ; } else { throw new BuildException ( "Could not locate classpath for resource: " + resource ) ; } } return path ;
public class LocalJcrDriver { /** * Get the information describing the connection . This method can be overridden to return a subclass of { @ link ConnectionInfo } * that implements { @ link ConnectionInfo # getCredentials ( ) } for a specific JCR implementation . * @ param url the JDBC URL * @ param info the JDBC connection properties * @ return the connection information , or null if the URL is null or not of the proper format * @ throws SQLException */ protected ConnectionInfo createConnectionInfo ( String url , Properties info ) throws SQLException { } }
RepositoryDelegate repositoryDelegate = delegateFactory . createRepositoryDelegate ( url , info , this . contextFactory ) ; return repositoryDelegate . getConnectionInfo ( ) ;
public class Utils {
    /**
     * Indicate if the given string matches the given pattern, which can contain '*'
     * and/or '?' wildcards. The strings are compared case-insensitive. If the given
     * string is null or empty, false is returned. The given pattern must have a value.
     *
     * @param strIn String to be tested.
     * @param patternIn Pattern to be matched.
     * @return True if the string matches the pattern.
     * @throws IllegalArgumentException If the given pattern is null or empty.
     */
    public static boolean matchesPattern(String strIn, String patternIn) throws IllegalArgumentException {
        if (patternIn == null || patternIn.length() == 0) {
            throw new IllegalArgumentException();
        }
        // If the test string is empty, we say that it doesn't match the given pattern.
        if (strIn == null || strIn.length() == 0) {
            return false;
        }
        // Upcase both strings so that we perform a case-insensitive comparison.
        String str = strIn.toUpperCase();
        String pattern = patternIn.toUpperCase();
        // Move through string as it matches pattern.
        int strInx = 0;
        int patInx = 0;
        while (strInx < str.length()) {
            // Did we consume all pattern chars?
            if (patInx >= pattern.length()) {
                // Pattern ended but more chars in string.
                return false;
            }
            if (pattern.charAt(patInx) == '*') {
                // Multi-char wildcard; start by skipping all immediately following wildcard chars.
                do
                    patInx++;
                while (patInx < pattern.length() && isWildcardChar(pattern.charAt(patInx)));
                if (patInx >= pattern.length()) {
                    // Rest of pattern was wildcards; string is considered matched.
                    return true;
                }
                // See if string contains the current non-wildcard pattern char subset.
                // This inner loop backtracks: each failed alignment retries one char later.
                boolean bSubsetMatched = false;
                int strStartInx = strInx;
                do {
                    // Skip to next string char that matches current char in pattern.
                    strInx = strStartInx;
                    while (strInx < str.length() && str.charAt(strInx) != pattern.charAt(patInx)) {
                        strInx++;
                    }
                    if (strInx >= str.length()) {
                        // Hit end of string without finding a match.
                        return false;
                    }
                    // See how far string and pattern characters match.
                    int subPatInx = patInx;
                    do {
                        // Current string and subset chars match; skip both.
                        subPatInx++;
                        strInx++;
                    } while (strInx < str.length() && subPatInx < pattern.length()
                            && pattern.charAt(subPatInx) != '*'
                            && (str.charAt(strInx) == pattern.charAt(subPatInx)
                                    || pattern.charAt(subPatInx) == '?'));
                    if ((subPatInx >= pattern.length() && strInx >= str.length())
                            || (subPatInx < pattern.length() && pattern.charAt(subPatInx) == '*')) {
                        // String matched pattern subset up to '*' or the entire rest of pattern.
                        bSubsetMatched = true;
                        patInx = subPatInx;
                    } else {
                        // Alignment failed; retry the subset one string char further on.
                        strStartInx++;
                    }
                } while (!bSubsetMatched);
            } else if (pattern.charAt(patInx) == '?' || str.charAt(strInx) == pattern.charAt(patInx)) {
                // Single char matched; advance to next char.
                strInx++;
                patInx++;
            } else {
                return false; // String char didn't match pattern char.
            }
        }
        // If we get here, we hit the end of string; it matches the pattern if the
        // rest of the pattern consists only of '*'.
        while (patInx < pattern.length() && pattern.charAt(patInx) == '*') {
            patInx++;
        }
        return patInx >= pattern.length();
    }
}
public class ProfileNode { /** * Returns a sorted enumerable of the nodes in descending order of * how long they took to run . */ public Iterable < Entry < String , ProfileNode > > getDescendingOrderedNodes ( ) { } }
if ( nodes == null ) return null ; List < Entry < String , ProfileNode > > averageStepTimes = new LinkedList < Entry < String , ProfileNode > > ( nodes . entrySet ( ) ) ; Collections . sort ( averageStepTimes , new Comparator < Entry < String , ProfileNode > > ( ) { public int compare ( Entry < String , ProfileNode > o1 , Entry < String , ProfileNode > o2 ) { return ( int ) ( o1 . getValue ( ) . totalMillisecs - o2 . getValue ( ) . totalMillisecs ) ; } } ) ; return averageStepTimes ;
public class ClassUtils { /** * Returns the given Array in a string representation . Even though the * parameter is of type " Object " one can hand over primitve arrays , e . g . * int [ 3 ] or double [ 2 ] [ 4 ] . * @ param array the array to return in a string representation * @ param outputClasswhether to output the class name instead of calling * the object ' s " toString ( ) " method * @ return the array as string */ public static String arrayToString ( Object array , boolean outputClass ) { } }
StringBuilder result ; int dimensions ; int i ; Object obj ; result = new StringBuilder ( ) ; dimensions = getArrayDimensions ( array ) ; if ( dimensions == 0 ) { result . append ( "null" ) ; } else if ( dimensions == 1 ) { for ( i = 0 ; i < Array . getLength ( array ) ; i ++ ) { if ( i > 0 ) result . append ( "," ) ; if ( Array . get ( array , i ) == null ) { result . append ( "null" ) ; } else { obj = Array . get ( array , i ) ; if ( outputClass ) { if ( obj instanceof Class ) result . append ( ( ( Class ) obj ) . getName ( ) ) ; else result . append ( obj . getClass ( ) . getName ( ) ) ; } else { result . append ( obj . toString ( ) ) ; } } } } else { for ( i = 0 ; i < Array . getLength ( array ) ; i ++ ) { if ( i > 0 ) result . append ( "," ) ; result . append ( "[" + arrayToString ( Array . get ( array , i ) ) + "]" ) ; } } return result . toString ( ) ;
public class Event { /** * Util method to create a string representation of a 32 bit integer representing * an IPv4 address . * @ param address The address , MSB first . * @ return The IP address string . */ public static String formatAddress ( int address ) { } }
int b1 = ( address >>> 24 ) & 0xff ; int b2 = ( address >>> 16 ) & 0xff ; int b3 = ( address >>> 8 ) & 0xff ; int b4 = address & 0xff ; return "" + b1 + '.' + b2 + '.' + b3 + '.' + b4 ;
public class CmsShellCommands { /** * Sets the login message . < p > * @ param messageText the message text * @ param loginDisabled true if login should be disabled * @ throws CmsRoleViolationException when this is not called with the correct privileges */ public void setLoginMessage ( String messageText , boolean loginDisabled ) throws CmsRoleViolationException { } }
CmsLoginMessage message = new CmsLoginMessage ( 0 , 0 , messageText , loginDisabled ) ; OpenCms . getLoginManager ( ) . setLoginMessage ( m_cms , message ) ; OpenCms . writeConfiguration ( CmsVariablesConfiguration . class ) ;
public class Mutator { /** * Creates a new schema which has exactly the same fields as the input Schema minus the field names * that are specified as " minusFields " . This is equivalent to calling { @ link # subSetOf ( Schema , String . . . ) } * with the list of Fields that must remain , but instead here we specify the fields that should NOT remain . * The name of the schema is also specified as a parameter . */ public static Schema minusFields ( String newName , Schema schema , String ... minusFields ) { } }
List < Field > newSchema = new ArrayList < Field > ( ) ; l1 : for ( Field f : schema . getFields ( ) ) { for ( String minsField : minusFields ) { if ( f . getName ( ) . equals ( minsField ) ) { continue l1 ; } } newSchema . add ( f ) ; } return new Schema ( newName , newSchema ) ;
public class Feature { /** * Transform this object into a DTO feature . * @ return dto for this feature */ public org . geomajas . layer . feature . Feature toDto ( ) { } }
org . geomajas . layer . feature . Feature dto = new org . geomajas . layer . feature . Feature ( ) ; dto . setAttributes ( attributes ) ; dto . setClipped ( clipped ) ; dto . setId ( id ) ; dto . setGeometry ( GeometryConverter . toDto ( geometry ) ) ; dto . setCrs ( crs ) ; dto . setLabel ( getLabel ( ) ) ; return dto ;
public class BeanBoxUtils { /** * Get @ INJECT or @ POSTCONSTRUCT or @ PARAM or @ PREDESTROY or @ PROTOTYPE * annotation values , return Object [ 3 ] or null if no above annotations found */ private static Object [ ] getInjectAnnotationAsArray ( Object target , boolean allowSpringJsrAnno ) { } }
Annotation [ ] anno = getAnnotations ( target ) ; return getInjectAsArray ( anno , allowSpringJsrAnno ) ;
public class GlobalOperationClient { /** * Deletes the specified Operations resource . * < p > Sample code : * < pre > < code > * try ( GlobalOperationClient globalOperationClient = GlobalOperationClient . create ( ) ) { * ProjectGlobalOperationName operation = ProjectGlobalOperationName . of ( " [ PROJECT ] " , " [ OPERATION ] " ) ; * globalOperationClient . deleteGlobalOperation ( operation ) ; * < / code > < / pre > * @ param operation Name of the Operations resource to delete . * @ throws com . google . api . gax . rpc . ApiException if the remote call fails */ @ BetaApi public final void deleteGlobalOperation ( ProjectGlobalOperationName operation ) { } }
DeleteGlobalOperationHttpRequest request = DeleteGlobalOperationHttpRequest . newBuilder ( ) . setOperation ( operation == null ? null : operation . toString ( ) ) . build ( ) ; deleteGlobalOperation ( request ) ;
public class AbstractLayer { /** * Make the layer visible or invisible . * @ param visible * the visible to set * @ since 1.10.0 */ @ Api public void setVisible ( boolean visible ) { } }
if ( visible != this . visible ) { this . visible = visible ; updateShowing ( false ) ; handlerManager . fireEvent ( new LayerShownEvent ( this ) ) ; }
public class ApiOvhTelephony { /** * List all available offer changes compatibilities * REST : GET / telephony / { billingAccount } / service / { serviceName } / offerChanges * @ param billingAccount [ required ] The name of your billingAccount * @ param serviceName [ required ] */ public ArrayList < OvhLineOffer > billingAccount_service_serviceName_offerChanges_GET ( String billingAccount , String serviceName ) throws IOException { } }
String qPath = "/telephony/{billingAccount}/service/{serviceName}/offerChanges" ; StringBuilder sb = path ( qPath , billingAccount , serviceName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , t15 ) ;
public class GenericResponseBuilder { /** * Sets the response entity body on the builder . * < p > A specific media type can be set using the { @ code type ( . . . ) } methods . * @ param entity the response entity body * @ return this builder * @ see # type ( MediaType ) * @ see # type ( String ) */ public GenericResponseBuilder < T > entity ( T entity ) { } }
if ( hasErrorEntity ) { throw new IllegalStateException ( "errorEntity already set. Only one of entity and errorEntity may be set" ) ; } this . body = entity ; rawBuilder . entity ( entity ) ; return this ;
public class CmsObject {
    /**
     * Returns all relations for the given resource matching the given filter.<p>
     *
     * You should have view/read permissions on the given resource.<p>
     *
     * You may become source and/or target paths to resources you do not have view/read
     * permissions on.<p>
     *
     * @param resource the resource to retrieve the relations for
     * @param filter the filter to match the relation
     * @return a List containing all {@link org.opencms.relations.CmsRelation}
     *         objects for the given resource matching the given filter
     * @throws CmsException if something goes wrong
     * @see CmsSecurityManager#getRelationsForResource(CmsRequestContext, CmsResource, CmsRelationFilter)
     */
    public List<CmsRelation> getRelationsForResource(CmsResource resource, CmsRelationFilter filter)
    throws CmsException {
        // Pure delegation: the security manager enforces permissions using the current context.
        return m_securityManager.getRelationsForResource(m_context, resource, filter);
    }
}
public class KeenClient { /** * Set the attempts Map in the eventStore * @ param projectId the project id * @ param eventCollection the collection name * @ param attempts the current attempts Map * @ throws IOException */ private void setAttemptsMap ( String projectId , String eventCollection , Map < String , Integer > attempts ) throws IOException { } }
if ( eventStore instanceof KeenAttemptCountingEventStore ) { KeenAttemptCountingEventStore res = ( KeenAttemptCountingEventStore ) eventStore ; StringWriter writer = null ; try { writer = new StringWriter ( ) ; jsonHandler . writeJson ( writer , attempts ) ; String attemptsJSON = writer . toString ( ) ; res . setAttempts ( projectId , eventCollection , attemptsJSON ) ; } finally { KeenUtils . closeQuietly ( writer ) ; } }
public class CommerceCountryPersistenceImpl { /** * Returns the commerce country where uuid = & # 63 ; and groupId = & # 63 ; or throws a { @ link NoSuchCountryException } if it could not be found . * @ param uuid the uuid * @ param groupId the group ID * @ return the matching commerce country * @ throws NoSuchCountryException if a matching commerce country could not be found */ @ Override public CommerceCountry findByUUID_G ( String uuid , long groupId ) throws NoSuchCountryException { } }
CommerceCountry commerceCountry = fetchByUUID_G ( uuid , groupId ) ; if ( commerceCountry == null ) { StringBundler msg = new StringBundler ( 6 ) ; msg . append ( _NO_SUCH_ENTITY_WITH_KEY ) ; msg . append ( "uuid=" ) ; msg . append ( uuid ) ; msg . append ( ", groupId=" ) ; msg . append ( groupId ) ; msg . append ( "}" ) ; if ( _log . isDebugEnabled ( ) ) { _log . debug ( msg . toString ( ) ) ; } throw new NoSuchCountryException ( msg . toString ( ) ) ; } return commerceCountry ;
public class HdfsUnderFileSystem { /** * Factory method to constructs a new HDFS { @ link UnderFileSystem } instance . * @ param ufsUri the { @ link AlluxioURI } for this UFS * @ param conf the configuration for Hadoop * @ param alluxioConf Alluxio configuration * @ return a new HDFS { @ link UnderFileSystem } instance */ public static HdfsUnderFileSystem createInstance ( AlluxioURI ufsUri , UnderFileSystemConfiguration conf , AlluxioConfiguration alluxioConf ) { } }
Configuration hdfsConf = createConfiguration ( conf ) ; return new HdfsUnderFileSystem ( ufsUri , conf , hdfsConf , alluxioConf ) ;
public class GlobalMetrics { /** * Thread safe created increment of counterName . ( Slow ) */ public static void safeIncrBy ( String counterName , int incVal ) { } }
org . apache . heron . api . metric . GlobalMetrics . safeIncrBy ( counterName , incVal ) ;
public class IdGenerator { /** * Generate a unique id attribute using the passed text as the seed value . The generated id complies with the XHTML * specification . Extract from < a href = " http : / / www . w3 . org / TR / xhtml1 / # C _ 8 " > XHTML RFC < / a > : * < code > When defining fragment identifiers to be backward - compatible , only strings matching the pattern * [ A - Za - z ] [ A - Za - z0-9 : _ . - ] * should be used . < / code > * @ param prefix the prefix of the identifier . Has to match [ a - zA - Z ] . * @ param text the text used to generate the unique id * @ return the unique id . For example " Hello world " will generate prefix + " Helloworld " . */ public String generateUniqueId ( String prefix , String text ) { } }
// Verify that the passed prefix contains only alpha characters since the generated id must be a valid HTML id . if ( StringUtils . isEmpty ( prefix ) || ! StringUtils . isAlpha ( prefix ) ) { throw new IllegalArgumentException ( "The prefix [" + prefix + "] should only contain alphanumerical characters and not be empty." ) ; } String idPrefix = prefix + normalizeId ( text ) ; int occurence = 0 ; String id = idPrefix ; while ( this . generatedIds . contains ( id ) ) { occurence ++ ; id = idPrefix + "-" + occurence ; } // Save the generated id so that the next call to this method will not generate the same id . this . generatedIds . add ( id ) ; return id ;
public class Cleaner { /** * Creates a new , clean document , from the original dirty document , containing only elements allowed by the whitelist . * The original document is not modified . Only elements from the dirt document ' s < code > body < / code > are used . * @ param dirtyDocument Untrusted base document to clean . * @ return cleaned document . */ public Document clean ( Document dirtyDocument ) { } }
Validate . notNull ( dirtyDocument ) ; Document clean = Document . createShell ( dirtyDocument . baseUri ( ) ) ; if ( dirtyDocument . body ( ) != null ) // frameset documents won ' t have a body . the clean doc will have empty body . copySafeNodes ( dirtyDocument . body ( ) , clean . body ( ) ) ; return clean ;
public class CmsCategorySelectDialog { /** * On OK click . < p > */ void onOk ( ) { } }
if ( ! m_selectionHandlers . isEmpty ( ) ) { Collection < CmsCategory > categories = m_tree . getSelectedCategories ( ) ; for ( I_CmsSelectionHandler < Collection < CmsCategory > > handler : m_selectionHandlers ) { handler . onSelection ( categories ) ; } }
public class RemoteMongoCollectionImpl { /** * Finds a document in the collection and performs the given update . * @ param filter the query filter * @ param update the update document * @ return the resulting document */ public DocumentT findOneAndUpdate ( final Bson filter , final Bson update ) { } }
return proxy . findOneAndUpdate ( filter , update ) ;
public class AllianceApi { /** * List alliance & # 39 ; s corporations ( asynchronously ) List all current member * corporations of an alliance - - - This route is cached for up to 3600 * seconds * @ param allianceId * An EVE alliance ID ( required ) * @ param datasource * The server name you would like data from ( optional , default to * tranquility ) * @ param ifNoneMatch * ETag from a previous request . A 304 will be returned if this * matches the current ETag ( optional ) * @ param callback * The callback to be executed when the API call finishes * @ return The request call * @ throws ApiException * If fail to process the API call , e . g . serializing the request * body object */ public com . squareup . okhttp . Call getAlliancesAllianceIdCorporationsAsync ( Integer allianceId , String datasource , String ifNoneMatch , final ApiCallback < List < Integer > > callback ) throws ApiException { } }
com . squareup . okhttp . Call call = getAlliancesAllianceIdCorporationsValidateBeforeCall ( allianceId , datasource , ifNoneMatch , callback ) ; Type localVarReturnType = new TypeToken < List < Integer > > ( ) { } . getType ( ) ; apiClient . executeAsync ( call , localVarReturnType , callback ) ; return call ;
public class ConfigImpl { /** * further down on the stack . */ static ConfigException . NotResolved improveNotResolved ( Path what , ConfigException . NotResolved original ) { } }
String newMessage = what . render ( ) + " has not been resolved, you need to call Config#resolve()," + " see API docs for Config#resolve()" ; if ( newMessage . equals ( original . getMessage ( ) ) ) return original ; else return new ConfigException . NotResolved ( newMessage , original ) ;
public class Bootstrap { /** * Connect a { @ link Channel } to the remote peer . */ public ChannelFuture connect ( SocketAddress remoteAddress ) { } }
if ( remoteAddress == null ) { throw new NullPointerException ( "remoteAddress" ) ; } validate ( ) ; return doResolveAndConnect ( remoteAddress , config . localAddress ( ) ) ;
public class SntpClient {
    /**
     * Sends an SNTP request to the given host and processes the response.
     * On success, stores the computed NTP time, its elapsed-realtime reference,
     * the round-trip time and the clock offset in the corresponding fields.
     *
     * @param host host name of the server.
     * @param timeout network timeout in milliseconds.
     * @return true if the transaction was successful; false on any error
     *         (resolution failure, timeout, I/O error).
     */
    public boolean requestTime(String host, int timeout) {
        DatagramSocket socket = null;
        try {
            socket = new DatagramSocket();
            socket.setSoTimeout(timeout);
            InetAddress address = InetAddress.getByName(host);
            byte[] buffer = new byte[NTP_PACKET_SIZE];
            DatagramPacket request = new DatagramPacket(buffer, buffer.length, address, NTP_PORT);

            // set mode = 3 (client) and version = 3
            // mode is in low 3 bits of first byte
            // version is in bits 3-5 of first byte
            buffer[0] = NTP_MODE_CLIENT | (NTP_VERSION << 3);

            // get current time and write it to the request packet
            long requestTime = System.currentTimeMillis();
            // elapsedRealtime is used for interval measurement because it is monotonic,
            // unlike currentTimeMillis which can jump if the wall clock is adjusted.
            long requestTicks = SystemClock.elapsedRealtime();
            writeTimeStamp(buffer, TRANSMIT_TIME_OFFSET, requestTime);
            socket.send(request);

            // read the response (the same buffer is reused for the reply packet)
            DatagramPacket response = new DatagramPacket(buffer, buffer.length);
            socket.receive(response);
            long responseTicks = SystemClock.elapsedRealtime();
            long responseTime = requestTime + (responseTicks - requestTicks);

            // extract the results
            long originateTime = readTimeStamp(buffer, ORIGINATE_TIME_OFFSET);
            long receiveTime = readTimeStamp(buffer, RECEIVE_TIME_OFFSET);
            long transmitTime = readTimeStamp(buffer, TRANSMIT_TIME_OFFSET);
            long roundTripTime = responseTicks - requestTicks - (transmitTime - receiveTime);
            // receiveTime = originateTime + transit + skew
            // responseTime = transmitTime + transit - skew
            // clockOffset = ((receiveTime - originateTime) + (transmitTime - responseTime)) / 2
            //             = ((originateTime + transit + skew - originateTime) +
            //                (transmitTime - (transmitTime + transit - skew))) / 2
            //             = ((transit + skew) + (transmitTime - transmitTime - transit + skew)) / 2
            //             = (transit + skew - transit + skew) / 2
            //             = (2 * skew) / 2 = skew
            mClockOffset = ((receiveTime - originateTime) + (transmitTime - responseTime)) / 2;
            // if (false) Log.d(TAG, "round trip: " + roundTripTime + " ms");
            // if (false) Log.d(TAG, "clock offset: " + clockOffset + " ms");

            // save our results - use the times on this side of the network latency
            // (response rather than request time)
            mNtpTime = responseTime + mClockOffset;
            mNtpTimeReference = responseTicks;
            mRoundTripTime = roundTripTime;
        } catch (Exception e) {
            // Best-effort by contract: any failure (DNS, timeout, I/O) simply reports false.
            return false;
        } finally {
            if (socket != null) {
                socket.close();
            }
        }
        return true;
    }
}
public class TracerFactory { /** * Closes all pooled tracers . */ public void closePoolTracer ( ) { } }
this . poolWriteLock . lock ( ) ; try { for ( AbstractTracer tracer : this . tracerPool . values ( ) ) { tracer . close ( ) ; } } finally { this . poolWriteLock . unlock ( ) ; }
public class AbstractMetaCache { /** * Caches the given object using the given Identity as key * @ param oid The Identity key * @ param obj The object o cache */ public void cache ( Identity oid , Object obj ) { } }
if ( oid != null && obj != null ) { ObjectCache cache = getCache ( oid , obj , METHOD_CACHE ) ; if ( cache != null ) { cache . cache ( oid , obj ) ; } }
public class GroupReduceDriver {
    /**
     * Prepares the driver for execution: verifies that the configured strategy is
     * SORTED_GROUP_REDUCE, then wires up the input serializer, the grouping comparator
     * and a record-counting input iterator, and reads the object-reuse setting.
     *
     * @throws Exception if the task was configured with an unsupported driver strategy
     */
    @Override
    public void prepare() throws Exception {
        TaskConfig config = this.taskContext.getTaskConfig();
        // This driver only supports the sorted-group-reduce strategy; fail fast otherwise.
        if (config.getDriverStrategy() != DriverStrategy.SORTED_GROUP_REDUCE) {
            throw new Exception("Unrecognized driver strategy for GroupReduce driver: " + config.getDriverStrategy().name());
        }
        final Counter numRecordsIn = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
        this.serializer = this.taskContext.<IT>getInputSerializer(0).getSerializer();
        this.comparator = this.taskContext.getDriverComparator(0);
        // Wrap input 0 so every record read is also counted in the IO metrics.
        this.input = new CountingMutableObjectIterator<>(this.taskContext.<IT>getInput(0), numRecordsIn);
        ExecutionConfig executionConfig = taskContext.getExecutionConfig();
        this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();
        if (LOG.isDebugEnabled()) {
            LOG.debug("GroupReduceDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
        }
    }
}
public class GetBranchRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( GetBranchRequest getBranchRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( getBranchRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( getBranchRequest . getRepositoryName ( ) , REPOSITORYNAME_BINDING ) ; protocolMarshaller . marshall ( getBranchRequest . getBranchName ( ) , BRANCHNAME_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class FlowTypeCheck { /** * Extract the element type from a reference . The array type can be null if some * earlier part of type checking generated an error message and we are just * continuing after that . * @ param type * @ param item * @ return */ public SemanticType extractElementType ( SemanticType . Reference type , SyntacticItem item ) { } }
if ( type == null ) { return null ; } else { return type . getElement ( ) ; }
public class DnsCacheManipulator { /** * Set a < b > never expired < / b > dns cache entry * @ param host host * @ param ips ips * @ throws DnsCacheManipulatorException Operation fail * @ see DnsCacheManipulator # setDnsCache ( long , java . lang . String , java . lang . String . . . ) */ public static void setDnsCache ( String host , String ... ips ) { } }
try { InetAddressCacheUtil . setInetAddressCache ( host , ips , NEVER_EXPIRATION ) ; } catch ( Exception e ) { final String message = String . format ( "Fail to setDnsCache for host %s ip %s, cause: %s" , host , Arrays . toString ( ips ) , e . toString ( ) ) ; throw new DnsCacheManipulatorException ( message , e ) ; }
public class InjectiveVar2VarSubstitutionImpl { /** * More efficient implementation */ @ Override public < T extends ImmutableTerm > Optional < ImmutableSubstitution < T > > applyToSubstitution ( ImmutableSubstitution < T > substitution ) { } }
return Optional . of ( applyRenaming ( substitution ) ) ;
public class DfState {
    /**
     * Pops stack frames until a query/mol atom pairing is unmapped
     * or we reach the bottom of the stack.
     */
    private void backtrack() {
        IAtom qatom = stack[sptr].atom;
        --sptr;
        if (qatom != null) {
            // This frame holds a real atom pairing: undo the mapping bookkeeping.
            --numMapped;
            avisit[amap[qatom.getIndex()]] = false;
            amap[qatom.getIndex()] = UNMAPPED;
        } else if (sptr != 0) {
            // Frame carried no atom (e.g. a marker frame); keep popping
            // recursively until an atom is unmapped or the stack bottoms out.
            backtrack();
        }
    }
}
public class PipedInputStream {
    /**
     * Receives a byte and stores it in this stream's {@code buffer}. This
     * method is called by {@link PipedOutputStream#write(int)}. The least
     * significant byte of the integer {@code oneByte} is stored at index
     * {@code in} in the {@code buffer}.
     *
     * This method blocks as long as {@code buffer} is full.
     *
     * @param oneByte
     *            the byte to store in this pipe.
     * @throws InterruptedIOException
     *             if the {@code buffer} is full and the thread that has called
     *             this method is interrupted.
     * @throws IOException
     *             if this stream is closed or the thread that has last read
     *             from this stream is no longer alive.
     */
    protected synchronized void receive(int oneByte) throws IOException {
        if (buffer == null || isClosed) {
            throw new IOException("Pipe is closed");
        }
        if (lastReader != null && !lastReader.isAlive()) {
            throw new IOException("Pipe broken");
        }
        /*
         * Set the last thread to be writing on this PipedInputStream. If
         * lastWriter dies while someone is waiting to read an IOException of
         * "Pipe broken" will be thrown in read()
         */
        lastWriter = Thread.currentThread();
        try {
            // Block while the circular buffer is full (writer caught up to reader).
            // Wake sleeping readers and re-check periodically so a dead reader is detected
            // even without a notification (hence the bounded wait(1000) instead of wait()).
            while (buffer != null && out == in) {
                if (lastReader != null && !lastReader.isAlive()) {
                    throw new IOException("Pipe broken");
                }
                notifyAll();
                wait(1000);
            }
        } catch (InterruptedException e) {
            // Convert interruption to the InterruptedIOException the contract promises.
            IoUtils.throwInterruptedIoException();
        }
        // The pipe may have been closed while we were waiting.
        if (buffer == null) {
            throw new IOException("Pipe is closed");
        }
        // in == -1 marks an empty buffer; start writing at index 0.
        if (in == -1) {
            in = 0;
        }
        buffer[in++] = (byte) oneByte;
        // Wrap the write index around the circular buffer.
        if (in == buffer.length) {
            in = 0;
        }
        // let blocked readers read the newly available data
        notifyAll();
    }
}
public class SimpleScheduleBuilder { /** * Build the actual Trigger - - NOT intended to be invoked by end users , but * will rather be invoked by a TriggerBuilder which this ScheduleBuilder is * given to . * @ see TriggerBuilder # withSchedule ( IScheduleBuilder ) */ @ Override @ Nonnull public SimpleTrigger build ( ) { } }
final SimpleTrigger ret = new SimpleTrigger ( ) ; ret . setRepeatInterval ( m_nInterval ) ; ret . setRepeatCount ( m_nRepeatCount ) ; ret . setMisfireInstruction ( m_nMisfireInstruction ) ; return ret ;
public class ListGroupCertificateAuthoritiesResult { /** * A list of certificate authorities associated with the group . * < b > NOTE : < / b > This method appends the values to the existing list ( if any ) . Use * { @ link # setGroupCertificateAuthorities ( java . util . Collection ) } or * { @ link # withGroupCertificateAuthorities ( java . util . Collection ) } if you want to override the existing values . * @ param groupCertificateAuthorities * A list of certificate authorities associated with the group . * @ return Returns a reference to this object so that method calls can be chained together . */ public ListGroupCertificateAuthoritiesResult withGroupCertificateAuthorities ( GroupCertificateAuthorityProperties ... groupCertificateAuthorities ) { } }
if ( this . groupCertificateAuthorities == null ) { setGroupCertificateAuthorities ( new java . util . ArrayList < GroupCertificateAuthorityProperties > ( groupCertificateAuthorities . length ) ) ; } for ( GroupCertificateAuthorityProperties ele : groupCertificateAuthorities ) { this . groupCertificateAuthorities . add ( ele ) ; } return this ;
public class EntityInfo { /** * 获取查询字段列表 * @ param tabalis 表别名 * @ param selects 过滤字段 * @ return String */ public CharSequence getQueryColumns ( String tabalis , SelectColumn selects ) { } }
if ( selects == null ) return tabalis == null ? "*" : ( tabalis + ".*" ) ; StringBuilder sb = new StringBuilder ( ) ; for ( Attribute attr : this . attributes ) { if ( ! selects . test ( attr . field ( ) ) ) continue ; if ( sb . length ( ) > 0 ) sb . append ( ',' ) ; sb . append ( getSQLColumn ( tabalis , attr . field ( ) ) ) ; } if ( sb . length ( ) == 0 ) sb . append ( '*' ) ; return sb ;
public class CliArgumentContainer { /** * This method gets the { @ link CliArgument # id ( ) ID } of the { @ link CliArgument argument } . * @ return the { @ link CliArgument # id ( ) ID } or the { @ link CliArgument # name ( ) name } if ID is not set . */ public String getId ( ) { } }
String id = this . argument . id ( ) ; if ( ( id == null ) || ( id . length ( ) == 0 ) ) { id = this . argument . name ( ) ; } return id ;
public class AuditAnnotationAttributes { /** * Gets the selection . * @ param method * the method * @ return the selection */ public SelectionType getSelection ( final Method method ) { } }
final Annotation [ ] annotations = method . getAnnotations ( ) ; return getSelection ( annotations ) ;
public class BTreePage {
    /**
     * Applies the accumulated inserts and removals to this page, producing a new page.
     * This page is not modified; the change set is merged with the existing entries
     * in a single sequential copy pass.
     *
     * @param ci A {@link ChangeInfo} object containing the sorted changes to apply
     *           (assumed sorted by entry index — TODO confirm against ChangeInfo's contract).
     * @return A new BTreePage instance with the updated contents.
     */
    private BTreePage applyInsertsAndRemovals(ChangeInfo ci) {
        int newCount = getCount() + ci.insertCount - ci.deleteCount;

        // Allocate new buffer of the correct size and start copying from the old one.
        val newPage = new BTreePage(this.config, new ByteArraySegment(new byte[DATA_OFFSET + newCount * this.config.entryLength + FOOTER_LENGTH]), false);
        newPage.formatHeaderAndFooter(newCount, getHeaderId());
        // readIndex walks the old page's data; writeIndex walks the new page's data.
        int readIndex = 0;
        int writeIndex = 0;
        for (val e : ci.changes) {
            int entryIndex = e.getKey() * this.config.entryLength;
            if (entryIndex > readIndex) {
                // Copy the untouched run of entries between the previous change and this one.
                int length = entryIndex - readIndex;
                assert length % this.config.entryLength == 0;
                newPage.data.copyFrom(this.data, readIndex, writeIndex, length);
                writeIndex += length;
            }
            // Write new Entry.
            PageEntry entryContents = e.getValue();
            readIndex = entryIndex;
            if (entryContents != null) {
                // Insert new PageEntry. Note: readIndex is NOT advanced here, so the source
                // entry currently at this index (if any) is preserved by a later copy.
                newPage.setEntryAtIndex(writeIndex, entryContents);
                writeIndex += this.config.entryLength;
            } else {
                // This PageEntry has been deleted. Skip over it in the source.
                readIndex += this.config.getEntryLength();
            }
        }
        if (readIndex < this.data.getLength()) {
            // Copy the last part that we may have missed (everything after the final change).
            int length = this.data.getLength() - readIndex;
            newPage.data.copyFrom(this.data, readIndex, writeIndex, length);
        }
        return newPage;
    }
}
public class LocalTaskExecutorService { /** * TODO : should we index this predicate somehow ? * @ param predicate * @ return */ public Map < G , Integer > getGroupSizes ( Predicate < G > predicate ) { } }
Timer . Context ctx = getGroupSizesTimer . time ( ) ; try { return this . taskQueue . getGroupSizes ( predicate ) ; } finally { ctx . stop ( ) ; }
public class RevisionEncoder {
    /**
     * Encodes a Cut operation into the output bit stream.
     *
     * @param part
     *            Reference to the Cut operation
     * @throws EncodingException
     *             if the encoding failed
     */
    private void encodeCut(final DiffPart part) throws EncodingException {
        // Operation tag for Cut: the fixed 3-bit prefix 1 0 1.
        data.writeBit(1);
        data.writeBit(0);
        data.writeBit(1);
        // Payload, each field written with its codec-configured bit width:
        // start position (S-width) and length (E-width) of the cut region.
        data.writeValue(codecData.getBlocksizeS(), part.getStart());
        data.writeValue(codecData.getBlocksizeE(), part.getLength());
        // The part's text carries a numeric value (B-width) — presumably a block
        // identifier for the cut content; TODO confirm against the decoder.
        data.writeValue(codecData.getBlocksizeB(), Integer.parseInt(part.getText()));
        // Pad to the next boundary so the following operation starts aligned.
        data.writeFillBits();
    }
}
public class WikipediaTemplateInfo { /** * Returns a list containing the ids of all pages that contain a template * the name of which equals any of the given Strings . * @ param templateNames * the names of the template that we want to match * @ return A list with the ids of all pages that contain any of the the * specified templates * @ throws WikiApiException * If there was any error retrieving the page object ( most * likely if the templates are corrupted ) */ public List < Integer > getPageIdsContainingTemplateNames ( List < String > templateNames ) throws WikiApiException { } }
return getFilteredPageIds ( templateNames , true ) ;
public class WebAuthenticatorProxy { /** * Create an instance of BasicAuthAuthenticator . * @ return A BasicAuthAuthenticator or { @ code null } if the it could not be created . */ public BasicAuthAuthenticator getBasicAuthAuthenticator ( ) { } }
try { return createBasicAuthenticator ( ) ; } catch ( RegistryException e ) { if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isDebugEnabled ( ) ) { Tr . debug ( tc , "RegistryException while trying to create BasicAuthAuthenticator" , e ) ; } } return null ;
public class ManagedClustersInner { /** * Reset AAD Profile of a managed cluster . * Update the AAD Profile for a managed cluster . * @ param resourceGroupName The name of the resource group . * @ param resourceName The name of the managed cluster resource . * @ param parameters Parameters supplied to the Reset AAD Profile operation for a Managed Cluster . * @ throws IllegalArgumentException thrown if parameters fail the validation * @ throws CloudException thrown if the request is rejected by server * @ throws RuntimeException all other wrapped checked exceptions if the request fails to be sent */ public void resetAADProfile ( String resourceGroupName , String resourceName , ManagedClusterAADProfile parameters ) { } }
resetAADProfileWithServiceResponseAsync ( resourceGroupName , resourceName , parameters ) . toBlocking ( ) . last ( ) . body ( ) ;
public class JcrTools { /** * Register new mixin type if does not exists on workspace the add it to the given node * @ param session the JCR session * @ param node the node where to add the mixin * @ param mixin the mixin name to add * @ throws RepositoryException */ public static void registerAndAddMixinType ( Session session , Node node , String mixin ) throws RepositoryException { } }
registerMixinType ( session , mixin ) ; if ( ! Arrays . asList ( node . getMixinNodeTypes ( ) ) . contains ( mixin ) ) { node . addMixin ( mixin ) ; }
public class Cells { /** * Returns the { @ code String } value of the { @ link Cell } ( associated to { @ code table } ) whose name iscellName , or null * if this Cells object contains no cell whose name is cellName . * @ param nameSpace the name of the owning table * @ param cellName the name of the Cell we want to retrieve from this Cells object . * @ return the { @ code String } value of the { @ link Cell } ( associated to { @ code table } ) whose name is cellName , or * null if this Cells object contains no cell whose name is cellName */ public String getString ( String nameSpace , String cellName ) { } }
return getValue ( nameSpace , cellName , String . class ) ;
public class InjectorJobFactory { /** * { @ inheritDoc } */ public Job newJob ( TriggerFiredBundle bundle , Scheduler scheduler ) throws SchedulerException { } }
Class < ? extends Job > jobClass = bundle . getJobDetail ( ) . getJobClass ( ) ; return this . injector . getInstance ( jobClass ) ;
public class CacheImpl { /** * { @ inheritDoc } */ @ Override public boolean hasKey ( String scope , Object key ) throws InvalidScopeException , InternalCacheEngineException { } }
try { return cacheStoreAdapter . hasKey ( scope , key ) ; } catch ( InvalidScopeException ex ) { throw ex ; } catch ( Exception ex ) { throw new InternalCacheEngineException ( "Exception while checking if the scope " + scope + " has key " + key , ex ) ; }
public class ListQueryExecutionsRequestMarshaller { /** * Marshall the given parameter object . */ public void marshall ( ListQueryExecutionsRequest listQueryExecutionsRequest , ProtocolMarshaller protocolMarshaller ) { } }
if ( listQueryExecutionsRequest == null ) { throw new SdkClientException ( "Invalid argument passed to marshall(...)" ) ; } try { protocolMarshaller . marshall ( listQueryExecutionsRequest . getNextToken ( ) , NEXTTOKEN_BINDING ) ; protocolMarshaller . marshall ( listQueryExecutionsRequest . getMaxResults ( ) , MAXRESULTS_BINDING ) ; protocolMarshaller . marshall ( listQueryExecutionsRequest . getWorkGroup ( ) , WORKGROUP_BINDING ) ; } catch ( Exception e ) { throw new SdkClientException ( "Unable to marshall request to JSON: " + e . getMessage ( ) , e ) ; }
public class MetaMap { /** * Takes a Unicode string and tries to decompose non - 7bit - ascii ( Unicode Basic Latin ) characters into 7bit ascii . * For example , the string ' âåäöốở ' is turned into ' aaaooo ' . * Note that it doesn ' t always succeed for some of the much more complicated characters ( e . g . ' μ ' ) . * Occasionally some complicated characters end up as two characters when the ASCIIFoldingFilter is used . . . * Perhaps we want to adopt this library : * http : / / www . ippatsuman . com / projects / junidecode / */ public static String decomposeToAscii ( final String s ) { } }
/* pure java version , doesn ' t work all the time : String normalized = Normalizer . normalize ( s , Normalizer . Form . NFD ) ; return normalized . replaceAll ( " \ \ p { InCombiningDiacriticalMarks } + " , " " ) ; */ // this works on more cases final char [ ] input = new char [ s . length ( ) ] ; s . getChars ( 0 , s . length ( ) , input , 0 ) ; final char [ ] output = new char [ input . length * 4 ] ; final int numChars = ASCIIFoldingFilter . foldToASCII ( input , 0 , output , 0 , input . length ) ; // now remove anything not in the printable US - ASCII range , but keep newlines final StringBuilder sb = new StringBuilder ( numChars ) ; for ( int i = 0 ; i < numChars ; i ++ ) { final char c = output [ i ] ; // printable US - ASCII is from 32 to 126 if ( ( 32 <= c && c <= 126 ) || '\n' == c ) sb . append ( c ) ; } return sb . toString ( ) ;
public class DocBookBuilder { /** * Creates the Topic component of a chapter . xml for a specific SpecTopic . * @ param buildData Information and data structures for the build . * @ param specTopic The build topic object to get content from . * @ param parentFileLocation The topics parent file location , so the topic can be saved in a subdirectory . * @ return The Topics filename . */ protected String createTopicXMLFile ( final BuildData buildData , final SpecTopic specTopic , final String parentFileLocation ) throws BuildProcessingException { } }
String topicFileName ; final BaseTopicWrapper < ? > topic = specTopic . getTopic ( ) ; if ( topic != null ) { topicFileName = specTopic . getUniqueLinkId ( buildData . isUseFixedUrls ( ) ) + ".xml" ; final String fixedParentFileLocation = buildData . getBuildOptions ( ) . getFlattenTopics ( ) ? buildData . getBookTopicsFolder ( ) : parentFileLocation ; final String fixedEntityPath = fixedParentFileLocation . replace ( buildData . getBookLocaleFolder ( ) , "" ) . replaceAll ( ".*?" + File . separator + "" , "../" ) ; final String topicXML = DocBookBuildUtilities . convertDocumentToDocBookFormattedString ( buildData . getDocBookVersion ( ) , specTopic . getXMLDocument ( ) , DocBookUtilities . TOPIC_ROOT_NODE_NAME , fixedEntityPath + buildData . getEntityFileName ( ) , getXMLFormatProperties ( ) ) ; addToZip ( fixedParentFileLocation + topicFileName , topicXML , buildData ) ; return topicFileName ; } return null ;